gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/**
 * Copyright (C) [2013] [The FURTHeR Project]
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.utah.further.dts.impl.domain.association;

import static edu.utah.further.dts.api.domain.namespace.DtsDataType.ASSOCIATION_TYPE;

import java.util.List;

import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;

import com.apelon.dts.client.association.AssociationType;
import com.apelon.dts.client.association.ItemsConnected;
import com.apelon.dts.client.association.Purpose;

import edu.utah.further.core.api.collections.CollectionUtil;
import edu.utah.further.core.api.context.Implementation;
import edu.utah.further.core.api.lang.ReflectionUtil;
import edu.utah.further.dts.api.domain.association.DtsAssociationType;
import edu.utah.further.dts.api.domain.concept.DtsConcept;
import edu.utah.further.dts.api.domain.namespace.DtsDataType;
import edu.utah.further.dts.impl.domain.AbstractDtsData;

/**
 * A DTS concept association type that wraps an Apelon {@link AssociationType} object and
 * adapts it to the {@link DtsAssociationType} API. Almost every accessor/mutator is a
 * straight delegation to the wrapped Apelon object.
 * <p>
 * -----------------------------------------------------------------------------------<br>
 * (c) 2008-2013 FURTHeR Project, AVP Health Sciences IT Office, University of Utah<br>
 * Contact: {@code <further@utah.edu>}<br>
 * Biomedical Informatics, 26 South 2000 East<br>
 * Room 5775 HSEB, Salt Lake City, UT 84112<br>
 * Day Phone: 1-801-581-4080<br>
 * -----------------------------------------------------------------------------------
 *
 * @author Oren E. Livne {@code <oren.livne@utah.edu>}
 * @version Dec 8, 2008
 */
@Implementation
public class DtsAssociationTypeImpl extends AbstractDtsData implements DtsAssociationType
{
	// ========================= CONSTANTS =================================

	/**
	 * JAXB name of this entity.
	 */
	static final String ENTITY_NAME = "associationType";

	// ========================= FIELDS ====================================

	/**
	 * The Apelon DTS association type to be wrapped. {@code null} only when the JAXB
	 * default constructor was used; the delegating methods below will then throw
	 * {@link NullPointerException} if invoked.
	 */
	private final AssociationType associationType;

	// ========================= CONSTRUCTORS ==============================

	/**
	 * A default c-tor. Required by JAXB. Leaves the wrapped object {@code null}.
	 */
	public DtsAssociationTypeImpl()
	{
		super();
		this.associationType = null;
	}

	/**
	 * Wrap an Apelon association type with our API.
	 *
	 * @param associationType
	 *            Apelon association type
	 */
	public DtsAssociationTypeImpl(final AssociationType associationType)
	{
		this(ASSOCIATION_TYPE, associationType);
	}

	/**
	 * Wrap an Apelon association type with our API.
	 *
	 * @param type
	 *            DTS object type of the Apelon object
	 * @param associationType
	 *            Apelon association type
	 */
	protected DtsAssociationTypeImpl(final DtsDataType type,
			final AssociationType associationType)
	{
		super(type, associationType);
		this.associationType = associationType;
	}

	// ========================= IMPLEMENTATION: Object ====================

	/**
	 * Equality is based on namespace identifier and name only.
	 *
	 * @param o
	 *            other object
	 * @return <code>true</code> if and only if the two objects have equal namespace IDs
	 *         and names
	 * @see com.apelon.dts.client.concept.DTSConcept#equals(java.lang.Object)
	 */
	@Override
	public final boolean equals(final Object o)
	{
		if (o == null)
		{
			return false;
		}
		if (o == this)
		{
			return true;
		}

		// An instanceof (rather than getClass()) check is safe only because this
		// method is final -- subclasses cannot break symmetry.
		if (!ReflectionUtil.instanceOf(o, DtsAssociationTypeImpl.class))
		{
			return false;
		}

		final DtsAssociationTypeImpl that = (DtsAssociationTypeImpl) o;
		return new EqualsBuilder()
				.append(getNamespaceId(), that.getNamespaceId())
				.append(getName(), that.getName())
				.isEquals();
	}

	/**
	 * Hash code consistent with {@link #equals(Object)}: based on namespace ID and name.
	 *
	 * @return hash code
	 * @see com.apelon.dts.client.common.DTSObject#hashCode()
	 */
	@Override
	public final int hashCode()
	{
		return new HashCodeBuilder()
				.append(getNamespaceId())
				.append(getName())
				.toHashCode();
	}

	/**
	 * @return a textual representation of the wrapped Apelon object
	 * @see com.apelon.dts.client.common.DTSObject#toString()
	 */
	@Override
	public String toString()
	{
		// FIX: the previous implementation returned null when associationType was null,
		// violating the Object#toString() contract (must return a non-null string) and
		// breaking callers that chain on the result. String.valueOf() yields "null"
		// for a null argument and delegates otherwise.
		return String.valueOf(associationType);
	}

	// ========================= IMPLEMENTATION: DtsData ===================

	/**
	 * Association types have no children; always returns a new empty list.
	 *
	 * @return an empty list
	 * @see edu.utah.further.dts.api.domain.namespace.DtsData#getChildren()
	 */
	@Override
	public List<DtsConcept> getChildren()
	{
		return CollectionUtil.newList();
	}

	/**
	 * @return always <code>false</code>; association types have no children
	 * @see edu.utah.further.dts.api.domain.namespace.DtsData#getHasChildren()
	 */
	@Override
	public boolean getHasChildren()
	{
		return false;
	}

	// ========================= IMPLEMENTATION: DtsAssociationType ========

	/**
	 * @return the inverse name of the wrapped association type
	 * @see com.apelon.dts.client.association.AssociationType#getInverseName()
	 */
	@Override
	public String getInverseName()
	{
		return associationType.getInverseName();
	}

	/**
	 * @return the items-connected descriptor of the wrapped association type
	 * @see com.apelon.dts.client.association.AssociationType#getItemsConnected()
	 */
	public ItemsConnected getItemsConnected()
	{
		return associationType.getItemsConnected();
	}

	/**
	 * @return the namespace ID of the wrapped association type
	 * @see com.apelon.dts.client.common.DTSObject#getNamespaceId()
	 */
	@Override
	public int getNamespaceId()
	{
		return associationType.getNamespaceId();
	}

	/**
	 * @return the purpose of the wrapped association type
	 * @see com.apelon.dts.client.association.AssociationType#getPurpose()
	 */
	public Purpose getPurpose()
	{
		return associationType.getPurpose();
	}

	/**
	 * @param arg0
	 *            new code
	 * @see com.apelon.dts.client.common.DTSObject#setCode(java.lang.String)
	 */
	@Override
	public void setCode(final String arg0)
	{
		associationType.setCode(arg0);
	}

	/**
	 * @param arg0
	 *            new ID
	 * @see com.apelon.dts.client.common.DTSObject#setId(int)
	 */
	@Override
	public void setId(final int arg0)
	{
		associationType.setId(arg0);
	}

	/**
	 * @param arg0
	 *            new inverse name
	 * @see com.apelon.dts.client.association.AssociationType#setInverseName(java.lang.String)
	 */
	public void setInverseName(final String arg0)
	{
		associationType.setInverseName(arg0);
	}

	/**
	 * @param arg0
	 *            new items-connected descriptor
	 * @see com.apelon.dts.client.association.AssociationType#setItemsConnected(com.apelon.dts.client.association.ItemsConnected)
	 */
	public void setItemsConnected(final ItemsConnected arg0)
	{
		associationType.setItemsConnected(arg0);
	}

	/**
	 * @param arg0
	 *            new name
	 * @see com.apelon.dts.client.common.DTSObject#setName(java.lang.String)
	 */
	@Override
	public void setName(final String arg0)
	{
		associationType.setName(arg0);
	}

	/**
	 * @param arg0
	 *            new namespace ID
	 * @see com.apelon.dts.client.common.DTSObject#setNamespaceId(int)
	 */
	@Override
	public void setNamespaceId(final int arg0)
	{
		associationType.setNamespaceId(arg0);
	}

	/**
	 * @param arg0
	 *            new purpose
	 * @see com.apelon.dts.client.association.AssociationType#setPurpose(com.apelon.dts.client.association.Purpose)
	 */
	public void setPurpose(final Purpose arg0)
	{
		associationType.setPurpose(arg0);
	}

	// ========================= PRIVATE METHODS ===========================
}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.databasemigrationservice.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result of a <code>DescribeReplicationSubnetGroups</code> call: an optional pagination
 * marker plus the page of replication subnet group descriptions.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dms-2016-01-01/DescribeReplicationSubnetGroups"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeReplicationSubnetGroupsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable,
        Cloneable {

    /**
     * Pagination token from a previous request; when set, the response contains only
     * records beyond the marker, up to <code>MaxRecords</code>.
     */
    private String marker;

    /** Descriptions of the replication subnet groups in this page. */
    private java.util.List<ReplicationSubnetGroup> replicationSubnetGroups;

    /**
     * Sets the optional pagination token provided by a previous request.
     *
     * @param marker
     *        pagination token; the response includes only records beyond it, up to the
     *        value specified by <code>MaxRecords</code>
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Returns the optional pagination token provided by a previous request.
     *
     * @return the pagination token, or null when none was supplied
     */
    public String getMarker() {
        return this.marker;
    }

    /**
     * Fluent variant of {@link #setMarker(String)}.
     *
     * @param marker
     *        pagination token
     * @return this object, for call chaining
     */
    public DescribeReplicationSubnetGroupsResult withMarker(String marker) {
        setMarker(marker);
        return this;
    }

    /**
     * Returns the replication subnet group descriptions.
     *
     * @return the current list (may be null if never set)
     */
    public java.util.List<ReplicationSubnetGroup> getReplicationSubnetGroups() {
        return replicationSubnetGroups;
    }

    /**
     * Replaces the replication subnet group descriptions with a copy of the given
     * collection.
     *
     * @param replicationSubnetGroups
     *        descriptions to store; null clears the list
     */
    public void setReplicationSubnetGroups(java.util.Collection<ReplicationSubnetGroup> replicationSubnetGroups) {
        // A null argument clears the field rather than storing an empty copy.
        this.replicationSubnetGroups = (replicationSubnetGroups == null)
                ? null
                : new java.util.ArrayList<ReplicationSubnetGroup>(replicationSubnetGroups);
    }

    /**
     * Appends the given values to the existing list (if any).
     * <p>
     * <b>NOTE:</b> use {@link #setReplicationSubnetGroups(java.util.Collection)} or
     * {@link #withReplicationSubnetGroups(java.util.Collection)} to override the
     * existing values instead.
     * </p>
     *
     * @param replicationSubnetGroups
     *        descriptions to append
     * @return this object, for call chaining
     */
    public DescribeReplicationSubnetGroupsResult withReplicationSubnetGroups(ReplicationSubnetGroup... replicationSubnetGroups) {
        if (this.replicationSubnetGroups == null) {
            this.replicationSubnetGroups = new java.util.ArrayList<ReplicationSubnetGroup>(replicationSubnetGroups.length);
        }
        java.util.Collections.addAll(this.replicationSubnetGroups, replicationSubnetGroups);
        return this;
    }

    /**
     * Replaces the replication subnet group descriptions (fluent variant of
     * {@link #setReplicationSubnetGroups(java.util.Collection)}).
     *
     * @param replicationSubnetGroups
     *        descriptions to store
     * @return this object, for call chaining
     */
    public DescribeReplicationSubnetGroupsResult withReplicationSubnetGroups(java.util.Collection<ReplicationSubnetGroup> replicationSubnetGroups) {
        setReplicationSubnetGroups(replicationSubnetGroups);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and
     * debugging. Sensitive data will be redacted from this string using a placeholder
     * value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getMarker() != null) {
            sb.append("Marker: ").append(getMarker()).append(",");
        }
        if (getReplicationSubnetGroups() != null) {
            sb.append("ReplicationSubnetGroups: ").append(getReplicationSubnetGroups());
        }
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DescribeReplicationSubnetGroupsResult)) {
            return false;
        }
        DescribeReplicationSubnetGroupsResult other = (DescribeReplicationSubnetGroupsResult) obj;
        return java.util.Objects.equals(getMarker(), other.getMarker())
                && java.util.Objects.equals(getReplicationSubnetGroups(), other.getReplicationSubnetGroups());
    }

    @Override
    public int hashCode() {
        // Objects.hash folds with the same 31-prime scheme (null -> 0) as the
        // generated code, so hash values are unchanged.
        return java.util.Objects.hash(getMarker(), getReplicationSubnetGroups());
    }

    @Override
    public DescribeReplicationSubnetGroupsResult clone() {
        try {
            return (DescribeReplicationSubnetGroupsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
/**
 * This is free and unencumbered software released into the public domain.
 *
 * Anyone is free to copy, modify, publish, use, compile, sell, or
 * distribute this software, either in source code form or as a compiled
 * binary, for any purpose, commercial or non-commercial, and by any
 * means.
 *
 * In jurisdictions that recognize copyright laws, the author or authors
 * of this software dedicate any and all copyright interest in the
 * software to the public domain. We make this dedication for the benefit
 * of the public at large and to the detriment of our heirs and
 * successors. We intend this dedication to be an overt act of
 * relinquishment in perpetuity of all present and future rights to this
 * software under copyright law.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * For more information, please refer to <http://unlicense.org/>
 */
package net.as_development.asdk.tools.common;

import java.net.URI;

//=============================================================================
/**
 * Fluent builder assembling a {@link URI} from its parts:
 * {@code scheme://user:password@host:port/path?query#fragment}.
 * <p>
 * Notes on behavior:
 * <ul>
 *   <li>Each {@link #path(String)} call appends one {@code /}-prefixed segment.</li>
 *   <li>Query parameters are joined with {@code &}; values are NOT percent-encoded
 *       (the raw string is handed to the single-argument {@link URI} constructor,
 *       which only parses, it does not escape).</li>
 *   <li>If no scheme was set, the literal text {@code "null://"} is produced —
 *       callers are expected to always set a scheme.</li>
 * </ul>
 */
public class UriBuilder
{
    //-------------------------------------------------------------------------
    /** Use {@link #newUri()} — instances are created via the static factory only. */
    private UriBuilder()
        throws Exception
    {}

    //-------------------------------------------------------------------------
    /**
     * @return a fresh, empty builder
     */
    public static UriBuilder newUri()
        throws Exception
    {
        return new UriBuilder ();
    }

    //-------------------------------------------------------------------------
    /** Sets the URI scheme (e.g. {@code http}). @return this builder */
    public UriBuilder scheme(final String sScheme)
        throws Exception
    {
        m_sScheme = sScheme;
        return this;
    }

    //-------------------------------------------------------------------------
    /** Sets the user part of the authority. @return this builder */
    public UriBuilder user(final String sUser)
        throws Exception
    {
        m_sUser = sUser;
        return this;
    }

    //-------------------------------------------------------------------------
    /** Sets the password part of the authority. @return this builder */
    public UriBuilder password(final String sPassword)
        throws Exception
    {
        m_sPassword = sPassword;
        return this;
    }

    //-------------------------------------------------------------------------
    /** Sets the host name. @return this builder */
    public UriBuilder host(final String sHost)
        throws Exception
    {
        m_sHost = sHost;
        return this;
    }

    //-------------------------------------------------------------------------
    /** Sets the port; null means "no explicit port". @return this builder */
    public UriBuilder port(final Integer nPort)
        throws Exception
    {
        m_nPort = nPort;
        return this;
    }

    //-------------------------------------------------------------------------
    /** Appends one path segment, always prefixed with {@code /}. @return this builder */
    public UriBuilder path(final String sPath)
        throws Exception
    {
        m_sPath.append("/"  );
        m_sPath.append(sPath);
        return this;
    }

    //-------------------------------------------------------------------------
    /** Appends one {@code name=value} query parameter. @return this builder */
    public UriBuilder queryParameter(final String sQuery,
                                     final String sValue)
        throws Exception
    {
        if (m_sQueries.length() > 0)
            m_sQueries.append("&");
        m_sQueries.append(sQuery);
        m_sQueries.append("="   );
        m_sQueries.append(sValue);
        return this;
    }

    //-------------------------------------------------------------------------
    /**
     * Conditionally appends a query parameter: a no-op when {@code bAdd} is false.
     * @return this builder
     */
    public UriBuilder queryParameterOpt(final boolean bAdd  ,
                                        final String  sQuery,
                                        final String  sValue)
        throws Exception
    {
        if ( ! bAdd)
            return this;
        return queryParameter(sQuery, sValue);
    }

    //-------------------------------------------------------------------------
    /**
     * FIX: the {@code m_sFragments} buffer existed but nothing ever wrote to it,
     * so the fragment branch of {@link #impl_toUriString()} was dead code.
     * Appends text to the URI fragment (rendered after {@code #}).
     * @return this builder
     */
    public UriBuilder fragment(final String sFragment)
        throws Exception
    {
        m_sFragments.append(sFragment);
        return this;
    }

    //-------------------------------------------------------------------------
    /**
     * @return the assembled {@link URI}
     * @throws Exception if the assembled string is not a valid URI
     */
    public URI toUri ()
        throws Exception
    {
        final String sUri = impl_toUriString ();
        final URI    aUri = new URI (sUri);
        return aUri;
    }

    //-------------------------------------------------------------------------
    /** Renders all configured parts into one raw (unescaped) URI string. */
    private String impl_toUriString ()
        throws Exception
    {
        String  sPath      = null ;
        String  sQuery     = null ;
        String  sFragment  = null ;
        boolean bAuthority = false;

        if (m_sPath.length() > 0)
            sPath = m_sPath.toString();
        if (m_sQueries.length() > 0)
            sQuery = m_sQueries.toString();
        if (m_sFragments.length() > 0)
            sFragment = m_sFragments.toString();

        final StringBuilder sUri = new StringBuilder (256);
        sUri.append(m_sScheme);
        sUri.append("://"    );

        if ( ! isEmpty(m_sUser))
        {
            sUri.append(m_sUser);
            bAuthority = true;
        }

        if ( ! isEmpty(m_sPassword))
        {
            // Only separate with ':' when a user was given before the password.
            if (bAuthority)
                sUri.append(":");
            sUri.append(m_sPassword);
            bAuthority = true;
        }

        if (bAuthority)
            sUri.append("@");

        if ( ! isEmpty(m_sHost))
            sUri.append(m_sHost);

        if (m_nPort != null)
        {
            sUri.append(":"    );
            sUri.append(m_nPort);
        }

        if ( ! isEmpty(sPath))
            sUri.append(sPath);

        if ( ! isEmpty(sQuery))
        {
            sUri.append("?"   );
            sUri.append(sQuery);
        }

        if ( ! isEmpty(sFragment))
        {
            sUri.append("#"      );
            sUri.append(sFragment);
        }

        return sUri.toString ();
    }

    //-------------------------------------------------------------------------
    /**
     * Stdlib replacement for commons-lang3 StringUtils.isEmpty — behaves
     * identically (true for null or zero-length) and drops the only
     * third-party dependency of this class.
     */
    private static boolean isEmpty (final String s)
    {
        return (s == null) || s.isEmpty();
    }

    //-------------------------------------------------------------------------
    /** Accumulated {@code /}-prefixed path segments. */
    private StringBuilder m_sPath = new StringBuilder (256);

    //-------------------------------------------------------------------------
    /** Accumulated {@code &}-joined query parameters. */
    private StringBuilder m_sQueries = new StringBuilder (256);

    //-------------------------------------------------------------------------
    /** Accumulated fragment text (see {@link #fragment(String)}). */
    private StringBuilder m_sFragments = new StringBuilder (256);

    //-------------------------------------------------------------------------
    private String m_sScheme = null;

    //-------------------------------------------------------------------------
    private String m_sUser = null;

    //-------------------------------------------------------------------------
    private String m_sPassword = null;

    //-------------------------------------------------------------------------
    private String m_sHost = null;

    //-------------------------------------------------------------------------
    private Integer m_nPort = null;
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs.s3a;

import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
import static org.apache.hadoop.fs.contract.ContractTestUtils.rm;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionTestsDisabled;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.apache.hadoop.fs.contract.s3a.S3AContract;

import org.junit.Test;

/**
 * Concrete class that extends {@link AbstractTestS3AEncryption}
 * and tests SSE-C encryption (customer-supplied keys).
 * <p>
 * Pattern used throughout: operations performed under key A are retried with a
 * filesystem configured with a different key B and are expected to fail with
 * 403 Access Denied (or 400 Bad Request for an unencrypted client). Each
 * intercept() lambda ends with {@code throw new Exception(...)} so the test
 * fails loudly if S3 unexpectedly allows the access.
 */
public class ITestS3AEncryptionSSEC extends AbstractTestS3AEncryption {

  /**
   * Base configuration: filesystem caching disabled (each test needs its own
   * FS instance bound to a specific key) and SSE-C with a fixed key A.
   */
  @Override
  protected Configuration createConfiguration() {
    Configuration conf = super.createConfiguration();
    S3ATestUtils.disableFilesystemCaching(conf);
    conf.set(Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM,
        getSSEAlgorithm().getMethod());
    conf.set(Constants.SERVER_SIDE_ENCRYPTION_KEY,
        "4niV/jPK5VFRHY+KNb6wtqYd4xXyMgdJ9XQJpcQUVbs=");
    return conf;
  }

  /**
   * This will create and write to a file using encryption key A, then attempt
   * to read from it again with encryption key B.  This will not work as it
   * cannot decrypt the file.
   *
   * This is expected AWS S3 SSE-C behavior.
   *
   * @throws Exception
   */
  @Test
  public void testCreateFileAndReadWithDifferentEncryptionKey() throws
      Exception {
    assumeEnabled();
    skipIfEncryptionTestsDisabled(getConfiguration());

    // single-element array so the lambda below can write the path out
    final Path[] path = new Path[1];
    intercept(java.nio.file.AccessDeniedException.class,
        "Service: Amazon S3; Status Code: 403;", () -> {
          int len = 2048;
          describe("Create an encrypted file of size " + len);
          String src = createFilename(len);
          path[0] = writeThenReadFile(src, len);

          //extract the test FS
          FileSystem fileSystem = createNewFileSystemWithSSECKey(
              "kX7SdwVc/1VXJr76kfKnkQ3ONYhxianyL2+C3rPVT9s=");
          byte[] data = dataset(len, 'a', 'z');
          ContractTestUtils.verifyFileContents(fileSystem, path[0], data);
          // reached only if the read with key B unexpectedly succeeded
          throw new Exception("Fail");
        });
  }

  /**
   * While each object has its own key and should be distinct, this verifies
   * that hadoop treats object keys as a filesystem path.  So if a top level
   * dir is encrypted with keyA, a sublevel dir cannot be accessed with a
   * different keyB.
   *
   * This is expected AWS S3 SSE-C behavior.
   *
   * @throws Exception
   */
  @Test
  public void testCreateSubdirWithDifferentKey() throws Exception {
    assumeEnabled();
    skipIfEncryptionTestsDisabled(getConfiguration());

    final Path[] path = new Path[1];
    intercept(java.nio.file.AccessDeniedException.class,
        "Service: Amazon S3; Status Code: 403;", () -> {

          path[0] = S3ATestUtils.createTestPath(
              new Path(createFilename("dir/"))
          );
          Path nestedDirectory = S3ATestUtils.createTestPath(
              new Path(createFilename("dir/nestedDir/"))
          );
          FileSystem fsKeyB = createNewFileSystemWithSSECKey(
              "G61nz31Q7+zpjJWbakxfTOZW4VS0UmQWAq2YXhcTXoo=");
          getFileSystem().mkdirs(path[0]);
          fsKeyB.mkdirs(nestedDirectory);

          throw new Exception("Exception should be thrown.");
        });
    // clean up the key-A directory created outside the intercept
    rm(getFileSystem(), path[0], true, false);
  }

  /**
   * Ensures a file can't be created with keyA and then renamed with a different
   * key.
   *
   * This is expected AWS S3 SSE-C behavior.
   *
   * @throws Exception
   */
  @Test
  public void testCreateFileThenMoveWithDifferentSSECKey() throws Exception {
    assumeEnabled();
    skipIfEncryptionTestsDisabled(getConfiguration());

    final Path[] path = new Path[1];
    intercept(java.nio.file.AccessDeniedException.class,
        "Service: Amazon S3; Status Code: 403;", () -> {
          int len = 2048;
          String src = createFilename(len);
          path[0] = writeThenReadFile(src, len);

          FileSystem fsKeyB = createNewFileSystemWithSSECKey(
              "NTx0dUPrxoo9+LbNiT/gqf3z9jILqL6ilismFmJO50U=");
          // rename requires a server-side copy, which needs the source key
          fsKeyB.rename(path[0], new Path(createFilename("different-path.txt")));

          throw new Exception("Exception should be thrown.");
        });
  }

  /**
   * General test to make sure move works with SSE-C with the same key, unlike
   * with multiple keys.
   *
   * @throws Exception
   */
  @Test
  public void testRenameFile() throws Exception {
    assumeEnabled();
    skipIfEncryptionTestsDisabled(getConfiguration());

    String src = createFilename("original-path.txt");
    Path path = writeThenReadFile(src, 2048);
    Path newPath = path(createFilename("different-path.txt"));
    getFileSystem().rename(path, newPath);
    byte[] data = dataset(2048, 'a', 'z');
    ContractTestUtils.verifyFileContents(getFileSystem(), newPath, data);
  }

  /**
   * It is possible to list the contents of a directory up to the actual
   * end of the nested directories.  This is due to how S3A mocks the
   * directories and how prefixes work in S3.
   * @throws Exception
   */
  @Test
  public void testListEncryptedDir() throws Exception {
    assumeEnabled();
    skipIfEncryptionTestsDisabled(getConfiguration());

    Path nestedDirectory = S3ATestUtils.createTestPath(
        path(createFilename("/a/b/c/"))
    );
    assertTrue(getFileSystem().mkdirs(nestedDirectory));

    FileSystem fsKeyB = createNewFileSystemWithSSECKey(
        "msdo3VvvZznp66Gth58a91Hxe/UpExMkwU9BHkIjfW8=");

    // intermediate "directories" are listable even with the wrong key
    fsKeyB.listFiles(S3ATestUtils.createTestPath(
        path(createFilename("/a/"))
    ), true);
    fsKeyB.listFiles(S3ATestUtils.createTestPath(
        path(createFilename("/a/b/"))
    ), true);

    //Until this point, no exception is thrown about access
    intercept(java.nio.file.AccessDeniedException.class,
        "Service: Amazon S3; Status Code: 403;", () -> {
          fsKeyB.listFiles(S3ATestUtils.createTestPath(
              path(createFilename("/a/b/c/"))
          ), false);
          throw new Exception("Exception should be thrown.");
        });

    // rebuild a client with SSE-C disabled entirely
    Configuration conf = this.createConfiguration();
    conf.unset(Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM);
    conf.unset(Constants.SERVER_SIDE_ENCRYPTION_KEY);

    S3AContract contract = (S3AContract) createContract(conf);
    contract.init();
    FileSystem unencryptedFileSystem = contract.getTestFileSystem();

    //unencrypted can access until the final directory
    unencryptedFileSystem.listFiles(S3ATestUtils.createTestPath(
        path(createFilename("/a/"))
    ), true);
    unencryptedFileSystem.listFiles(S3ATestUtils.createTestPath(
        path(createFilename("/a/b/"))
    ), true);
    // an unencrypted client gets 400 (not 403) on the SSE-C object itself
    intercept(org.apache.hadoop.fs.s3a.AWSS3IOException.class,
        "Bad Request (Service: Amazon S3; Status Code: 400; Error" +
            " Code: 400 Bad Request;", () -> {
          unencryptedFileSystem.listFiles(S3ATestUtils.createTestPath(
              path(createFilename("/a/b/c/"))
          ), false);
          throw new Exception("Exception should be thrown.");
        });
    rm(getFileSystem(), path(createFilename("/")), true, false);
  }

  /**
   * Much like the above list encrypted directory test, you cannot get the
   * metadata of an object without the correct encryption key.
   * @throws Exception
   */
  @Test
  public void testListStatusEncryptedDir() throws Exception {
    assumeEnabled();
    skipIfEncryptionTestsDisabled(getConfiguration());

    Path nestedDirectory = S3ATestUtils.createTestPath(
        path(createFilename("/a/b/c/"))
    );
    assertTrue(getFileSystem().mkdirs(nestedDirectory));

    FileSystem fsKeyB = createNewFileSystemWithSSECKey(
        "msdo3VvvZznp66Gth58a91Hxe/UpExMkwU9BHkIjfW8=");

    fsKeyB.listStatus(S3ATestUtils.createTestPath(
        path(createFilename("/a/"))));
    fsKeyB.listStatus(S3ATestUtils.createTestPath(
        path(createFilename("/a/b/"))));

    //Until this point, no exception is thrown about access
    intercept(java.nio.file.AccessDeniedException.class,
        "Service: Amazon S3; Status Code: 403;", () -> {
          fsKeyB.listStatus(S3ATestUtils.createTestPath(
              path(createFilename("/a/b/c/"))));

          throw new Exception("Exception should be thrown.");
        });

    //Now try it with an unencrypted filesystem.
    Configuration conf = this.createConfiguration();
    conf.unset(Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM);
    conf.unset(Constants.SERVER_SIDE_ENCRYPTION_KEY);

    S3AContract contract = (S3AContract) createContract(conf);
    contract.init();
    FileSystem unencryptedFileSystem = contract.getTestFileSystem();

    //unencrypted can access until the final directory
    unencryptedFileSystem.listStatus(S3ATestUtils.createTestPath(
        path(createFilename("/a/"))));
    unencryptedFileSystem.listStatus(S3ATestUtils.createTestPath(
        path(createFilename("/a/b/"))));

    intercept(org.apache.hadoop.fs.s3a.AWSS3IOException.class,
        "Bad Request (Service: Amazon S3; Status Code: 400; Error Code: 400" +
            " Bad Request;", () -> {
          unencryptedFileSystem.listStatus(S3ATestUtils.createTestPath(
              path(createFilename("/a/b/c/"))));

          throw new Exception("Exception should be thrown.");
        });
    rm(getFileSystem(), path(createFilename("/")), true, false);
  }

  /**
   * Much like trying to access a encrypted directory, an encrypted file cannot
   * have its metadata read, since both are technically an object.
   * @throws Exception
   */
  @Test
  public void testListStatusEncryptedFile() throws Exception {
    assumeEnabled();
    skipIfEncryptionTestsDisabled(getConfiguration());
    Path nestedDirectory = S3ATestUtils.createTestPath(
        path(createFilename("/a/b/c/"))
    );
    assertTrue(getFileSystem().mkdirs(nestedDirectory));

    String src = createFilename("/a/b/c/fileToStat.txt");
    Path fileToStat = writeThenReadFile(src, 2048);

    FileSystem fsKeyB = createNewFileSystemWithSSECKey(
        "msdo3VvvZznp66Gth58a91Hxe/UpExMkwU9BHkIjfW8=");

    //Until this point, no exception is thrown about access
    intercept(java.nio.file.AccessDeniedException.class,
        "Service: Amazon S3; Status Code: 403;", () -> {
          fsKeyB.listStatus(S3ATestUtils.createTestPath(fileToStat));

          throw new Exception("Exception should be thrown.");
        });
    rm(getFileSystem(), path(createFilename("/")), true, false);
  }

  /**
   * It is possible to delete directories without the proper encryption key and
   * the hierarchy above it.
   *
   * @throws Exception
   */
  @Test
  public void testDeleteEncryptedObjectWithDifferentKey() throws Exception {
    assumeEnabled();
    skipIfEncryptionTestsDisabled(getConfiguration());
    Path nestedDirectory = S3ATestUtils.createTestPath(
        path(createFilename("/a/b/c/"))
    );
    assertTrue(getFileSystem().mkdirs(nestedDirectory));
    String src = createFilename("/a/b/c/filetobedeleted.txt");
    Path fileToDelete = writeThenReadFile(src, 2048);
    FileSystem fsKeyB = createNewFileSystemWithSSECKey(
        "msdo3VvvZznp66Gth58a91Hxe/UpExMkwU9BHkIjfW8=");
    // deleting the file itself requires a HEAD, which needs the right key
    intercept(java.nio.file.AccessDeniedException.class,
        "Forbidden (Service: Amazon S3; Status Code: 403; Error Code: " +
            "403 Forbidden", () -> {
          fsKeyB.delete(fileToDelete, false);
          throw new Exception("Exception should be thrown.");
        });

    //This is possible — recursive directory deletes do not need the key
    fsKeyB.delete(S3ATestUtils.createTestPath(
        path(createFilename("/a/b/c/"))), true);
    fsKeyB.delete(S3ATestUtils.createTestPath(
        path(createFilename("/a/b/"))), true);
    fsKeyB.delete(S3ATestUtils.createTestPath(
        path(createFilename("/a/"))), true);
  }

  /**
   * Builds a fresh S3A filesystem from this test's configuration with the
   * given SSE-C key substituted in.
   *
   * @param sseCKey base64 SSE-C key to use
   * @return an initialized test filesystem bound to that key
   * @throws IOException on contract initialization failure
   */
  private FileSystem createNewFileSystemWithSSECKey(String sseCKey) throws
      IOException {
    Configuration conf = this.createConfiguration();
    conf.set(Constants.SERVER_SIDE_ENCRYPTION_KEY, sseCKey);

    S3AContract contract = (S3AContract) createContract(conf);
    contract.init();
    FileSystem fileSystem = contract.getTestFileSystem();
    return fileSystem;
  }

  /** @return the algorithm under test: SSE with customer-provided keys. */
  @Override
  protected S3AEncryptionMethods getSSEAlgorithm() {
    return S3AEncryptionMethods.SSE_C;
  }
}
package net.dongliu.apk.parser.bean;

import net.dongliu.apk.parser.AbstractApkFile;

import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;

/**
 * Apk meta info parsed from the manifest.
 * <p>
 * Instances are created through {@link #newBuilder()}. All scalar fields are
 * immutable once built; the permission/feature lists are stored by reference
 * (no defensive copy) so the public {@code add*} methods can still append to
 * them after construction.
 *
 * @author dongliu
 */
public class ApkMeta {
    private final String packageName;
    private final String label;
    private final String icon;
    private final String versionName;
    private final Long versionCode;
    private final Long revisionCode;
    // FIX: these two were the only non-final fields although they are assigned
    // exactly once, in the constructor; made final for consistency/immutability.
    private final String sharedUserId;
    private final String sharedUserLabel;
    private final String split;
    private final String configForSplit;
    private final boolean isFeatureSplit;
    private final boolean isSplitRequired;
    private final boolean isolatedSplits;
    private final String installLocation;
    private final String minSdkVersion;
    private final String targetSdkVersion;
    @Nullable
    private final String maxSdkVersion;
    @Nullable
    private final String compileSdkVersion;
    @Nullable
    private final String compileSdkVersionCodename;
    @Nullable
    private final String platformBuildVersionCode;
    @Nullable
    private final String platformBuildVersionName;
    private final GlEsVersion glEsVersion;
    private final boolean anyDensity;
    private final boolean smallScreens;
    private final boolean normalScreens;
    private final boolean largeScreens;
    private final List<String> usesPermissions;
    private final List<UseFeature> usesFeatures;
    private final List<Permission> permissions;

    /** Copies every field from the builder; lists are aliased, not copied. */
    private ApkMeta(Builder builder) {
        packageName = builder.packageName;
        label = builder.label;
        icon = builder.icon;
        versionName = builder.versionName;
        versionCode = builder.versionCode;
        revisionCode = builder.revisionCode;
        sharedUserId = builder.sharedUserId;
        sharedUserLabel = builder.sharedUserLabel;
        split = builder.split;
        configForSplit = builder.configForSplit;
        isFeatureSplit = builder.isFeatureSplit;
        isSplitRequired = builder.isSplitRequired;
        isolatedSplits = builder.isolatedSplits;
        installLocation = builder.installLocation;
        minSdkVersion = builder.minSdkVersion;
        targetSdkVersion = builder.targetSdkVersion;
        maxSdkVersion = builder.maxSdkVersion;
        compileSdkVersion = builder.compileSdkVersion;
        compileSdkVersionCodename = builder.compileSdkVersionCodename;
        platformBuildVersionCode = builder.platformBuildVersionCode;
        platformBuildVersionName = builder.platformBuildVersionName;
        glEsVersion = builder.glEsVersion;
        anyDensity = builder.anyDensity;
        smallScreens = builder.smallScreens;
        normalScreens = builder.normalScreens;
        largeScreens = builder.largeScreens;
        usesPermissions = builder.usesPermissions;
        usesFeatures = builder.usesFeatures;
        permissions = builder.permissions;
    }

    /** Creates an empty builder for {@link ApkMeta}. */
    public static Builder newBuilder() {
        return new Builder();
    }

    public String getPackageName() {
        return packageName;
    }

    public String getVersionName() {
        return versionName;
    }

    public Long getVersionCode() {
        return versionCode;
    }

    public Long getRevisionCode() {
        return revisionCode;
    }

    public String getSharedUserId() {
        return sharedUserId;
    }

    public String getSharedUserLabel() {
        return sharedUserLabel;
    }

    public String getSplit() {
        return split;
    }

    public String getConfigForSplit() {
        return configForSplit;
    }

    public boolean isFeatureSplit() {
        return isFeatureSplit;
    }

    public boolean isSplitRequired() {
        return isSplitRequired;
    }

    public boolean isIsolatedSplits() {
        return isolatedSplits;
    }

    public String getMinSdkVersion() {
        return minSdkVersion;
    }

    public String getTargetSdkVersion() {
        return targetSdkVersion;
    }

    @Nullable
    public String getMaxSdkVersion() {
        return maxSdkVersion;
    }

    @Nullable
    public String getCompileSdkVersion() {
        return compileSdkVersion;
    }

    @Nullable
    public String getCompileSdkVersionCodename() {
        return compileSdkVersionCodename;
    }

    @Nullable
    public String getPlatformBuildVersionCode() {
        return platformBuildVersionCode;
    }

    @Nullable
    public String getPlatformBuildVersionName() {
        return platformBuildVersionName;
    }

    public List<String> getUsesPermissions() {
        return usesPermissions;
    }

    /** Appends a permission name to the live uses-permission list. */
    public void addUsesPermission(String permission) {
        this.usesPermissions.add(permission);
    }

    /**
     * the icon file path in apk
     *
     * @return null if not found
     * @deprecated use {@link AbstractApkFile#getAllIcons()} instead.
     */
    @Deprecated
    public String getIcon() {
        return icon;
    }

    /**
     * alias for getLabel
     */
    public String getName() {
        return label;
    }

    /**
     * get the apk's title(name)
     */
    public String getLabel() {
        return label;
    }

    public boolean isAnyDensity() {
        return anyDensity;
    }

    public boolean isSmallScreens() {
        return smallScreens;
    }

    public boolean isNormalScreens() {
        return normalScreens;
    }

    public boolean isLargeScreens() {
        return largeScreens;
    }

    public GlEsVersion getGlEsVersion() {
        return glEsVersion;
    }

    public List<UseFeature> getUsesFeatures() {
        return usesFeatures;
    }

    /** Appends a uses-feature entry to the live feature list. */
    public void addUseFeatures(UseFeature useFeature) {
        this.usesFeatures.add(useFeature);
    }

    public String getInstallLocation() {
        return installLocation;
    }

    /** Appends a declared permission to the live permission list. */
    public void addPermission(Permission permission) {
        this.permissions.add(permission);
    }

    public List<Permission> getPermissions() {
        return this.permissions;
    }

    @Override
    public String toString() {
        return "packageName: \t" + packageName + "\n"
                + "label: \t" + label + "\n"
                + "icon: \t" + icon + "\n"
                + "versionName: \t" + versionName + "\n"
                + "versionCode: \t" + versionCode + "\n"
                + "minSdkVersion: \t" + minSdkVersion + "\n"
                + "targetSdkVersion: \t" + targetSdkVersion + "\n"
                + "maxSdkVersion: \t" + maxSdkVersion;
    }

    /**
     * Fluent builder for {@link ApkMeta}. The list fields it owns are handed
     * to the built instance by reference, so they stay mutable afterwards.
     */
    public static final class Builder {
        private String packageName;
        private String label;
        private String icon;
        private String versionName;
        private Long versionCode;
        private Long revisionCode;
        private String sharedUserId;
        private String sharedUserLabel;
        private String split;
        private String configForSplit;
        private boolean isFeatureSplit;
        private boolean isSplitRequired;
        private boolean isolatedSplits;
        private String installLocation;
        private String minSdkVersion;
        private String targetSdkVersion;
        private String maxSdkVersion;
        private String compileSdkVersion;
        private String compileSdkVersionCodename;
        private String platformBuildVersionCode;
        private String platformBuildVersionName;
        private GlEsVersion glEsVersion;
        private boolean anyDensity;
        private boolean smallScreens;
        private boolean normalScreens;
        private boolean largeScreens;
        private List<String> usesPermissions = new ArrayList<>();
        private List<UseFeature> usesFeatures = new ArrayList<>();
        private List<Permission> permissions = new ArrayList<>();

        private Builder() {
        }

        public Builder setPackageName(String packageName) {
            this.packageName = packageName;
            return this;
        }

        public Builder setLabel(String label) {
            this.label = label;
            return this;
        }

        public Builder setIcon(String icon) {
            this.icon = icon;
            return this;
        }

        public Builder setVersionName(String versionName) {
            this.versionName = versionName;
            return this;
        }

        public Builder setVersionCode(Long versionCode) {
            this.versionCode = versionCode;
            return this;
        }

        public Builder setRevisionCode(Long revisionCode) {
            this.revisionCode = revisionCode;
            return this;
        }

        public Builder setSharedUserId(String sharedUserId) {
            this.sharedUserId = sharedUserId;
            return this;
        }

        public Builder setSharedUserLabel(String sharedUserLabel) {
            this.sharedUserLabel = sharedUserLabel;
            return this;
        }

        public Builder setSplit(String split) {
            this.split = split;
            return this;
        }

        public Builder setConfigForSplit(String configForSplit) {
            this.configForSplit = configForSplit;
            return this;
        }

        public Builder setIsFeatureSplit(boolean isFeatureSplit) {
            this.isFeatureSplit = isFeatureSplit;
            return this;
        }

        public Builder setIsSplitRequired(boolean isSplitRequired) {
            this.isSplitRequired = isSplitRequired;
            return this;
        }

        public Builder setIsolatedSplits(boolean isolatedSplits) {
            this.isolatedSplits = isolatedSplits;
            return this;
        }

        public Builder setInstallLocation(String installLocation) {
            this.installLocation = installLocation;
            return this;
        }

        public Builder setMinSdkVersion(String minSdkVersion) {
            this.minSdkVersion = minSdkVersion;
            return this;
        }

        public Builder setTargetSdkVersion(String targetSdkVersion) {
            this.targetSdkVersion = targetSdkVersion;
            return this;
        }

        public Builder setMaxSdkVersion(String maxSdkVersion) {
            this.maxSdkVersion = maxSdkVersion;
            return this;
        }

        public Builder setCompileSdkVersion(String compileSdkVersion) {
            this.compileSdkVersion = compileSdkVersion;
            return this;
        }

        public Builder setCompileSdkVersionCodename(String compileSdkVersionCodename) {
            this.compileSdkVersionCodename = compileSdkVersionCodename;
            return this;
        }

        public Builder setPlatformBuildVersionCode(String platformBuildVersionCode) {
            this.platformBuildVersionCode = platformBuildVersionCode;
            return this;
        }

        public Builder setPlatformBuildVersionName(String platformBuildVersionName) {
            this.platformBuildVersionName = platformBuildVersionName;
            return this;
        }

        public Builder setGlEsVersion(GlEsVersion glEsVersion) {
            this.glEsVersion = glEsVersion;
            return this;
        }

        public Builder setAnyDensity(boolean anyDensity) {
            this.anyDensity = anyDensity;
            return this;
        }

        public Builder setSmallScreens(boolean smallScreens) {
            this.smallScreens = smallScreens;
            return this;
        }

        public Builder setNormalScreens(boolean normalScreens) {
            this.normalScreens = normalScreens;
            return this;
        }

        public Builder setLargeScreens(boolean largeScreens) {
            this.largeScreens = largeScreens;
            return this;
        }

        public Builder addUsesPermission(String usesPermission) {
            this.usesPermissions.add(usesPermission);
            return this;
        }

        public Builder addUsesFeature(UseFeature usesFeature) {
            this.usesFeatures.add(usesFeature);
            return this;
        }

        public Builder addPermissions(Permission permission) {
            this.permissions.add(permission);
            return this;
        }

        /** Builds the immutable-ish {@link ApkMeta}. */
        public ApkMeta build() {
            return new ApkMeta(this);
        }
    }
}
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.examples;

import com.google.gcloud.datastore.Datastore;
import com.google.gcloud.datastore.DatastoreFactory;
import com.google.gcloud.datastore.DatastoreOptions;
import com.google.gcloud.datastore.DateTime;
import com.google.gcloud.datastore.Entity;
import com.google.gcloud.datastore.FullEntity;
import com.google.gcloud.datastore.IncompleteKey;
import com.google.gcloud.datastore.Key;
import com.google.gcloud.datastore.KeyFactory;
import com.google.gcloud.datastore.Query;
import com.google.gcloud.datastore.Query.ResultType;
import com.google.gcloud.datastore.QueryResults;
import com.google.gcloud.datastore.StructuredQuery.PropertyFilter;
import com.google.gcloud.datastore.Transaction;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

/**
 * An example of using the Google Cloud Datastore.
 * <p>
 * This example adds, display or clear comments for a given user.
 * <p>
 * Steps needed for running the example:<ol>
 * <li>login using gcloud SDK - {@code gcloud auth login}.</li>
 * <li>compile using maven - {@code mvn compile}</li>
 * <li>run using maven - {@code mvn exec:java
 * -Dexec.mainClass="com.google.gcloud.examples.DatastoreExample"
 * -Dexec.args="[projectId] [user] [delete|display|add comment]"}</li>
 * </ol>
 */
public class DatastoreExample {

  private static final String USER_KIND = "_DS_EXAMPLE_USER";
  private static final String COMMENT_KIND = "_DS_EXAMPLE_COMMENT";
  private static final String NAMESPACE = "gcloud_java_example";
  private static final String DEFAULT_ACTION = "display";
  private static final Map<String, DatastoreAction> ACTIONS = new HashMap<>();

  /**
   * A sub-command of this example. Each action runs inside the supplied
   * transaction against the given user key.
   */
  private interface DatastoreAction {
    void run(Transaction tx, Key userKey, String... args);

    /** Extra command-line parameters this action needs ("" if none). */
    String getRequiredParams();
  }

  /** Deletes a user entity and all of its comment descendants. */
  private static class DeleteAction implements DatastoreAction {
    @Override
    public void run(Transaction tx, Key userKey, String... args) {
      Entity user = tx.get(userKey);
      if (user == null) {
        // FIX: grammar in the user-facing message ("does not exists" -> "does not exist").
        System.out.println("Nothing to delete, user does not exist.");
        return;
      }
      // Key-only (projection) query: we only need the keys to delete.
      Query<Key> query = Query.keyQueryBuilder()
          .namespace(NAMESPACE)
          .kind(COMMENT_KIND)
          .filter(PropertyFilter.hasAncestor(userKey))
          .build();
      QueryResults<Key> comments = tx.run(query);
      int count = 0;
      while (comments.hasNext()) {
        tx.delete(comments.next());
        count++;
      }
      tx.delete(userKey);
      System.out.printf("Deleting user '%s' and %d comment[s].%n", userKey.name(), count);
    }

    @Override
    public String getRequiredParams() {
      return "";
    }
  }

  /** Prints a user's comments, sorted by timestamp. */
  private static class DisplayAction implements DatastoreAction {
    @Override
    public void run(Transaction tx, Key userKey, String... args) {
      Entity user = tx.get(userKey);
      if (user == null) {
        System.out.println("No comments for '" + userKey.name() + "'.");
        return;
      }
      System.out.printf("User '%s' has %d comment[s].%n", userKey.name(), user.getLong("count"));
      // ORDER BY timestamp";
      String gql = "SELECT * FROM " + COMMENT_KIND + " WHERE __key__ HAS ANCESTOR @1";
      Query<Entity> query = Query.gqlQueryBuilder(ResultType.ENTITY, gql)
          .namespace(NAMESPACE)
          .addBinding(userKey)
          .build();
      QueryResults<Entity> results = tx.run(query);
      // We could have added "ORDER BY timestamp" to the query to avoid the sorting below
      // but that would require adding an ancestor index for timestamp
      // see: https://cloud.google.com/datastore/docs/tools/indexconfig
      Map<DateTime, String> sortedComments = new TreeMap<>();
      while (results.hasNext()) {
        Entity result = results.next();
        sortedComments.put(result.getDateTime("timestamp"), result.getString("content"));
      }
      for (Map.Entry<DateTime, String> entry : sortedComments.entrySet()) {
        System.out.printf("\t%s: %s%n", entry.getKey(), entry.getValue());
      }
    }

    @Override
    public String getRequiredParams() {
      return "";
    }
  }

  /** Adds a comment for a user, creating the user entity on first use. */
  private static class AddAction implements DatastoreAction {
    @Override
    public void run(Transaction tx, Key userKey, String... args) {
      Entity user = tx.get(userKey);
      if (user == null) {
        System.out.println("Adding a new user.");
        user = Entity.builder(userKey)
            .set("count", 1L)
            .build();
        tx.add(user);
      } else {
        // Entities are immutable; rebuild with an incremented comment count.
        user = Entity.builder(user).set("count", user.getLong("count") + 1L).build();
        tx.update(user);
      }
      // Join any remaining command-line words into the comment text.
      String content = "No comment.";
      if (args.length > 0) {
        StringBuilder stBuilder = new StringBuilder();
        for (String arg : args) {
          stBuilder.append(arg).append(' ');
        }
        stBuilder.setLength(stBuilder.length() - 1);
        content = stBuilder.toString();
      }
      // Incomplete key: let Datastore allocate the comment id at commit time.
      IncompleteKey commentKey = IncompleteKey.builder(userKey, COMMENT_KIND).build();
      FullEntity<IncompleteKey> comment = FullEntity.builder(commentKey)
          .set("content", content)
          .set("timestamp", DateTime.now())
          .build();
      tx.addWithDeferredIdAllocation(comment);
      System.out.println("Adding a comment to user '" + userKey.name() + "'.");
    }

    @Override
    public String getRequiredParams() {
      return "comment";
    }
  }

  static {
    ACTIONS.put("delete", new DeleteAction());
    ACTIONS.put("add", new AddAction());
    ACTIONS.put("display", new DisplayAction());
  }

  /**
   * Entry point. Args: {@code [projectId] [user] [action] [action params...]};
   * the action defaults to "display" and the user to the OS user name.
   */
  public static void main(String... args) {
    String projectId = args.length > 0 ? args[0] : null;
    // If you want to access a local Datastore running via the gcd sdk, do
    //   DatastoreOptions options = DatastoreOptions.builder()
    //       .projectId(projectId)
    //       .namespace(NAMESPACE)
    //       .host("http://localhost:8080")
    //       .build();
    DatastoreOptions options = DatastoreOptions.builder()
        .projectId(projectId)
        .namespace(NAMESPACE)
        .build();
    String name = args.length > 1 ? args[1] : System.getProperty("user.name");
    Datastore datastore = DatastoreFactory.instance().get(options);
    KeyFactory keyFactory = datastore.newKeyFactory().kind(USER_KIND);
    Key key = keyFactory.newKey(name);
    String actionName = args.length > 2 ? args[2].toLowerCase() : DEFAULT_ACTION;
    DatastoreAction action = ACTIONS.get(actionName);
    if (action == null) {
      // Unknown action: print a usage line listing every action and its params.
      StringBuilder actionAndParams = new StringBuilder();
      for (Map.Entry<String, DatastoreAction> entry : ACTIONS.entrySet()) {
        actionAndParams.append(entry.getKey());
        String param = entry.getValue().getRequiredParams();
        if (param != null && !param.isEmpty()) {
          actionAndParams.append(' ').append(param);
        }
        actionAndParams.append('|');
      }
      actionAndParams.setLength(actionAndParams.length() - 1);
      System.out.printf("Usage: %s [projectId] [user] [%s]%n",
          DatastoreExample.class.getSimpleName(), actionAndParams);
      return;
    }
    // Remaining words become the action's own parameters.
    args = args.length > 3 ? Arrays.copyOfRange(args, 3, args.length) : new String[0];
    Transaction tx = datastore.newTransaction();
    try {
      action.run(tx, key, args);
      tx.commit();
    } finally {
      // Roll back if the action threw before commit, so no partial writes land.
      if (tx.active()) {
        tx.rollback();
      }
    }
  }
}
package com.thaiopensource.xml.dtd.app;

import java.io.IOException;

import com.thaiopensource.xml.dtd.om.AttributeDefault;
import com.thaiopensource.xml.dtd.om.AttributeDefaultVisitor;
import com.thaiopensource.xml.dtd.om.AttributeGroup;
import com.thaiopensource.xml.dtd.om.AttributeGroupVisitor;
import com.thaiopensource.xml.dtd.om.Datatype;
import com.thaiopensource.xml.dtd.om.DatatypeVisitor;
import com.thaiopensource.xml.dtd.om.Def;
import com.thaiopensource.xml.dtd.om.Dtd;
import com.thaiopensource.xml.dtd.om.EnumGroup;
import com.thaiopensource.xml.dtd.om.EnumGroupVisitor;
import com.thaiopensource.xml.dtd.om.Flag;
import com.thaiopensource.xml.dtd.om.FlagRef;
import com.thaiopensource.xml.dtd.om.FlagVisitor;
import com.thaiopensource.xml.dtd.om.ModelGroup;
import com.thaiopensource.xml.dtd.om.ModelGroupVisitor;
import com.thaiopensource.xml.dtd.om.NameSpec;
import com.thaiopensource.xml.dtd.om.NameSpecVisitor;
import com.thaiopensource.xml.dtd.om.TopLevel;
import com.thaiopensource.xml.dtd.om.TopLevelVisitor;
import com.thaiopensource.xml.em.ExternalId;
import com.thaiopensource.xml.out.XmlWriter;

/**
 * Serializes a DTD object model to an XML schema-like representation.
 * Implements every visitor interface of the om package; each visit method
 * emits one element (plus attributes/children) on the underlying
 * {@link XmlWriter}.
 */
public class SchemaWriter implements TopLevelVisitor,
    ModelGroupVisitor, AttributeGroupVisitor, DatatypeVisitor,
    EnumGroupVisitor, FlagVisitor, NameSpecVisitor,
    AttributeDefaultVisitor {
  private final XmlWriter w;

  public SchemaWriter(final XmlWriter writer) {
    this.w = writer;
  }

  /**
   * Writes the whole DTD as a {@code <doctype>} document.
   *
   * @param dtd the DTD object model to serialize
   * @throws IOException if writing fails or a visitor throws a checked exception
   */
  public void writeDtd(final Dtd dtd) throws IOException {
    final String enc = dtd.getEncoding();
    if (enc != null)
      w.writeXmlDecl(enc);
    w.startElement("doctype");
    try {
      dtd.accept(this);
    } catch (final RuntimeException e) {
      throw e;
    } catch (final IOException e) {
      throw e;
    } catch (final Exception e) {
      // FIX: was `throw (IOException) e;` — a ClassCastException if any visitor
      // throws a checked exception other than IOException. Wrap it instead so the
      // declared IOException contract holds and the cause is preserved.
      throw new IOException(e);
    }
    w.endElement();
  }

  public void elementDecl(final NameSpec nameSpec, final ModelGroup modelGroup) throws Exception {
    w.startElement("element");
    nameSpec.accept(this);
    modelGroup.accept(this);
    w.endElement();
  }

  public void attlistDecl(final NameSpec nameSpec, final AttributeGroup attributeGroup)
      throws Exception {
    w.startElement("attlist");
    nameSpec.accept(this);
    attributeGroup.accept(this);
    w.endElement();
  }

  public void processingInstruction(final String target, final String value) throws Exception {
    w.startElement("processingInstruction");
    w.attribute("target", target);
    w.characters(value);
    w.endElement();
  }

  public void comment(final String value) throws Exception {
    w.startElement("comment");
    w.characters(value);
    w.endElement();
  }

  public void modelGroupDef(final String name, final ModelGroup modelGroup) throws Exception {
    w.startElement("modelGroup");
    w.attribute("name", name);
    modelGroup.accept(this);
    w.endElement();
  }

  public void attributeGroupDef(final String name, final AttributeGroup attributeGroup)
      throws Exception {
    w.startElement("attributeGroup");
    w.attribute("name", name);
    attributeGroup.accept(this);
    w.endElement();
  }

  public void enumGroupDef(final String name, final EnumGroup enumGroup) throws Exception {
    w.startElement("enumGroup");
    w.attribute("name", name);
    enumGroup.accept(this);
    w.endElement();
  }

  public void datatypeDef(final String name, final Datatype datatype) throws Exception {
    w.startElement("datatype");
    w.attribute("name", name);
    datatype.accept(this);
    w.endElement();
  }

  public void flagDef(final String name, final Flag flag) throws Exception {
    w.startElement("flag");
    w.attribute("name", name);
    flag.accept(this);
    w.endElement();
  }

  public void attributeDefaultDef(final String name, final AttributeDefault attributeDefault)
      throws Exception {
    w.startElement("attributeDefault");
    w.attribute("name", name);
    attributeDefault.accept(this);
    w.endElement();
  }

  public void choice(final ModelGroup[] members) throws Exception {
    w.startElement("choice");
    for (final ModelGroup member : members)
      member.accept(this);
    w.endElement();
  }

  public void sequence(final ModelGroup[] members) throws Exception {
    w.startElement("sequence");
    for (final ModelGroup member : members)
      member.accept(this);
    w.endElement();
  }

  public void oneOrMore(final ModelGroup member) throws Exception {
    w.startElement("oneOrMore");
    member.accept(this);
    w.endElement();
  }

  public void zeroOrMore(final ModelGroup member) throws Exception {
    w.startElement("zeroOrMore");
    member.accept(this);
    w.endElement();
  }

  public void optional(final ModelGroup member) throws Exception {
    w.startElement("optional");
    member.accept(this);
    w.endElement();
  }

  // References below deliberately emit only the name, not the referenced
  // definition: the definition is written where it is declared.
  public void modelGroupRef(final String name, final ModelGroup modelGroup) throws Exception {
    w.startElement("modelGroupRef");
    w.attribute("name", name);
    w.endElement();
  }

  public void elementRef(final NameSpec nameSpec) throws Exception {
    w.startElement("elementRef");
    nameSpec.accept(this);
    w.endElement();
  }

  public void pcdata() throws Exception {
    w.startElement("pcdata");
    w.endElement();
  }

  public void any() throws Exception {
    w.startElement("any");
    w.endElement();
  }

  public void attribute(final NameSpec nameSpec,
                        final Datatype datatype,
                        final AttributeDefault attributeDefault) throws Exception {
    w.startElement("attribute");
    nameSpec.accept(this);
    datatype.accept(this);
    attributeDefault.accept(this);
    w.endElement();
  }

  public void attributeGroupRef(final String name, final AttributeGroup attributeGroup)
      throws Exception {
    w.startElement("attributeGroupRef");
    w.attribute("name", name);
    w.endElement();
  }

  public void enumValue(final String value) throws Exception {
    w.startElement("enum");
    w.characters(value);
    w.endElement();
  }

  public void enumGroupRef(final String name, final EnumGroup enumGroup) throws Exception {
    w.startElement("enumGroupRef");
    w.attribute("name", name);
    w.endElement();
  }

  public void cdataDatatype() throws IOException {
    w.startElement("cdata");
    w.endElement();
  }

  public void tokenizedDatatype(final String typeName) throws IOException {
    w.startElement("tokenized");
    w.attribute("name", typeName);
    w.endElement();
  }

  public void enumDatatype(final EnumGroup enumGroup) throws Exception {
    w.startElement("tokenized");
    enumGroup.accept(this);
    w.endElement();
  }

  public void notationDatatype(final EnumGroup enumGroup) throws Exception {
    w.startElement("tokenized");
    w.attribute("name", "NOTATION");
    enumGroup.accept(this);
    w.endElement();
  }

  public void datatypeRef(final String name, final Datatype datatype) throws IOException {
    w.startElement("datatypeRef");
    w.attribute("name", name);
    w.endElement();
  }

  public void flagRef(final String name, final Flag flag) throws IOException {
    w.startElement("flagRef");
    w.attribute("name", name);
    w.endElement();
  }

  public void include() throws IOException {
    w.startElement("include");
    w.endElement();
  }

  public void ignore() throws IOException {
    w.startElement("ignore");
    w.endElement();
  }

  public void includedSection(final Flag flag, final TopLevel[] contents) throws Exception {
    w.startElement("includedSection");
    // Only named flags (FlagRef) can be recorded as an attribute.
    if (flag instanceof FlagRef)
      w.attribute("flag", ((FlagRef) flag).getName());
    for (final TopLevel content : contents)
      content.accept(this);
    w.endElement();
  }

  public void ignoredSection(final Flag flag, final String contents) throws Exception {
    w.startElement("ignoredSection");
    if (flag instanceof FlagRef)
      w.attribute("flag", ((FlagRef) flag).getName());
    // Ignored sections are emitted as raw text, not re-parsed.
    w.characters(contents);
    w.endElement();
  }

  public void externalIdDef(final String name, final ExternalId xid) throws IOException {
    w.startElement("externalId");
    w.attribute("name", name);
    externalId(xid);
    w.endElement();
  }

  public void externalIdRef(final String name,
                            final ExternalId xid,
                            final String uri,
                            final String encoding,
                            final TopLevel[] contents) throws Exception {
    // NOTE: xid/uri/encoding are intentionally not serialized here; only the
    // reference name and its expanded contents are written.
    w.startElement("externalIdRef");
    w.attribute("name", name);
    for (final TopLevel content : contents)
      content.accept(this);
    w.endElement();
  }

  public void internalEntityDecl(final String name, final String value) throws Exception {
    w.startElement("internalEntity");
    w.attribute("name", name);
    // A single non-ASCII character is written as a character reference so the
    // output survives re-encoding.
    final boolean useCharRef = value.length() == 1 && value.charAt(0) >= 0x80;
    w.characters(value, useCharRef);
    w.endElement();
  }

  public void externalEntityDecl(final String name, final ExternalId xid) throws IOException {
    w.startElement("externalEntity");
    w.attribute("name", name);
    externalId(xid);
    w.endElement();
  }

  public void notationDecl(final String name, final ExternalId xid) throws IOException {
    w.startElement("notation");
    w.attribute("name", name);
    externalId(xid);
    w.endElement();
  }

  /** Emits the system/public id attributes of an external identifier. */
  private void externalId(final ExternalId xid) throws IOException {
    attributeIfNotNull("system", xid.getSystemId());
    attributeIfNotNull("public", xid.getPublicId());
    // this messes up testing
    // attributeIfNotNull("xml:base", xid.getBaseUri());
  }

  private void attributeIfNotNull(final String name, final String value) throws IOException {
    if (value != null)
      w.attribute(name, value);
  }

  public void nameSpecDef(final String name, final NameSpec nameSpec) throws Exception {
    w.startElement("nameSpec");
    w.attribute("name", name);
    nameSpec.accept(this);
    w.endElement();
  }

  public void name(final String value) throws IOException {
    w.startElement("name");
    w.characters(value);
    w.endElement();
  }

  public void nameSpecRef(final String name, final NameSpec nameSpec) throws Exception {
    w.startElement("nameSpecRef");
    w.attribute("name", name);
    w.endElement();
  }

  public void overriddenDef(final Def def, final boolean duplicate) throws Exception {
    w.startElement("overridden");
    if (duplicate) {
      // Duplicates are recorded by name only; re-emitting the body would repeat it.
      w.startElement("duplicate");
      w.attribute("name", def.getName());
      w.endElement();
    } else
      def.accept(this);
    w.endElement();
  }

  public void paramDef(final String name, final String value) throws IOException {
    w.startElement("param");
    w.attribute("name", name);
    w.characters(value);
    w.endElement();
  }

  public void defaultValue(final String value) throws Exception {
    w.startElement("default");
    w.characters(value);
    w.endElement();
  }

  public void fixedValue(final String value) throws Exception {
    w.startElement("fixed");
    w.characters(value);
    w.endElement();
  }

  public void impliedValue() throws Exception {
    w.startElement("implied");
    w.endElement();
  }

  public void requiredValue() throws Exception {
    w.startElement("required");
    w.endElement();
  }

  public void attributeDefaultRef(final String name, final AttributeDefault attributeDefault)
      throws Exception {
    w.startElement("attributeDefaultRef");
    w.attribute("name", name);
    w.endElement();
  }
}
/*
 * Copyright (c) 2005, 2006, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */
package javax.swing.plaf.nimbus;

import java.awt.*;
import java.awt.geom.*;
import java.awt.image.*;
import javax.swing.*;
import javax.swing.Painter;

/**
 * Auto-generated Nimbus painter for the arrow button of a combo box.
 * Each instance paints exactly one region/state combination, selected by the
 * {@code state} constant passed to the constructor; {@link #doPaint} dispatches
 * to the matching paint method.  All geometry is expressed in the
 * AbstractRegionPainter "decode" coordinate space so it scales with the
 * component.  NOTE(review): this file is machine-generated — the float
 * constants below come from the Nimbus skin definition and should not be
 * hand-edited.
 */
final class ComboBoxArrowButtonPainter extends AbstractRegionPainter {
    //package private integers representing the available states that
    //this painter will paint. These are used when creating a new instance
    //of ComboBoxArrowButtonPainter to determine which region/state is being painted
    //by that instance.
    static final int BACKGROUND_DISABLED = 1;
    static final int BACKGROUND_ENABLED = 2;
    static final int BACKGROUND_ENABLED_MOUSEOVER = 3;
    static final int BACKGROUND_ENABLED_PRESSED = 4;
    static final int BACKGROUND_DISABLED_EDITABLE = 5;
    static final int BACKGROUND_ENABLED_EDITABLE = 6;
    static final int BACKGROUND_MOUSEOVER_EDITABLE = 7;
    static final int BACKGROUND_PRESSED_EDITABLE = 8;
    static final int BACKGROUND_SELECTED_EDITABLE = 9;
    static final int FOREGROUND_ENABLED = 10;
    static final int FOREGROUND_MOUSEOVER = 11;
    static final int FOREGROUND_DISABLED = 12;
    static final int FOREGROUND_PRESSED = 13;
    static final int FOREGROUND_SELECTED = 14;

    private int state; //refers to one of the static final ints above
    private PaintContext ctx;

    //the following 4 variables are reused during the painting code of the layers
    private Path2D path = new Path2D.Float();
    private Rectangle2D rect = new Rectangle2D.Float(0, 0, 0, 0);
    private RoundRectangle2D roundRect = new RoundRectangle2D.Float(0, 0, 0, 0, 0, 0);
    private Ellipse2D ellipse = new Ellipse2D.Float(0, 0, 0, 0);

    //All Colors used for painting are stored here. Ideally, only those colors being used
    //by a particular instance of ComboBoxArrowButtonPainter would be created. For the moment at least,
    //however, all are created for each instance.
    private Color color1 = decodeColor("nimbusBlueGrey", -0.6111111f, -0.110526316f, -0.74509805f, -247);
    private Color color2 = decodeColor("nimbusBase", 0.021348298f, -0.56289876f, 0.2588235f, 0);
    private Color color3 = decodeColor("nimbusBase", 0.010237217f, -0.55799407f, 0.20784312f, 0);
    private Color color4 = new Color(255, 200, 0, 255);
    private Color color5 = decodeColor("nimbusBase", 0.021348298f, -0.59223604f, 0.35294116f, 0);
    private Color color6 = decodeColor("nimbusBase", 0.02391243f, -0.5774183f, 0.32549018f, 0);
    private Color color7 = decodeColor("nimbusBase", 0.021348298f, -0.56722116f, 0.3098039f, 0);
    private Color color8 = decodeColor("nimbusBase", 0.021348298f, -0.567841f, 0.31764704f, 0);
    private Color color9 = decodeColor("nimbusBlueGrey", -0.6111111f, -0.110526316f, -0.74509805f, -191);
    private Color color10 = decodeColor("nimbusBase", 5.1498413E-4f, -0.34585923f, -0.007843137f, 0);
    private Color color11 = decodeColor("nimbusBase", 5.1498413E-4f, -0.095173776f, -0.25882354f, 0);
    private Color color12 = decodeColor("nimbusBase", 0.004681647f, -0.6197143f, 0.43137252f, 0);
    private Color color13 = decodeColor("nimbusBase", 0.0023007393f, -0.46825016f, 0.27058822f, 0);
    private Color color14 = decodeColor("nimbusBase", 5.1498413E-4f, -0.43866998f, 0.24705881f, 0);
    private Color color15 = decodeColor("nimbusBase", 5.1498413E-4f, -0.4625541f, 0.35686272f, 0);
    private Color color16 = decodeColor("nimbusBase", 0.0013483167f, -0.1769987f, -0.12156865f, 0);
    private Color color17 = decodeColor("nimbusBase", 0.059279382f, 0.3642857f, -0.43529415f, 0);
    private Color color18 = decodeColor("nimbusBase", 0.004681647f, -0.6198413f, 0.43921566f, 0);
    private Color color19 = decodeColor("nimbusBase", 0.0023007393f, -0.48084703f, 0.33725488f, 0);
    private Color color20 = decodeColor("nimbusBase", 5.1498413E-4f, -0.4555341f, 0.3215686f, 0);
    private Color color21 = decodeColor("nimbusBase", 5.1498413E-4f, -0.4757143f, 0.43137252f, 0);
    private Color color22 = decodeColor("nimbusBase", -0.57865167f, -0.6357143f, -0.54901963f, 0);
    private Color color23 = decodeColor("nimbusBase", -3.528595E-5f, 0.018606722f, -0.23137257f, 0);
    private Color color24 = decodeColor("nimbusBase", -4.2033195E-4f, -0.38050595f, 0.20392156f, 0);
    private Color color25 = decodeColor("nimbusBase", 7.13408E-4f, -0.064285696f, 0.027450979f, 0);
    private Color color26 = decodeColor("nimbusBase", 0.0f, -0.00895375f, 0.007843137f, 0);
    private Color color27 = decodeColor("nimbusBase", 8.9377165E-4f, -0.13853917f, 0.14509803f, 0);
    private Color color28 = decodeColor("nimbusBase", -0.57865167f, -0.6357143f, -0.37254906f, 0);
    private Color color29 = decodeColor("nimbusBase", -0.57865167f, -0.6357143f, -0.5254902f, 0);
    private Color color30 = decodeColor("nimbusBase", 0.027408898f, -0.57391655f, 0.1490196f, 0);
    private Color color31 = decodeColor("nimbusBase", 0.0f, -0.6357143f, 0.45098037f, 0);

    //Array of current component colors, updated in each paint call
    private Object[] componentColors;

    public ComboBoxArrowButtonPainter(PaintContext ctx, int state) {
        super();
        this.state = state;
        this.ctx = ctx;
    }

    @Override
    protected void doPaint(Graphics2D g, JComponent c, int width, int height, Object[] extendedCacheKeys) {
        //populate componentColors array with colors calculated in getExtendedCacheKeys call
        componentColors = extendedCacheKeys;
        //generate this entire method. Each state/bg/fg/border combo that has
        //been painted gets its own KEY and paint method.
        // NOTE(review): BACKGROUND_DISABLED..BACKGROUND_ENABLED_PRESSED (1-4)
        // intentionally have no case here — those states paint nothing.
        switch(state) {
            case BACKGROUND_DISABLED_EDITABLE: paintBackgroundDisabledAndEditable(g); break;
            case BACKGROUND_ENABLED_EDITABLE: paintBackgroundEnabledAndEditable(g); break;
            case BACKGROUND_MOUSEOVER_EDITABLE: paintBackgroundMouseOverAndEditable(g); break;
            case BACKGROUND_PRESSED_EDITABLE: paintBackgroundPressedAndEditable(g); break;
            case BACKGROUND_SELECTED_EDITABLE: paintBackgroundSelectedAndEditable(g); break;
            case FOREGROUND_ENABLED: paintForegroundEnabled(g); break;
            case FOREGROUND_MOUSEOVER: paintForegroundMouseOver(g); break;
            case FOREGROUND_DISABLED: paintForegroundDisabled(g); break;
            case FOREGROUND_PRESSED: paintForegroundPressed(g); break;
            case FOREGROUND_SELECTED: paintForegroundSelected(g); break;
        }
    }

    @Override
    protected final PaintContext getPaintContext() {
        return ctx;
    }

    // Each background painter fills, in order: outer shadow/border (path1),
    // border gradient (path2), a degenerate focus marker (path3, zero-area),
    // and the inner button fill (path4).
    private void paintBackgroundDisabledAndEditable(Graphics2D g) {
        path = decodePath1();
        g.setPaint(color1);
        g.fill(path);
        path = decodePath2();
        g.setPaint(decodeGradient1(path));
        g.fill(path);
        path = decodePath3();
        g.setPaint(color4);
        g.fill(path);
        path = decodePath4();
        g.setPaint(decodeGradient2(path));
        g.fill(path);
    }

    private void paintBackgroundEnabledAndEditable(Graphics2D g) {
        path = decodePath1();
        g.setPaint(color9);
        g.fill(path);
        path = decodePath2();
        g.setPaint(decodeGradient3(path));
        g.fill(path);
        path = decodePath3();
        g.setPaint(color4);
        g.fill(path);
        path = decodePath4();
        g.setPaint(decodeGradient4(path));
        g.fill(path);
    }

    private void paintBackgroundMouseOverAndEditable(Graphics2D g) {
        path = decodePath1();
        g.setPaint(color9);
        g.fill(path);
        path = decodePath2();
        g.setPaint(decodeGradient5(path));
        g.fill(path);
        path = decodePath3();
        g.setPaint(color4);
        g.fill(path);
        path = decodePath4();
        g.setPaint(decodeGradient6(path));
        g.fill(path);
    }

    private void paintBackgroundPressedAndEditable(Graphics2D g) {
        path = decodePath1();
        g.setPaint(color9);
        g.fill(path);
        path = decodePath2();
        g.setPaint(decodeGradient7(path));
        g.fill(path);
        path = decodePath3();
        g.setPaint(color4);
        g.fill(path);
        path = decodePath4();
        g.setPaint(decodeGradient8(path));
        g.fill(path);
    }

    //selected is painted identically to pressed
    private void paintBackgroundSelectedAndEditable(Graphics2D g) {
        path = decodePath1();
        g.setPaint(color9);
        g.fill(path);
        path = decodePath2();
        g.setPaint(decodeGradient7(path));
        g.fill(path);
        path = decodePath3();
        g.setPaint(color4);
        g.fill(path);
        path = decodePath4();
        g.setPaint(decodeGradient8(path));
        g.fill(path);
    }

    // Foreground painters draw the arrow triangle (paths 5-8 differ only in
    // the x coordinate of the left vertex).
    private void paintForegroundEnabled(Graphics2D g) {
        path = decodePath5();
        g.setPaint(decodeGradient9(path));
        g.fill(path);
    }

    private void paintForegroundMouseOver(Graphics2D g) {
        path = decodePath6();
        g.setPaint(decodeGradient9(path));
        g.fill(path);
    }

    private void paintForegroundDisabled(Graphics2D g) {
        path = decodePath7();
        g.setPaint(color30);
        g.fill(path);
    }

    private void paintForegroundPressed(Graphics2D g) {
        path = decodePath8();
        g.setPaint(color31);
        g.fill(path);
    }

    private void paintForegroundSelected(Graphics2D g) {
        path = decodePath7();
        g.setPaint(color31);
        g.fill(path);
    }

    //lower-right border region with a rounded bottom-right corner
    private Path2D decodePath1() {
        path.reset();
        path.moveTo(decodeX(0.0f), decodeY(2.0f));
        path.lineTo(decodeX(2.75f), decodeY(2.0f));
        path.lineTo(decodeX(2.75f), decodeY(2.25f));
        path.curveTo(decodeAnchorX(2.75f, 0.0f), decodeAnchorY(2.25f, 4.0f), decodeAnchorX(2.125f, 3.0f), decodeAnchorY(2.875f, 0.0f), decodeX(2.125f), decodeY(2.875f));
        path.lineTo(decodeX(0.0f), decodeY(2.875f));
        path.lineTo(decodeX(0.0f), decodeY(2.0f));
        path.closePath();
        return path;
    }

    //full button border with rounded right-hand corners
    private Path2D decodePath2() {
        path.reset();
        path.moveTo(decodeX(0.0f), decodeY(0.25f));
        path.lineTo(decodeX(2.125f), decodeY(0.25f));
        path.curveTo(decodeAnchorX(2.125f, 3.0f), decodeAnchorY(0.25f, 0.0f), decodeAnchorX(2.75f, 0.0f), decodeAnchorY(0.875f, -3.0f), decodeX(2.75f), decodeY(0.875f));
        path.lineTo(decodeX(2.75f), decodeY(2.125f));
        path.curveTo(decodeAnchorX(2.75f, 0.0f), decodeAnchorY(2.125f, 3.0f), decodeAnchorX(2.125f, 3.0f), decodeAnchorY(2.75f, 0.0f), decodeX(2.125f), decodeY(2.75f));
        path.lineTo(decodeX(0.0f), decodeY(2.75f));
        path.lineTo(decodeX(0.0f), decodeY(0.25f));
        path.closePath();
        return path;
    }

    //degenerate (zero-area) path — generator artifact, fills nothing visible
    private Path2D decodePath3() {
        path.reset();
        path.moveTo(decodeX(0.85294116f), decodeY(2.639706f));
        path.lineTo(decodeX(0.85294116f), decodeY(2.639706f));
        path.closePath();
        return path;
    }

    //inner button fill, inset from path2
    private Path2D decodePath4() {
        path.reset();
        path.moveTo(decodeX(1.0f), decodeY(0.375f));
        path.lineTo(decodeX(2.0f), decodeY(0.375f));
        path.curveTo(decodeAnchorX(2.0f, 4.0f), decodeAnchorY(0.375f, 0.0f), decodeAnchorX(2.625f, 0.0f), decodeAnchorY(1.0f, -4.0f), decodeX(2.625f), decodeY(1.0f));
        path.lineTo(decodeX(2.625f), decodeY(2.0f));
        path.curveTo(decodeAnchorX(2.625f, 0.0f), decodeAnchorY(2.0f, 4.0f), decodeAnchorX(2.0f, 4.0f), decodeAnchorY(2.625f, 0.0f), decodeX(2.0f), decodeY(2.625f));
        path.lineTo(decodeX(1.0f), decodeY(2.625f));
        path.lineTo(decodeX(1.0f), decodeY(0.375f));
        path.closePath();
        return path;
    }

    //arrow triangle, enabled position
    private Path2D decodePath5() {
        path.reset();
        path.moveTo(decodeX(0.9995915f), decodeY(1.3616071f));
        path.lineTo(decodeX(2.0f), decodeY(0.8333333f));
        path.lineTo(decodeX(2.0f), decodeY(1.8571429f));
        path.lineTo(decodeX(0.9995915f), decodeY(1.3616071f));
        path.closePath();
        return path;
    }

    //arrow triangle, mouse-over position
    private Path2D decodePath6() {
        path.reset();
        path.moveTo(decodeX(1.00625f), decodeY(1.3526785f));
        path.lineTo(decodeX(2.0f), decodeY(0.8333333f));
        path.lineTo(decodeX(2.0f), decodeY(1.8571429f));
        path.lineTo(decodeX(1.00625f), decodeY(1.3526785f));
        path.closePath();
        return path;
    }

    //arrow triangle, disabled/selected position
    private Path2D decodePath7() {
        path.reset();
        path.moveTo(decodeX(1.0117648f), decodeY(1.3616071f));
        path.lineTo(decodeX(2.0f), decodeY(0.8333333f));
        path.lineTo(decodeX(2.0f), decodeY(1.8571429f));
        path.lineTo(decodeX(1.0117648f), decodeY(1.3616071f));
        path.closePath();
        return path;
    }

    //arrow triangle, pressed position
    private Path2D decodePath8() {
        path.reset();
        path.moveTo(decodeX(1.0242647f), decodeY(1.3526785f));
        path.lineTo(decodeX(2.0f), decodeY(0.8333333f));
        path.lineTo(decodeX(2.0f), decodeY(1.8571429f));
        path.lineTo(decodeX(1.0242647f), decodeY(1.3526785f));
        path.closePath();
        return path;
    }

    // Gradients 1-8 run vertically down the shape's bounds; gradient 9 runs
    // horizontally (right to left) across the arrow.
    private Paint decodeGradient1(Shape s) {
        Rectangle2D bounds = s.getBounds2D();
        float x = (float)bounds.getX();
        float y = (float)bounds.getY();
        float w = (float)bounds.getWidth();
        float h = (float)bounds.getHeight();
        return decodeGradient((0.5f * w) + x, (0.0f * h) + y, (0.5f * w) + x, (1.0f * h) + y,
                new float[] { 0.0f,0.5f,1.0f },
                new Color[] { color2,
                            decodeColor(color2,color3,0.5f),
                            color3});
    }

    private Paint decodeGradient2(Shape s) {
        Rectangle2D bounds = s.getBounds2D();
        float x = (float)bounds.getX();
        float y = (float)bounds.getY();
        float w = (float)bounds.getWidth();
        float h = (float)bounds.getHeight();
        return decodeGradient((0.5f * w) + x, (0.0f * h) + y, (0.5f * w) + x, (1.0f * h) + y,
                new float[] { 0.0f,0.171875f,0.34375f,0.4815341f,0.6193182f,0.8096591f,1.0f },
                new Color[] { color5,
                            decodeColor(color5,color6,0.5f),
                            color6,
                            decodeColor(color6,color7,0.5f),
                            color7,
                            decodeColor(color7,color8,0.5f),
                            color8});
    }

    private Paint decodeGradient3(Shape s) {
        Rectangle2D bounds = s.getBounds2D();
        float x = (float)bounds.getX();
        float y = (float)bounds.getY();
        float w = (float)bounds.getWidth();
        float h = (float)bounds.getHeight();
        return decodeGradient((0.5f * w) + x, (0.0f * h) + y, (0.5f * w) + x, (1.0f * h) + y,
                new float[] { 0.0f,0.5f,1.0f },
                new Color[] { color10,
                            decodeColor(color10,color11,0.5f),
                            color11});
    }

    private Paint decodeGradient4(Shape s) {
        Rectangle2D bounds = s.getBounds2D();
        float x = (float)bounds.getX();
        float y = (float)bounds.getY();
        float w = (float)bounds.getWidth();
        float h = (float)bounds.getHeight();
        return decodeGradient((0.5f * w) + x, (0.0f * h) + y, (0.5f * w) + x, (1.0f * h) + y,
                new float[] { 0.0f,0.12299465f,0.44652405f,0.5441176f,0.64171124f,0.8208556f,1.0f },
                new Color[] { color12,
                            decodeColor(color12,color13,0.5f),
                            color13,
                            decodeColor(color13,color14,0.5f),
                            color14,
                            decodeColor(color14,color15,0.5f),
                            color15});
    }

    private Paint decodeGradient5(Shape s) {
        Rectangle2D bounds = s.getBounds2D();
        float x = (float)bounds.getX();
        float y = (float)bounds.getY();
        float w = (float)bounds.getWidth();
        float h = (float)bounds.getHeight();
        return decodeGradient((0.5f * w) + x, (0.0f * h) + y, (0.5f * w) + x, (1.0f * h) + y,
                new float[] { 0.0f,0.5f,1.0f },
                new Color[] { color16,
                            decodeColor(color16,color17,0.5f),
                            color17});
    }

    private Paint decodeGradient6(Shape s) {
        Rectangle2D bounds = s.getBounds2D();
        float x = (float)bounds.getX();
        float y = (float)bounds.getY();
        float w = (float)bounds.getWidth();
        float h = (float)bounds.getHeight();
        return decodeGradient((0.5f * w) + x, (0.0f * h) + y, (0.5f * w) + x, (1.0f * h) + y,
                new float[] { 0.0f,0.12299465f,0.44652405f,0.5441176f,0.64171124f,0.81283426f,0.98395723f },
                new Color[] { color18,
                            decodeColor(color18,color19,0.5f),
                            color19,
                            decodeColor(color19,color20,0.5f),
                            color20,
                            decodeColor(color20,color21,0.5f),
                            color21});
    }

    private Paint decodeGradient7(Shape s) {
        Rectangle2D bounds = s.getBounds2D();
        float x = (float)bounds.getX();
        float y = (float)bounds.getY();
        float w = (float)bounds.getWidth();
        float h = (float)bounds.getHeight();
        return decodeGradient((0.5f * w) + x, (0.0f * h) + y, (0.5f * w) + x, (1.0f * h) + y,
                new float[] { 0.0f,0.5f,1.0f },
                new Color[] { color22,
                            decodeColor(color22,color23,0.5f),
                            color23});
    }

    private Paint decodeGradient8(Shape s) {
        Rectangle2D bounds = s.getBounds2D();
        float x = (float)bounds.getX();
        float y = (float)bounds.getY();
        float w = (float)bounds.getWidth();
        float h = (float)bounds.getHeight();
        return decodeGradient((0.5f * w) + x, (0.0f * h) + y, (0.5f * w) + x, (1.0f * h) + y,
                new float[] { 0.0f,0.12299465f,0.44652405f,0.5441176f,0.64171124f,0.8208556f,1.0f },
                new Color[] { color24,
                            decodeColor(color24,color25,0.5f),
                            color25,
                            decodeColor(color25,color26,0.5f),
                            color26,
                            decodeColor(color26,color27,0.5f),
                            color27});
    }

    private Paint decodeGradient9(Shape s) {
        Rectangle2D bounds = s.getBounds2D();
        float x = (float)bounds.getX();
        float y = (float)bounds.getY();
        float w = (float)bounds.getWidth();
        float h = (float)bounds.getHeight();
        return decodeGradient((1.0f * w) + x, (0.5f * h) + y, (0.0f * w) + x, (0.5f * h) + y,
                new float[] { 0.0f,0.5f,1.0f },
                new Color[] { color28,
                            decodeColor(color28,color29,0.5f),
                            color29});
    }
}
package org.apache.cassandra.db.filter;
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */

import java.nio.ByteBuffer;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.columniterator.IColumnIterator;
import org.apache.cassandra.thrift.IndexExpression;
import org.apache.cassandra.thrift.IndexOperator;
import org.apache.cassandra.utils.ByteBufferUtil;

/**
 * Extends a column filter (IFilter) to include a number of IndexExpression.
 *
 * Instances are created through {@link #create}: a clause-less request yields
 * an {@code EmptyClauseFilter} (pass-through), while an index clause yields a
 * {@code FilterWithClauses} that may widen the initial slice and post-filter
 * the returned data against the expressions.
 */
public abstract class ExtendedFilter
{
    private static Logger logger = LoggerFactory.getLogger(ExtendedFilter.class);

    public final ColumnFamilyStore cfs;
    protected final IFilter originalFilter;
    // When maxIsColumns is true, maxResults bounds total columns fetched
    // rather than rows (see maxRows()/maxColumns()).
    private final int maxResults;
    private final boolean maxIsColumns;
    private final boolean isPaging;

    /**
     * Factory: picks the concrete subclass based on whether an index clause
     * is present.  Paging is incompatible with index clauses.
     */
    public static ExtendedFilter create(ColumnFamilyStore cfs, IFilter filter, List<IndexExpression> clause, int maxResults, boolean maxIsColumns, boolean isPaging)
    {
        if (clause == null || clause.isEmpty())
        {
            return new EmptyClauseFilter(cfs, filter, maxResults, maxIsColumns, isPaging);
        }
        else
        {
            if (isPaging)
                throw new IllegalArgumentException("Cross-row paging is not supported along with index clauses");
            return new FilterWithClauses(cfs, filter, clause, maxResults, maxIsColumns);
        }
    }

    protected ExtendedFilter(ColumnFamilyStore cfs, IFilter filter, int maxResults, boolean maxIsColumns, boolean isPaging)
    {
        assert cfs != null;
        assert filter != null;
        this.cfs = cfs;
        this.originalFilter = filter;
        this.maxResults = maxResults;
        this.maxIsColumns = maxIsColumns;
        this.isPaging = isPaging;
        // NOTE: mutates the caller-supplied filter's column limit up front
        // when the result cap is column-based.
        if (maxIsColumns)
            originalFilter.updateColumnsLimit(maxResults);
        if (isPaging && (!(originalFilter instanceof SliceQueryFilter) || ((SliceQueryFilter)originalFilter).finish.remaining() != 0))
            throw new IllegalArgumentException("Cross-row paging is only supported for SliceQueryFilter having an empty finish column");
    }

    /** Row cap for this query; unbounded when the cap counts columns instead. */
    public int maxRows()
    {
        return maxIsColumns ? Integer.MAX_VALUE : maxResults;
    }

    /** Column cap for this query; unbounded when the cap counts rows instead. */
    public int maxColumns()
    {
        return maxIsColumns ? maxResults : Integer.MAX_VALUE;
    }

    /**
     * Update the filter if necessary given the number of column already
     * fetched.
     */
    public void updateFilter(int currentColumnsCount)
    {
        // As soon as we'd done our first call, we want to reset the start column if we're paging
        // (the constructor guarantees initialFilter() is a SliceQueryFilter when isPaging is set)
        if (isPaging)
            ((SliceQueryFilter)initialFilter()).start = ByteBufferUtil.EMPTY_BYTE_BUFFER;

        if (!maxIsColumns)
            return;

        int remaining = maxResults - currentColumnsCount;
        initialFilter().updateColumnsLimit(remaining);
    }

    /** The initial filter we'll do our first slice with (either the original or a superset of it) */
    public abstract IFilter initialFilter();

    public abstract List<IndexExpression> getClause();

    /**
     * Returns a filter to query the columns from the clause that the initial slice filter may not have caught.
     * @param data the data retrieve by the initial filter
     * @return a filter or null if there can't be any columns we missed with our initial filter (typically if it was a names query, or a slice of the entire row)
     */
    public abstract IFilter getExtraFilter(ColumnFamily data);

    /**
     * @return data pruned down to the columns originally asked for
     */
    public abstract ColumnFamily prune(ColumnFamily data);

    /**
     * @return true if the provided data satisfies all the expressions from
     * the clause of this filter.
     */
    public abstract boolean isSatisfiedBy(ColumnFamily data);

    /** Maps a comparator result onto an index operator (e.g. GT ⇔ comparison &gt; 0). */
    public static boolean satisfies(int comparison, IndexOperator op)
    {
        switch (op)
        {
            case EQ:
                return comparison == 0;
            case GTE:
                return comparison >= 0;
            case GT:
                return comparison > 0;
            case LTE:
                return comparison <= 0;
            case LT:
                return comparison < 0;
            default:
                throw new IllegalStateException();
        }
    }

    /** ExtendedFilter backed by a non-empty index clause. */
    private static class FilterWithClauses extends ExtendedFilter
    {
        protected final List<IndexExpression> clause;
        protected final IFilter initialFilter;

        public FilterWithClauses(ColumnFamilyStore cfs, IFilter filter, List<IndexExpression> clause, int maxResults, boolean maxIsColumns)
        {
            super(cfs, filter, maxResults, maxIsColumns, false);
            assert clause != null;
            this.clause = clause;
            this.initialFilter = computeInitialFilter();
        }

        /** Sets up the initial filter.
         */
        private IFilter computeInitialFilter()
        {
            if (originalFilter instanceof SliceQueryFilter)
            {
                // if we have a high chance of getting all the columns in a single index slice (and it's not too costly), do that.
                // otherwise, the extraFilter (lazily created) will fetch by name the columns referenced by the additional expressions.
                if (cfs.getMaxRowSize() < DatabaseDescriptor.getColumnIndexSize())
                {
                    logger.debug("Expanding slice filter to entire row to cover additional expressions");
                    return new SliceQueryFilter(ByteBufferUtil.EMPTY_BYTE_BUFFER,
                                                ByteBufferUtil.EMPTY_BYTE_BUFFER,
                                                ((SliceQueryFilter) originalFilter).reversed,
                                                Integer.MAX_VALUE);
                }
            }
            else
            {
                logger.debug("adding columns to original Filter to cover additional expressions");
                assert originalFilter instanceof NamesQueryFilter;
                SortedSet<ByteBuffer> columns = new TreeSet<ByteBuffer>(cfs.getComparator());
                for (IndexExpression expr : clause)
                {
                    columns.add(expr.column_name);
                }
                if (columns.size() > 0)
                {
                    columns.addAll(((NamesQueryFilter) originalFilter).columns);
                    return new NamesQueryFilter(columns);
                }
            }
            // slice query too large to expand — keep the original filter
            return originalFilter;
        }

        public IFilter initialFilter()
        {
            return initialFilter;
        }

        public List<IndexExpression> getClause()
        {
            return clause;
        }

        /*
         * We may need an extra query only if the original was a slice query (and thus may have miss the expression for the clause).
         * Even then, there is no point in doing an extra query if the original filter grabbed the whole row.
         * Lastly, we only need the extra query if we haven't yet got all the expressions from the clause.
         */
        private boolean needsExtraQuery(ColumnFamily data)
        {
            if (!(originalFilter instanceof SliceQueryFilter))
                return false;

            SliceQueryFilter filter = (SliceQueryFilter)originalFilter;
            // Check if we've fetch the whole row
            if (filter.start.equals(ByteBufferUtil.EMPTY_BYTE_BUFFER)
             && filter.finish.equals(ByteBufferUtil.EMPTY_BYTE_BUFFER)
             && filter.count == Integer.MAX_VALUE)
                return false;

            for (IndexExpression expr : clause)
            {
                if (data.getColumn(expr.column_name) == null)
                {
                    logger.debug("adding extraFilter to cover additional expressions");
                    return true;
                }
            }
            return false;
        }

        public IFilter getExtraFilter(ColumnFamily data)
        {
            if (!needsExtraQuery(data))
                return null;

            // Note: for counters we must be careful to not add a column that was already there (to avoid overcount). That is
            // why we do the dance of avoiding to query any column we already have (it's also more efficient anyway)
            SortedSet<ByteBuffer> columns = new TreeSet<ByteBuffer>(cfs.getComparator());
            for (IndexExpression expr : clause)
            {
                if (data.getColumn(expr.column_name) == null)
                    columns.add(expr.column_name);
            }
            assert !columns.isEmpty();
            return new NamesQueryFilter(columns);
        }

        public ColumnFamily prune(ColumnFamily data)
        {
            // if the initial filter wasn't widened there is nothing to prune away
            if (initialFilter == originalFilter)
                return data;
            ColumnFamily pruned = data.cloneMeShallow();
            IColumnIterator iter = originalFilter.getMemtableColumnIterator(data, null);
            originalFilter.collectReducedColumns(pruned, iter, cfs.gcBefore());
            return pruned;
        }

        public boolean isSatisfiedBy(ColumnFamily data)
        {
            // We enforces even the primary clause because reads are not synchronized with writes and it is thus possible to have a race
            // where the index returned a row which doesn't have the primary column when we actually read it
            for (IndexExpression expression : clause)
            {
                // check column data vs expression
                IColumn column = data.getColumn(expression.column_name);
                if (column == null)
                    return false;
                int v = data.metadata().getValueValidator(expression.column_name).compare(column.value(), expression.value);
                if (!satisfies(v, expression.op))
                    return false;
            }
            return true;
        }
    }

    /** Pass-through ExtendedFilter used when there is no index clause. */
    private static class EmptyClauseFilter extends ExtendedFilter
    {
        public EmptyClauseFilter(ColumnFamilyStore cfs, IFilter filter, int maxResults, boolean maxIsColumns, boolean isPaging)
        {
            super(cfs, filter, maxResults, maxIsColumns, isPaging);
        }

        public IFilter initialFilter()
        {
            return originalFilter;
        }

        public List<IndexExpression> getClause()
        {
            throw new UnsupportedOperationException();
        }

        public IFilter getExtraFilter(ColumnFamily data)
        {
            return null;
        }

        public ColumnFamily prune(ColumnFamily data)
        {
            return data;
        }

        public boolean isSatisfiedBy(ColumnFamily data)
        {
            return true;
        }
    }
}
package com.smict.schedule.data; import java.io.IOException; import java.net.ConnectException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Timestamp; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import com.smict.person.model.DoctorModel; import com.smict.schedule.model.ScheduleModel; import ldc.util.Auth; import ldc.util.DBConnect; import ldc.util.DateUtil; public class ScheduleData { private DBConnect agent = new DBConnect(); Connection conn = null; Statement Stmt = null; PreparedStatement pStmt = null; ResultSet rs = null; DateUtil dateUtil = new DateUtil(); /** * fetch Treatment room schedule. * @author anubissmile * @param ScheduleModel schModel * @return List<ScheduleModel> */ public List<ScheduleModel> fetchDentistSchedule(ScheduleModel schModel){ String start, end, branch_id, room; start = schModel.getStartDateTime(); end = schModel.getEndDateTime(); branch_id = Auth.user().getBranchCode(); room = Integer.toString(schModel.getBranchRoomId()); String SQL = "SELECT doctor_workday.workday_id, " + "doctor_workday.doctor_id, " + "doctor_workday.start_datetime, " + "doctor_workday.end_datetime, " + "doctor_workday.work_hour, " + "doctor_workday.branch_id, " + "doctor_workday.branch_room_id, " + "doctor_workday.checkin_status, " + "doctor_workday.checkin_datetime, " + "doctor_workday.checkout_datetime, " + "doctor.first_name_th, " + "doctor.last_name_th, " + "room_id.room_id, room_id.room_name " + "FROM doctor_workday " + "LEFT JOIN doctor ON doctor_workday.doctor_id = doctor.doctor_id " + "LEFT JOIN room_id ON doctor_workday.branch_id = room_id.room_branch_code " + " AND doctor_workday.branch_room_id = 
room_id.room_id" + " WHERE doctor_workday.start_datetime BETWEEN '" + start + "' AND '" + end + "' " + "AND doctor_workday.branch_id = '" + branch_id + "' AND doctor_workday.branch_room_id = '" + room + "' " + "ORDER BY doctor_workday.branch_room_id ASC, doctor_workday.start_datetime ASC "; agent.connectMySQL(); agent.exeQuery(SQL); if(agent.size() > 0){ try { ResultSet rs = agent.getRs(); List<ScheduleModel> schList = new ArrayList<ScheduleModel>(); while(agent.getRs().next()){ ScheduleModel scheduleModel = new ScheduleModel(); String[] re_start = agent.getRs().getString("start_datetime").split(" "); String[] re_end = agent.getRs().getString("end_datetime").split(" "); scheduleModel.setDBField( rs.getInt("doctor_id"), rs.getInt("branch_id"), rs.getInt("branch_room_id"), re_start[1], re_end[1], rs.getString("checkin_status"), rs.getString("checkin_datetime"), rs.getString("checkout_datetime"), rs.getInt("work_hour"), rs.getString("first_name_th"), rs.getString("last_name_th"), rs.getString("room_name"), schModel.getWorkDate(), rs.getInt("workday_id") ); scheduleModel.setStartDateTime(rs.getString("start_datetime")); scheduleModel.setEndDateTime(rs.getString("end_datetime")); schList.add(scheduleModel); } return schList; } catch (SQLException e) { e.printStackTrace(); } } agent.disconnectMySQL(); return null; } /** * Insert dentist's schedule. * @author anubissmile * @param schModel * @return int | Count of records that affected. 
*/ public int insertDentistSchedule(ScheduleModel schModel){ String SQL = "INSERT INTO `doctor_workday` " + " (`doctor_id`, `start_datetime`, `end_datetime`, `work_hour`, " + "`branch_id`, `branch_room_id`, `checkin_status`, `checkin_datetime`, `checkout_datetime`)" + " VALUES ('" + schModel.getDoctorId() + "', " + "'" + schModel.getStartDateTime() + "', " + "'" + schModel.getEndDateTime() + "', " + "'" + schModel.getWorkHour() + "', " + "'" + schModel.getBranchId() + "', " + "'" + schModel.getBranchRoomId() + "', " + "'" + schModel.getCheckInStatus() + "', " + "'" + schModel.getCheckInDateTime() + "', " + "'" + schModel.getCheckOutDateTime() + "')"; // System.out.println("==============\n" + SQL + "\n=================="); agent.connectMySQL(); int rec = agent.exeUpdate(SQL); agent.disconnectMySQL(); return rec; } /** * Finding the overlap time range. * @author anubissmile * @param ScheduleModel | schModel * @return boolean | Return True when is overlap otherwise False. */ public boolean findOverlapTimeRange(ScheduleModel schModel){ String SQL = "SELECT doctor_workday.workday_id, " + "doctor_workday.doctor_id, " + "doctor_workday.start_datetime, " + "doctor_workday.end_datetime, " + "doctor_workday.work_hour, " + "doctor_workday.branch_id, " + "doctor_workday.branch_room_id, " + "doctor_workday.checkin_status, " + "doctor_workday.checkin_datetime, " + "doctor_workday.checkout_datetime " + "FROM doctor_workday " + "WHERE doctor_workday.start_datetime BETWEEN '" + schModel.getStartDateTime() + "' AND '" + schModel.getEndDateTime() + "' OR " + "doctor_workday.end_datetime BETWEEN '" + schModel.getStartDateTime() + "' AND '" + schModel.getEndDateTime() + "' " + "AND doctor_workday.branch_room_id = '" + schModel.getBranchRoomId() + "' "; agent.connectMySQL(); agent.exeQuery(SQL); int size = agent.size(); agent.disconnectMySQL(); if(size > 0){ return true; } return false; } /** * Checking in the treatment room * @param ScheduleModel schModel * @return int | Count of row 
that get affected by the update. */
public int scheduleCheckingIn(ScheduleModel schModel) {
    // WARNING(review): every query in this DAO is built by string
    // concatenation and is vulnerable to SQL injection. The agent API only
    // accepts a raw SQL string, so this is flagged rather than rewritten;
    // migrate to PreparedStatement when the agent supports bind parameters.
    String SQL = "UPDATE `doctor_workday` SET `checkin_status`='2', `checkin_datetime` = '"
            + DateUtil.GetDatetime_YYYY_MM_DD_HH_MM_SS() + "' "
            + " WHERE (`workday_id`='" + schModel.getWorkDayId()
            + "' AND `branch_id` = '" + schModel.getBranchId() + "')";
    agent.connectMySQL();
    int rec = agent.exeUpdate(SQL);
    agent.disconnectMySQL();
    return rec;
}

/**
 * Checking Out the treatment room: marks the workday row as checked out
 * (status '3') and stamps the checkout time.
 *
 * @param schModel schedule identifying the workday and branch
 * @return count of rows affected by the update
 */
public int scheduleCheckingOut(ScheduleModel schModel) {
    String SQL = "UPDATE `doctor_workday` SET `checkin_status`='3', `checkout_datetime` = '"
            + DateUtil.GetDatetime_YYYY_MM_DD_HH_MM_SS() + "' "
            + " WHERE (`workday_id`='" + schModel.getWorkDayId()
            + "' AND `branch_id` = '" + schModel.getBranchId() + "')";
    agent.connectMySQL();
    int rec = agent.exeUpdate(SQL);
    agent.disconnectMySQL();
    return rec;
}

/**
 * Checks the given employees into the schedule's room by inserting one
 * employee_workday row per employee id (single multi-row INSERT).
 * The schedule's branch id is overwritten from the session user.
 *
 * @param schModel schedule carrying workday/room ids
 * @param Emp      employee ids to check in
 * @return count of rows inserted, or 0 when Emp is null/empty
 */
public int EmpCheckingIn(ScheduleModel schModel, String[] Emp) {
    if (Emp == null || Emp.length == 0) {
        // Guard: the original emitted "... VALUES " with no row tuples for an
        // empty array, which is invalid SQL. Nothing to insert -> 0 rows.
        return 0;
    }
    schModel.setBranchId(Integer.valueOf(Auth.user().getBranchCode()));
    // StringBuilder avoids the quadratic cost of += concatenation in a loop.
    StringBuilder sql = new StringBuilder(
            "INSERT INTO employee_workday (emp_id,branch_id,doctor_workday_id,branch_room_id) VALUES ");
    for (int i = 0; i < Emp.length; i++) {
        if (i > 0) {
            sql.append(",");
        }
        sql.append("('").append(Emp[i])
           .append("','").append(schModel.getBranchId())
           .append("','").append(schModel.getWorkDayId())
           .append("',").append(schModel.getRoomId()).append(")");
    }
    agent.connectMySQL();
    int rec = agent.exeUpdate(sql.toString());
    agent.disconnectMySQL();
    return rec;
}

/**
 * Deletes every employee check-in row matching the schedule's branch,
 * workday and room. The schedule's branch id is overwritten from the
 * session user before building the query.
 *
 * @return count of rows deleted
 */
public int DeleteEmpCheckInRoom(ScheduleModel schModel) {
    schModel.setBranchId(Integer.valueOf(Auth.user().getBranchCode()));
    String SQL = "DELETE From employee_workday "
            + "Where branch_id = '" + schModel.getBranchId() + "' "
            + "AND doctor_workday_id = '" + schModel.getWorkDayId() + "' "
            + "AND branch_room_id = '" + schModel.getRoomId() + "' ";
    agent.connectMySQL();
    int rec = agent.exeUpdate(SQL);
    agent.disconnectMySQL();
    return rec;
}

/**
 * Loads the doctors associated with the current branch (either as manager
 * or standard relation).
 * NOTE(review): this method reads Auth.user().getBranchID() while every
 * other query here uses getBranchCode() — confirm which one is intended.
 *
 * @return map of doctor_id -> "prename firstname lastname" display string
 */
public Map<String, String> Get_DoctorlistForWork() throws IOException, Exception {
    String branchID = Auth.user().getBranchID();
    String SQL = "SELECT doctor.doctor_id, doctor.first_name_th, doctor.last_name_th, pre_name.pre_name_th "
            + "FROM doctor "
            + "INNER JOIN pre_name ON pre_name.pre_name_id = doctor.pre_name_id "
            + "INNER JOIN branch_mgr_rel_doctor ON doctor.doctor_id = branch_mgr_rel_doctor.doctor_id "
            + "INNER JOIN branch_standard_rel_doctor ON doctor.doctor_id = branch_standard_rel_doctor.doctor_id "
            + "WHERE branch_mgr_rel_doctor.branch_id = '" + branchID + "' OR branch_standard_rel_doctor.branch_id = '" + branchID + "' "
            + "GROUP BY doctor.doctor_id ";
    conn = agent.getConnectMYSql();
    try {
        Stmt = conn.createStatement();
        ResultSet rs = Stmt.executeQuery(SQL);
        Map<String, String> ResultList = new HashMap<String, String>();
        while (rs.next()) {
            ResultList.put(rs.getString("doctor_id"),
                    rs.getString("pre_name_th") + "" + rs.getString("first_name_th")
                            + " " + rs.getString("last_name_th"));
        }
        return ResultList;
    } finally {
        // Close in finally so the statement/connection are released even when
        // the query throws (the original leaked them on failure). Closing the
        // Statement also closes its ResultSet per the JDBC contract.
        if (Stmt != null && !Stmt.isClosed()) Stmt.close();
        if (!conn.isClosed()) conn.close();
    }
}

/**
 * Lists today's doctor workday rows for the current branch with a readable
 * check-in status ('Waiting'/'CheckIn'/'CheckOut'), ordered by status.
 *
 * @return list of schedules, or null when the query fails (legacy contract)
 */
public List<ScheduleModel> ListDoctorWorkDayCheck() {
    String branchID = Auth.user().getBranchCode();
    String SQL = "SELECT doctor_workday.workday_id, doctor_workday.doctor_id, pre_name.pre_name_th, doctor.first_name_th, doctor.last_name_th, "
            + "doctor_workday.start_datetime, doctor_workday.end_datetime, "
            + "CASE doctor_workday.checkin_status WHEN '1' THEN 'Waiting' WHEN '2' THEN 'CheckIn' WHEN '3' THEN 'CheckOut' END AS 'Status' "
            + "FROM "
            + "doctor_workday "
            + "INNER JOIN doctor ON doctor_workday.doctor_id = doctor.doctor_id "
            + "INNER JOIN pre_name ON doctor.pre_name_id = pre_name.pre_name_id "
            + "WHERE DATE_FORMAT(doctor_workday.start_datetime,'%Y-%m-%d') = CURDATE() "
            + "AND doctor_workday.branch_id = '" + branchID + "' "
            + "ORDER BY doctor_workday.checkin_status desc ";
    try {
        conn = agent.getConnectMYSql();
        try {
            Stmt = conn.createStatement();
            ResultSet res = Stmt.executeQuery(SQL);
            List<ScheduleModel> schModelList = new ArrayList<ScheduleModel>();
            while (res.next()) {
                ScheduleModel schModel = new ScheduleModel();
                schModel.setWorkDayId(res.getInt("workday_id"));
                schModel.setDoctorId(res.getInt("doctor_id"));
                schModel.setPre_name_th(res.getString("pre_name_th"));
                schModel.setFirst_name_th(res.getString("first_name_th"));
                schModel.setLast_name_th(res.getString("last_name_th"));
                schModel.setCheckInStatus(res.getString("Status"));
                schModel.setStartDateTime(timePart(res.getString("start_datetime")));
                schModel.setEndDateTime(timePart(res.getString("end_datetime")));
                schModelList.add(schModel);
            }
            return schModelList;
        } finally {
            // Release JDBC resources on all paths (originally leaked on error).
            if (Stmt != null && !Stmt.isClosed()) Stmt.close();
            if (!conn.isClosed()) conn.close();
        }
    } catch (Exception e) {
        // Preserve the legacy "null on failure" contract but keep the trace.
        e.printStackTrace();
        return null;
    }
}

/**
 * Inserts an unscheduled ("emergency") workday row for a dentist, with
 * start/end times anchored to today's date.
 *
 * @return count of rows inserted
 */
public int InsertDentistEmergency(ScheduleModel schModel) {
    String SQL = "INSERT INTO `doctor_workday` "
            + " (`doctor_id`, `start_datetime`, `end_datetime`, `work_hour`, "
            + "`branch_id`, `checkin_status`, `branch_room_id`, `checkin_datetime`, `checkout_datetime`)"
            + " VALUES ('" + schModel.getDoctorId() + "', "
            + " concat(CURDATE(),' " + schModel.getStartDateTime() + "',':00'), "
            + " concat(CURDATE(),' " + schModel.getEndDateTime() + "',':00'), "
            + "'" + schModel.getWorkHour() + "', "
            + "'" + schModel.getBranchId() + "', "
            + "'" + schModel.getCheckInStatus() + "', "
            + "'0', "  // branch_room_id 0 = no room assigned yet
            + "'" + schModel.getCheckInDateTime() + "', "
            + "'" + schModel.getCheckOutDateTime() + "')";
    agent.connectMySQL();
    int rec = agent.exeUpdate(SQL);
    agent.disconnectMySQL();
    return rec;
}

/**
 * Loads the treatment rooms of the current branch.
 *
 * @return map of room_id -> room_name
 */
public Map<String, String> Get_DoctorRoom() throws IOException, Exception {
    String branchID = Auth.user().getBranchCode();
    String SQL = "SELECT room_id, room_name "
            + "FROM room_id "
            + "Where room_branch_code = '" + branchID + "'";
    conn = agent.getConnectMYSql();
    try {
        Stmt = conn.createStatement();
        ResultSet rs = Stmt.executeQuery(SQL);
        Map<String, String> ResultList = new HashMap<String, String>();
        while (rs.next()) {
            ResultList.put(rs.getString("room_id"), rs.getString("room_name"));
        }
        return ResultList;
    } finally {
        // Release JDBC resources on all paths (originally leaked on error).
        if (Stmt != null && !Stmt.isClosed()) Stmt.close();
        if (!conn.isClosed()) conn.close();
    }
}

/**
 * Lists today's checked-in doctors (status '2') that have NOT yet been
 * assigned a room (branch_room_id = '0') for the current branch.
 *
 * @return list of schedules, or null when the query fails (legacy contract)
 */
public List<ScheduleModel> ListDoctorWorkDayIsCheckIn() {
    String branchID = Auth.user().getBranchCode();
    String SQL = "SELECT doctor_workday.workday_id, doctor_workday.doctor_id, pre_name.pre_name_th, doctor.first_name_th, doctor.last_name_th, "
            + "doctor_workday.start_datetime, doctor_workday.end_datetime, "
            + "CASE doctor_workday.checkin_status WHEN '1' THEN 'Waiting' WHEN '2' THEN 'CheckIn' WHEN '3' THEN 'CheckOut' END AS 'Status',"
            + "branch_room_id "
            + "FROM "
            + "doctor_workday "
            + "INNER JOIN doctor ON doctor_workday.doctor_id = doctor.doctor_id "
            + "INNER JOIN pre_name ON doctor.pre_name_id = pre_name.pre_name_id "
            + "WHERE DATE_FORMAT(doctor_workday.start_datetime,'%Y-%m-%d') = CURDATE() "
            + "AND doctor_workday.branch_id = '" + branchID + "' AND branch_room_id = '0' AND doctor_workday.checkin_status = '2' "
            + "ORDER BY doctor_workday.checkin_status desc ";
    try {
        conn = agent.getConnectMYSql();
        try {
            Stmt = conn.createStatement();
            ResultSet res = Stmt.executeQuery(SQL);
            List<ScheduleModel> schModelList = new ArrayList<ScheduleModel>();
            while (res.next()) {
                ScheduleModel schModel = new ScheduleModel();
                schModel.setBranchRoomId(res.getInt("branch_room_id"));
                schModel.setWorkDayId(res.getInt("workday_id"));
                schModel.setDoctorId(res.getInt("doctor_id"));
                schModel.setPre_name_th(res.getString("pre_name_th"));
                schModel.setFirst_name_th(res.getString("first_name_th"));
                schModel.setLast_name_th(res.getString("last_name_th"));
                schModel.setCheckInStatus(res.getString("Status"));
                schModel.setStartDateTime(timePart(res.getString("start_datetime")));
                schModel.setEndDateTime(timePart(res.getString("end_datetime")));
                schModelList.add(schModel);
            }
            return schModelList;
        } finally {
            // Release JDBC resources on all paths (originally leaked on error).
            if (Stmt != null && !Stmt.isClosed()) Stmt.close();
            if (!conn.isClosed()) conn.close();
        }
    } catch (Exception e) {
        // Preserve the legacy "null on failure" contract but keep the trace.
        e.printStackTrace();
        return null;
    }
}

/**
 * Lists today's checked-in doctors (status '2') that ARE assigned a room
 * (branch_room_id != '0') for the current branch, including the room name.
 *
 * @return list of schedules, or null when the query fails (legacy contract)
 */
public List<ScheduleModel> ListDoctorIsInRoom() {
    String branchID = Auth.user().getBranchCode();
    String SQL = "SELECT doctor_workday.workday_id, doctor_workday.doctor_id, pre_name.pre_name_th, doctor.first_name_th, doctor.last_name_th, "
            + "doctor_workday.start_datetime, doctor_workday.end_datetime, "
            + "CASE doctor_workday.checkin_status WHEN '1' THEN 'Waiting' WHEN '2' THEN 'CheckIn' WHEN '3' THEN 'CheckOut' END AS 'Status', "
            + "doctor_workday.branch_room_id, room_id.room_name "
            + "FROM "
            + "doctor_workday "
            + "INNER JOIN doctor ON doctor_workday.doctor_id = doctor.doctor_id "
            + "INNER JOIN pre_name ON doctor.pre_name_id = pre_name.pre_name_id "
            + "INNER JOIN room_id ON room_id.room_id = doctor_workday.branch_room_id "
            + "WHERE DATE_FORMAT(doctor_workday.start_datetime,'%Y-%m-%d') = CURDATE() "
            + "AND doctor_workday.branch_id = '" + branchID + "' AND doctor_workday.branch_room_id != '0' AND doctor_workday.checkin_status = '2' "
            + "ORDER BY doctor_workday.checkin_status desc ";
    try {
        conn = agent.getConnectMYSql();
        try {
            Stmt = conn.createStatement();
            ResultSet res = Stmt.executeQuery(SQL);
            List<ScheduleModel> schModelList = new ArrayList<ScheduleModel>();
            while (res.next()) {
                ScheduleModel schModel = new ScheduleModel();
                schModel.setBranchRoomId(res.getInt("branch_room_id"));
                schModel.setRoomName(res.getString("room_name"));
                schModel.setWorkDayId(res.getInt("workday_id"));
                schModel.setDoctorId(res.getInt("doctor_id"));
                schModel.setPre_name_th(res.getString("pre_name_th"));
                schModel.setFirst_name_th(res.getString("first_name_th"));
                schModel.setLast_name_th(res.getString("last_name_th"));
                schModel.setCheckInStatus(res.getString("Status"));
                schModel.setStartDateTime(timePart(res.getString("start_datetime")));
                schModel.setEndDateTime(timePart(res.getString("end_datetime")));
                schModelList.add(schModel);
            }
            return schModelList;
        } finally {
            // Release JDBC resources on all paths (originally leaked on error).
            if (Stmt != null && !Stmt.isClosed()) Stmt.close();
            if (!conn.isClosed()) conn.close();
        }
    } catch (Exception e) {
        // Preserve the legacy "null on failure" contract but keep the trace.
        e.printStackTrace();
        return null;
    }
}

/**
 * Assigns a room to a doctor's workday row for today.
 *
 * @return count of rows affected by the update
 */
public int DoctorUpdateRoom(ScheduleModel schModel) {
    String SQL = "UPDATE doctor_workday SET "
            + "branch_room_id = '" + schModel.getRoomId() + "' "
            + "WHERE doctor_id = '" + schModel.getDoctorId() + "' AND workday_id = '" + schModel.getWorkDayId()
            + "' AND DATE_FORMAT(start_datetime,'%Y-%m-%d') = CURDATE() ";
    agent.connectMySQL();
    int rec = agent.exeUpdate(SQL);
    agent.disconnectMySQL();
    return rec;
}

/**
 * Extracts the "HH:MM:SS" time part from a "YYYY-MM-DD HH:MM:SS[.fff]"
 * datetime string (shared by the list methods above, which previously
 * duplicated this split logic).
 */
private static String timePart(String datetime) {
    return datetime.split(" ")[1].split(Pattern.quote("."))[0];
}
}
/**
 * Copyright 2015 StreamSets Inc.
 *
 * Licensed under the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.pipeline.lib.parser.log;

import com.streamsets.pipeline.api.OnRecordError;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.config.LogMode;
import com.streamsets.pipeline.lib.parser.DataParser;
import com.streamsets.pipeline.lib.parser.DataParserException;
import com.streamsets.pipeline.lib.parser.DataParserFactory;
import com.streamsets.pipeline.lib.parser.DataParserFactoryBuilder;
import com.streamsets.pipeline.lib.parser.DataParserFormat;
import com.streamsets.pipeline.sdk.ContextInfoCreator;
import org.junit.Assert;
import org.junit.Test;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * Tests for {@link LogDataParserFactory} covering the common/combined/error
 * Apache log formats plus custom-format and regex-based parsing.
 */
public class TestLogDataParserFactory {

  /** Apache common-log line shared by most of the tests below. */
  private static final String LOG_LINE =
      "127.0.0.1 ss h [10/Oct/2000:13:55:36 -0700] \"GET /apache_pb.gif HTTP/1.0\" 200 2326";

  /** Same line extended with referrer and user-agent (combined log format). */
  private static final String COMBINED_LOG_LINE = LOG_LINE
      + " \"http:www.example.com/start.html\" \"Mozilla/4.08 [en] (Win98; I ;Nav)\"";

  private Stage.Context getContext() {
    return ContextInfoCreator.createSourceContext("i", false, OnRecordError.TO_ERROR, Collections.<String>emptyList());
  }

  /** Builds a COMMON_LOG_FORMAT factory with the given size limit and retain flag. */
  private DataParserFactory createCommonLogFactory(int maxDataLen, boolean retainOriginalText) {
    return new DataParserFactoryBuilder(getContext(), DataParserFormat.LOG)
        .setMaxDataLen(maxDataLen)
        .setMode(LogMode.COMMON_LOG_FORMAT)
        .setConfig(LogDataParserFactory.RETAIN_ORIGINAL_TEXT_KEY, retainOriginalText)
        .build();
  }

  /** Asserts that the record carries the field and that it holds the expected value. */
  private void assertField(Record record, String field, String expected) {
    Assert.assertTrue(record.has("/" + field));
    Assert.assertEquals(expected, record.get("/" + field).getValueAsString());
  }

  /** Asserts the nine common-log fields parsed from {@link #LOG_LINE}. */
  private void assertCommonLogFields(Record record) {
    assertField(record, Constants.CLIENTIP, "127.0.0.1");
    assertField(record, Constants.USER_IDENT, "ss");
    assertField(record, Constants.USER_AUTH, "h");
    assertField(record, Constants.TIMESTAMP, "10/Oct/2000:13:55:36 -0700");
    assertField(record, Constants.VERB, "GET");
    assertField(record, Constants.REQUEST, "/apache_pb.gif");
    assertField(record, Constants.HTTPVERSION, "1.0");
    assertField(record, Constants.RESPONSE, "200");
    assertField(record, Constants.BYTES, "2326");
  }

  @Test
  public void testGetParserStringWithRetainOriginalText() throws Exception {
    DataParserFactory factory = createCommonLogFactory(100, true);
    DataParser parser = factory.getParser("id", LOG_LINE.getBytes());
    Assert.assertEquals(0, Long.parseLong(parser.getOffset()));
    Record record = parser.parse();
    Assert.assertTrue(record.has("/originalLine"));
    assertCommonLogFields(record);
    Assert.assertEquals(82, Long.parseLong(parser.getOffset()));
    parser.close();
  }

  @Test
  public void testGetParserStringWithOutRetainOriginalText() throws Exception {
    DataParserFactory factory = createCommonLogFactory(100, false);
    DataParser parser = factory.getParser("id", LOG_LINE.getBytes());
    Assert.assertEquals(0, Long.parseLong(parser.getOffset()));
    Record record = parser.parse();
    Assert.assertFalse(record.has("/originalLine"));
    assertCommonLogFields(record);
    Assert.assertEquals(82, Long.parseLong(parser.getOffset()));
    parser.close();
  }

  @Test
  public void testGetParserReader() throws Exception {
    DataParserFactory factory = createCommonLogFactory(100, true);
    InputStream is = new ByteArrayInputStream(LOG_LINE.getBytes());
    DataParser parser = factory.getParser("id", is, "0");
    Assert.assertEquals(0, Long.parseLong(parser.getOffset()));
    Record record = parser.parse();
    Assert.assertTrue(record.has("/originalLine"));
    assertCommonLogFields(record);
    Assert.assertEquals(82, Long.parseLong(parser.getOffset()));
    parser.close();
  }

  @Test(expected = DataParserException.class)
  public void testGetParserReaderLogLineCutShort() throws Exception {
    // Capacity of 25 bytes cannot hold the full log line -> parse must throw.
    DataParserFactory factory = createCommonLogFactory(25, true);
    InputStream is = new ByteArrayInputStream(LOG_LINE.getBytes());
    DataParser parser = factory.getParser("id", is, "0");
    Assert.assertEquals(0, Long.parseLong(parser.getOffset()));
    try {
      parser.parse();
    } finally {
      parser.close();
    }
  }

  @Test
  public void testGetParserReaderWithOffset() throws Exception {
    DataParserFactory factory = createCommonLogFactory(150, false);
    InputStream is = new ByteArrayInputStream(("Hello\n" + LOG_LINE).getBytes());
    DataParser parser = factory.getParser("id", is, "6");
    Assert.assertEquals(6, Long.parseLong(parser.getOffset()));
    Record record = parser.parse();
    Assert.assertFalse(record.has("/originalLine")); //do not retain original line
    assertCommonLogFields(record);
    Assert.assertEquals(88, Long.parseLong(parser.getOffset()));
    parser.close();
  }

  @Test
  public void testFactoryCombinedLogFormatParser() throws DataParserException, IOException {
    DataParserFactory factory = new DataParserFactoryBuilder(getContext(), DataParserFormat.LOG)
        .setMaxDataLen(1000)
        .setMode(LogMode.COMBINED_LOG_FORMAT)
        .setConfig(LogDataParserFactory.RETAIN_ORIGINAL_TEXT_KEY, true)
        .build();
    DataParser parser = factory.getParser("id", COMBINED_LOG_LINE.getBytes());
    Assert.assertEquals(0, Long.parseLong(parser.getOffset()));
    Record record = parser.parse();
    Assert.assertNotNull(record);
    Assert.assertEquals("id::0", record.getHeader().getSourceId());
    Assert.assertEquals(COMBINED_LOG_LINE,
        record.get().getValueAsMap().get("originalLine").getValueAsString());
    Assert.assertFalse(record.has("/truncated"));
    Assert.assertEquals(152, Long.parseLong(parser.getOffset()));
    assertCommonLogFields(record);
    assertField(record, Constants.REFERRER, "\"http:www.example.com/start.html\"");
    assertField(record, Constants.AGENT, "\"Mozilla/4.08 [en] (Win98; I ;Nav)\"");
    parser.close();
  }

  @Test
  public void testFactoryApacheErrorLogFormatParser() throws DataParserException, IOException {
    DataParserFactory factory = new DataParserFactoryBuilder(getContext(), DataParserFormat.LOG)
        .setMaxDataLen(1000)
        .setMode(LogMode.APACHE_ERROR_LOG_FORMAT)
        .setConfig(LogDataParserFactory.RETAIN_ORIGINAL_TEXT_KEY, true)
        .build();
    DataParser parser = factory.getParser("id",
        "[Wed Oct 11 14:32:52 2000] [error] [client 127.0.0.1] client denied by server configuration: /export/home/live/ap/htdocs/test"
            .getBytes());
    Assert.assertEquals(0, Long.parseLong(parser.getOffset()));
    Record record = parser.parse();
    Assert.assertNotNull(record);
    Assert.assertEquals("id::0", record.getHeader().getSourceId());
    Assert.assertEquals("[Wed Oct 11 14:32:52 2000] [error] [client 127.0.0.1] client denied " +
            "by server configuration: /export/home/live/ap/htdocs/test",
        record.get().getValueAsMap().get("originalLine").getValueAsString());
    Assert.assertFalse(record.has("/truncated"));
    Assert.assertEquals(125, Long.parseLong(parser.getOffset()));
    assertField(record, Constants.TIMESTAMP, "Wed Oct 11 14:32:52 2000");
    assertField(record, Constants.LOGLEVEL, "error");
    assertField(record, Constants.CLIENTIP, "127.0.0.1");
    assertField(record, Constants.MESSAGE,
        "client denied by server configuration: /export/home/live/ap/htdocs/test");
    parser.close();
  }

  @Test
  public void testFactoryApacheCustomFormatParser() throws DataParserException, IOException {
    DataParserFactory factory = new DataParserFactoryBuilder(getContext(), DataParserFormat.LOG)
        .setMaxDataLen(1000)
        .setMode(LogMode.APACHE_CUSTOM_LOG_FORMAT)
        .setConfig(LogDataParserFactory.RETAIN_ORIGINAL_TEXT_KEY, true)
        .setConfig(LogDataParserFactory.APACHE_CUSTOMLOG_FORMAT_KEY,
            "%h %l %u [%t] \"%m %U %H\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"")
        .build();
    DataParser parser = factory.getParser("id", COMBINED_LOG_LINE.getBytes());
    Assert.assertEquals(0, Long.parseLong(parser.getOffset()));
    Record record = parser.parse();
    Assert.assertNotNull(record);
    Assert.assertEquals("id::0", record.getHeader().getSourceId());
    Assert.assertEquals(COMBINED_LOG_LINE,
        record.get().getValueAsMap().get("originalLine").getValueAsString());
    Assert.assertFalse(record.has("/truncated"));
    Assert.assertEquals(152, Long.parseLong(parser.getOffset()));
    assertField(record, "remoteHost", "127.0.0.1");
    assertField(record, "logName", "ss");
    assertField(record, "remoteUser", "h");
    assertField(record, "requestTime", "10/Oct/2000:13:55:36 -0700");
    assertField(record, "requestMethod", "GET");
    assertField(record, "urlPath", "/apache_pb.gif");
    assertField(record, "httpversion", "1.0");
    assertField(record, "status", "200");
    assertField(record, "bytesSent", "2326");
    assertField(record, "referer", "http:www.example.com/start.html");
    assertField(record, "userAgent", "Mozilla/4.08 [en] (Win98; I ;Nav)");
    parser.close();
  }

  @Test
  public void testFactoryRegexParser() throws DataParserException, IOException {
    String logLine = LOG_LINE + " Hello";
    String regex =
        "^(\\S+) (\\S+) (\\S+) \\[([\\w:/]+\\s[+\\-]\\d{4})\\] \"(\\S+ \\S+ \\S+)\" (\\d{3}) (\\d+)";
    Map<String, Integer> fieldToGroupMap = new HashMap<>();
    fieldToGroupMap.put("remoteHost", 1);
    fieldToGroupMap.put("logName", 2);
    fieldToGroupMap.put("remoteUser", 3);
    fieldToGroupMap.put("requestTime", 4);
    fieldToGroupMap.put("request", 5);
    fieldToGroupMap.put("status", 6);
    fieldToGroupMap.put("bytesSent", 7);
    DataParserFactory factory = new DataParserFactoryBuilder(getContext(), DataParserFormat.LOG)
        .setMaxDataLen(1000)
        .setMode(LogMode.REGEX)
        .setConfig(LogDataParserFactory.RETAIN_ORIGINAL_TEXT_KEY, true)
        .setConfig(LogDataParserFactory.REGEX_KEY, regex)
        .setConfig(LogDataParserFactory.REGEX_FIELD_PATH_TO_GROUP_KEY, fieldToGroupMap)
        .build();
    DataParser parser = factory.getParser("id", logLine.getBytes());
    Assert.assertEquals(0, Long.parseLong(parser.getOffset()));
    Record record = parser.parse();
    Assert.assertNotNull(record);
    Assert.assertEquals("id::0", record.getHeader().getSourceId());
    Assert.assertEquals(logLine, record.get().getValueAsMap().get("originalLine").getValueAsString());
    Assert.assertFalse(record.has("/truncated"));
    Assert.assertEquals(88, Long.parseLong(parser.getOffset()));
    assertField(record, "remoteHost", "127.0.0.1");
    assertField(record, "logName", "ss");
    assertField(record, "remoteUser", "h");
    assertField(record, "requestTime", "10/Oct/2000:13:55:36 -0700");
    assertField(record, "request", "GET /apache_pb.gif HTTP/1.0");
    assertField(record, "status", "200");
    assertField(record, "bytesSent", "2326");
    parser.close();
  }

  @Test
  public void testCharacterBaseParserMethod() throws Exception {
    DataParserFactory factory = createCommonLogFactory(100, true);
    // Character (String) overload rather than the byte[] overload.
    DataParser parser = factory.getParser("id", LOG_LINE);
    Assert.assertEquals(0, Long.parseLong(parser.getOffset()));
    Record record = parser.parse();
    Assert.assertTrue(record.has("/originalLine"));
    assertCommonLogFields(record);
    Assert.assertEquals(82, Long.parseLong(parser.getOffset()));
    parser.close();
  }
}
/*******************************************************************************
 * Copyright 2015 rzorzorzo@users.sf.net
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package org.rzo.yajsw.os.posix.solaris;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.rzo.yajsw.Constants;
import org.rzo.yajsw.boot.WrapperLoader;
import org.rzo.yajsw.os.AbstractService;
import org.rzo.yajsw.os.JavaHome;
import org.rzo.yajsw.os.OperatingSystem;
import org.rzo.yajsw.os.posix.PosixUtils;
import org.rzo.yajsw.os.posix.VelocityLog;

/**
 * Installs and controls a wrapper daemon by rendering a Velocity template into
 * a launchd property-list file and driving it with {@code launchctl}
 * (load/start/stop/unload/list).
 * <p>
 * NOTE(review): despite the "solaris" package name, every OS command issued
 * here is {@code launchctl} and the default install directory is
 * {@code ~/Library/LaunchAgents} (the macOS service manager) -- confirm this
 * class is intended for this platform.
 */
public class SolarisService extends AbstractService implements Constants
{
	/** Directory the generated .plist file is written to. */
	String		_launchdDir;
	/** Path of the Velocity template used to render the .plist. */
	String		_plistTemplate;
	/** Canonical path of the generated .plist file; null until init() succeeds. */
	String		_plistFile;
	// NOTE(review): never read or written in this class -- possibly reserved
	// for a stop timeout option; confirm before removing.
	int			_stopTimeout;
	/** launchd job label: "wrapper." + service name. */
	String		_plistName;
	/** Full java command line placed into the .plist "command" entry. */
	String		_execCmd;
	/** Canonical path of the wrapper configuration file. */
	String		_confFile;
	/** Helper for spawning OS commands (launchctl). */
	PosixUtils	_utils	= new PosixUtils();

	/**
	 * Resolves all paths (launchd dir, wrapper jar, conf file, template,
	 * .plist file, java executable) and builds the exec command line.
	 * Prints a message and leaves the object partially initialized
	 * ({@code _plistFile == null}) on any unrecoverable problem.
	 */
	public void init()
	{
		if (_name == null)
		{
			System.out.println("no name for daemon -> abort");
			return;
		}
		_launchdDir = _config.getString("wrapper.launchd.dir",
				System.getProperty("user.home") + "/Library/LaunchAgents");
		File daemonDir = new File(_launchdDir);
		if (!daemonDir.exists() || !daemonDir.isDirectory())
		{
			System.out.println("Error " + _launchdDir + " : is not a directory");
			return;
		}
		String wrapperJar = WrapperLoader.getWrapperJar().trim();
		String wrapperHome = ".";
		try
		{
			wrapperHome = new File(wrapperJar).getParentFile()
					.getCanonicalPath();
		}
		catch (IOException e1)
		{
			e1.printStackTrace();
		}
		String confFile = _config.getString("wrapper.config");
		String confDir = null;
		if (confFile != null)
		{
			File f = new File(confFile);
			if (f.exists())
				try
				{
					confDir = f.getParentFile().getCanonicalPath();
				}
				catch (IOException e)
				{
					// best effort: fall through to the default conf dir below
				}
		}
		if (confDir == null)
			confDir = wrapperHome + "/conf";
		if (confFile == null)
		{
			System.out.println("no conf file found -> abort");
			return;
		}
		try
		{
			_confFile = new File(confFile).getCanonicalPath();
		}
		catch (IOException e)
		{
			e.printStackTrace();
		}
		// BUGFIX: default was wrapperHome + "templeates/launchd.plist.vm" --
		// misspelled directory and missing path separator, so the default
		// template could never be found.
		_plistTemplate = _config.getString("wrapper.launchd.template",
				wrapperHome + "/templates/launchd.plist.vm");
		File daemonTemplate = new File(_plistTemplate);
		if (!daemonTemplate.exists() || !daemonTemplate.isFile())
		{
			System.out.println("Error " + _plistTemplate
					+ " : template file not found");
			return;
		}
		File daemonScript = new File(daemonDir, "wrapper." + getName());
		if (daemonScript.exists())
			System.out.println(daemonScript.getAbsolutePath()
					+ " already exists -> overwrite");
		_plistName = "wrapper." + _name;
		File plistFile = new File(_launchdDir, _plistName + ".plist");
		try
		{
			_plistFile = plistFile.getCanonicalPath();
		}
		catch (IOException e)
		{
			e.printStackTrace();
		}
		// NOTE(review): javaHome is currently unused -- presumably intended to
		// locate the java executable instead of the "java.home" property;
		// retained so any side effects of getJavaHome() are preserved.
		JavaHome javaHome = OperatingSystem.instance().getJavaHome(_config);
		// BUGFIX: was System.clearProperty("java.home"), which REMOVES the
		// system property (returning its previous value) -- destructive, and
		// yields "null/bin/java" on any subsequent call. Read it instead.
		String java = System.getProperty("java.home") + "/bin/java";
		try
		{
			java = new File(java).getCanonicalPath();
		}
		catch (IOException e)
		{
			e.printStackTrace();
		}
		_execCmd = String
				.format("%1$s -Dwrapper.service=true -Dwrapper.visible=false -jar %2$s -c %3$s",
						java, wrapperJar, _confFile);
	}

	/**
	 * Renders the .plist from the Velocity template and registers it with
	 * "launchctl load".
	 *
	 * @return true if the service shows up in "launchctl list" afterwards
	 */
	public boolean install()
	{
		if (_plistFile == null)
		{
			System.out.println("Error : not initialized -> abort");
			return false;
		}
		try
		{
			File daemonTemplate = new File(_plistTemplate);
			VelocityEngine ve = new VelocityEngine();
			ve.setProperty(VelocityEngine.RESOURCE_LOADER, "file");
			ve.setProperty("file.resource.loader.path",
					daemonTemplate.getParent());
			ve.setProperty("runtime.log.logsystem.class",
					VelocityLog.class.getCanonicalName());
			ve.init();
			Template t = ve.getTemplate(daemonTemplate.getName());
			VelocityContext context = new VelocityContext();
			context.put("name", _plistName);
			context.put("command", Arrays.asList(_execCmd.split(" ")));
			context.put("autoStart", "AUTOMATIC".equals(_config.getString(
					"wrapper.ntservice.starttype", DEFAULT_SERVICE_START_TYPE)));
			// BUGFIX: the writer was closed only on the success path; close it
			// in a finally block so a merge() failure does not leak the handle.
			FileWriter writer = new FileWriter(_plistFile);
			try
			{
				t.merge(context, writer);
				writer.flush();
			}
			finally
			{
				writer.close();
			}
			_utils.osCommand("launchctl load " + _plistFile, 5000);
		}
		catch (Exception ex)
		{
			ex.printStackTrace();
			return false;
		}
		return isInstalled();
	}

	/**
	 * @return true if the job label appears in the output of "launchctl list"
	 */
	public boolean isInstalled()
	{
		String sp = String.format(".*\\d+.*%1$s.*", _plistName);
		Pattern p = Pattern.compile(sp, Pattern.DOTALL);
		String list = _utils.osCommand("launchctl list", 5000);
		Matcher m = p.matcher(list);
		return m.matches();
	}

	/**
	 * @return true if "launchctl list" reports a pid for this job
	 */
	public boolean isRunning()
	{
		int pid = getPid();
		return pid > 0;
	}

	/**
	 * Starts the job via "launchctl start" unless it is already running.
	 *
	 * @return true if the job is running afterwards
	 */
	public boolean start()
	{
		if (isRunning())
		{
			System.out.println("already running");
			return true;
		}
		_utils.osCommand("launchctl start " + _plistName, 5000);
		return isRunning();
	}

	/**
	 * Stops the job via "launchctl stop" if it is running.
	 *
	 * @return true if the job is no longer running
	 */
	public boolean stop()
	{
		if (isRunning())
		{
			_utils.osCommand("launchctl stop " + _plistName, 5000);
			return !isRunning();
		}
		return true;
	}

	/**
	 * Stops the job if necessary, unloads it from launchd and removes the
	 * .plist file.
	 *
	 * @return always true (failures are only reported on stdout)
	 */
	public boolean uninstall()
	{
		if (isRunning())
			stop();
		_utils.osCommand("launchctl unload " + _plistFile, 5000);
		// surface a failed delete instead of silently ignoring the result
		if (!new File(_plistFile).delete())
			System.out.println("could not delete " + _plistFile);
		return true;
	}

	/**
	 * @return bit mask combining STATE_INSTALLED and STATE_RUNNING
	 */
	public int state()
	{
		int result = 0;
		if (isInstalled())
			result |= STATE_INSTALLED;
		if (isRunning())
			result |= STATE_RUNNING;
		return result;
	}

	/**
	 * Extracts this job's pid from "launchctl list" output.
	 *
	 * @return the pid, or -1 if the job is not listed with a pid
	 */
	public int getPid()
	{
		try
		{
			String sp = String.format("(\\d+)\\s*\\-\\s*%1$s", _plistName);
			Pattern p = Pattern.compile(sp, Pattern.DOTALL);
			String list = _utils.osCommand("launchctl list", 5000);
			Matcher m = p.matcher(list);
			// BUGFIX: check find() explicitly instead of relying on the
			// IllegalStateException from group() to reach the catch block.
			if (m.find())
				return Integer.parseInt(m.group(1));
		}
		catch (Exception ex)
		{
			// best effort: unparsable or missing output means "not running"
		}
		return -1;
	}

	/**
	 * Propagates the logger to the embedded PosixUtils as well.
	 */
	public void setLogger(Logger logger)
	{
		super.setLogger(logger);
		_utils.setLog(logger);
	}

}
/* Generated By:JavaCC: Do not edit this line. ObjDumpParser.java */
package avrora.syntax.objdump;

import avrora.syntax.*;
import java.io.*;
import java.util.Enumeration;
import java.util.Vector;
import cck.parser.SimpleCharStream;

/**
 * JavaCC-generated recursive-descent parser for avr-objdump style disassembly
 * listings. Grammar productions (Module, Section, Item, Instr*) feed the
 * parsed sections, labels and AVR instructions into a {@link RawModule};
 * everything prefixed jj_ is JavaCC parser machinery (token lookahead,
 * error-reporting tables) and should not be edited by hand.
 * <p>
 * NOTE(review): bare numeric token kinds used below (154, 155, 156, 157, 158,
 * 159) are literal tokens declared in ObjDumpParserConstants -- confirm their
 * images there; from usage they appear to be punctuation such as ':' ',' '+'
 * '-' and '.' but that is not visible in this file.
 */
public class ObjDumpParser extends AbstractParser implements ObjDumpParserConstants {

    // the module under construction; same object as the inherited 'module'
    // field, kept with its concrete RawModule type for raw-byte/section calls
    protected RawModule rawModule;

    public ObjDumpParser(Reader stream, RawModule m, String fname) {
        this(new FileMarkingTokenManager(new SimpleCharStream(stream, 1, 1, 4096), fname));
        module = m;
        rawModule = m;
    }

    /* Begin GRAMMAR */

    // top-level production: optional header, then one or more sections, then EOF
    public void Module() throws ParseException {
        Header();
        label_1: while (true) {
            Section();
            switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
            case START:
                break;
            default:
                jj_la1[0] = jj_gen;
                break label_1;
            }
        }
        jj_consume_token(0);
    }

    // optional "program" line followed by one or more section declarations
    public void Header() throws ParseException {
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case PROGRAM:
            jj_consume_token(PROGRAM);
            jj_consume_token(STRING_LITERAL);
            jj_consume_token(154);
            break;
        default:
            jj_la1[1] = jj_gen;
        }
        label_2: while (true) {
            SectionDecl();
            switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
            case SECTION:
                break;
            default:
                jj_la1[2] = jj_gen;
                break label_2;
            }
        }
    }

    // section declaration line: name, size, vma, lma, offset; registers the
    // section with the raw module (size and offset are consumed but unused)
    public void SectionDecl() throws ParseException {
        Token name, vma, lma;
        jj_consume_token(SECTION);
        name = jj_consume_token(DOT_IDENTIFIER);
        jj_consume_token(SIZE);
        jj_consume_token(155);
        jj_consume_token(INTEGER_LITERAL);
        jj_consume_token(VMA);
        jj_consume_token(155);
        vma = jj_consume_token(INTEGER_LITERAL);
        jj_consume_token(LMA);
        jj_consume_token(155);
        lma = jj_consume_token(INTEGER_LITERAL);
        jj_consume_token(OFFSET);
        jj_consume_token(155);
        jj_consume_token(INTEGER_LITERAL);
        rawModule.newSection(name, vma, lma);
    }

    // section body: enter the named section, then parse its statements
    public void Section() throws ParseException {
        Token sect;
        jj_consume_token(START);
        sect = jj_consume_token(DOT_IDENTIFIER);
        jj_consume_token(154);
        rawModule.enterSection(sect);
        label_3: while (true) {
            switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
            case INTEGER_LITERAL:
            case LABEL:
                break;
            default:
                jj_la1[3] = jj_gen;
                break label_3;
            }
            Statement();
        }
    }

    // a statement is either a label line or an address item line
    public void Statement() throws ParseException {
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case LABEL:
            Label();
            break;
        case INTEGER_LITERAL:
            Item();
            break;
        default:
            jj_la1[4] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
    }

    // one listing line: address, raw bytes, then either an instruction
    // mnemonic or a .word data directive; the address is set both before the
    // raw bytes and before the decoded item
    public void Item() throws ParseException {
        Token addr;
        addr = jj_consume_token(INTEGER_LITERAL);
        jj_consume_token(154);
        rawModule.setAddress(addr);
        RawData();
        rawModule.setAddress(addr);
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case ADD: case ADC: case ADIW: case AND: case ANDI: case ASR:
        case BCLR: case BLD: case BRBC: case BRBS: case BRCC: case BRCS:
        case BREAK: case BREQ: case BRGE: case BRHC: case BRHS: case BRID:
        case BRIE: case BRLO: case BRLT: case BRMI: case BRNE: case BRPL:
        case BRSH: case BRTC: case BRTS: case BRVC: case BRVS: case BSET:
        case BST: case CALL: case CBI: case CBR: case CLC: case CLH:
        case CLI: case CLN: case CLR: case CLS: case CLT: case CLV:
        case CLZ: case COM: case CP: case CPC: case CPI: case CPSE:
        case DEC: case EICALL: case EIJMP: case ELPM: case EOR: case FMUL:
        case FMULS: case FMULSU: case ICALL: case IJMP: case IN: case INC:
        case JMP: case LD: case LDD: case LDI: case LDS: case LPM:
        case LSL: case LSR: case MOV: case MOVW: case MUL: case MULS:
        case MULSU: case NEG: case NOP: case OR: case ORI: case OUT:
        case POP: case PUSH: case RCALL: case RET: case RETI: case RJMP:
        case ROL: case ROR: case SBC: case SBCI: case SBI: case SBIC:
        case SBIS: case SBIW: case SBR: case SBRC: case SBRS: case SEC:
        case SEH: case SEI: case SEN: case SER: case SES: case SET:
        case SEV: case SEZ: case SLEEP: case SPM: case ST: case STD:
        case STS: case SUB: case SUBI: case SWAP: case TST: case WDR:
            Instruction();
            break;
        case WORD:
            Data();
            break;
        default:
            jj_la1[5] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
    }

    // raw machine-code bytes: 4 bytes (needs 3-token lookahead) or 2 bytes
    public void RawData() throws ParseException {
        if (jj_2_1(3)) {
            Raw4();
        } else {
            switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
            case INTEGER_LITERAL:
                Raw2();
                break;
            default:
                jj_la1[6] = jj_gen;
                jj_consume_token(-1);
                throw new ParseException();
            }
        }
    }

    public void Raw2() throws ParseException {
        Token b1, b2;
        b1 = jj_consume_token(INTEGER_LITERAL);
        b2 = jj_consume_token(INTEGER_LITERAL);
        rawModule.addBytes(b1, b2);
    }

    public void Raw4() throws ParseException {
        Token b1, b2, b3, b4;
        b1 = jj_consume_token(INTEGER_LITERAL);
        b2 = jj_consume_token(INTEGER_LITERAL);
        b3 = jj_consume_token(INTEGER_LITERAL);
        b4 = jj_consume_token(INTEGER_LITERAL);
        rawModule.addBytes(b1, b2, b3, b4);
    }

    // dispatches on the mnemonic to the production matching its operand shape
    // (reg-reg, reg, reg-imm, in/out, imm, imm-imm, bare, load, store)
    public void Instruction() throws ParseException {
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case ADD: case ADC: case AND: case CP: case CPC: case CPSE:
        case EOR: case FMUL: case FMULS: case FMULSU: case MOV: case MOVW:
        case MUL: case MULS: case MULSU: case OR: case SBC: case SUB:
            InstrGPRGPR();
            break;
        case ASR: case CLR: case COM: case DEC: case INC: case LSL:
        case LSR: case NEG: case POP: case PUSH: case ROL: case ROR:
        case SER: case SWAP: case TST:
            InstrGPR();
            break;
        case ADIW: case ANDI: case BLD: case BST: case CBR: case CPI:
        case ORI: case SBCI: case SBIW: case SBR: case SBRC: case SBRS:
        case SUBI:
            InstrGPRIMM();
            break;
        case IN:
            InstrInput();
            break;
        case OUT:
            InstrOutput();
            break;
        case BCLR: case BRCC: case BRCS: case BREQ: case BRGE: case BRHC:
        case BRHS: case BRID: case BRIE: case BRLO: case BRLT: case BRMI:
        case BRNE: case BRPL: case BRSH: case BRTC: case BRTS: case BRVC:
        case BRVS: case BSET: case CALL: case JMP: case RCALL: case RJMP:
            InstrIMM();
            break;
        case BRBC: case BRBS: case CBI: case SBI: case SBIC: case SBIS:
            InstrIMMIMM();
            break;
        case BREAK: case CLC: case CLH: case CLI: case CLN: case CLS:
        case CLT: case CLV: case CLZ: case EICALL: case EIJMP: case ICALL:
        case IJMP: case NOP: case RET: case RETI: case SEC: case SEH:
        case SEI: case SEN: case SES: case SET: case SEV: case SEZ:
        case SLEEP: case SPM: case WDR:
            InstrBARE();
            break;
        case ELPM: case LD: case LDD: case LDI: case LDS: case LPM:
            InstrLoad();
            break;
        case ST: case STD: case STS:
            InstrStore();
            break;
        default:
            jj_la1[7] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
    }

    // ".word <literal>" data directive; the value is consumed but not stored
    public void Data() throws ParseException {
        jj_consume_token(WORD);
        jj_consume_token(INTEGER_LITERAL);
    }

    // register-register instruction: "op r1, r2"
    public void InstrGPRGPR() throws ParseException {
        Token t;
        SyntacticOperand.Register r1, r2;
        t = OpcodeGPRGPR();
        r1 = Register();
        jj_consume_token(156);
        r2 = Register();
        module.addInstruction(t.image, t, r1, r2);
    }

    public Token OpcodeGPRGPR() throws ParseException {
        Token t;
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case ADD: t = jj_consume_token(ADD); break;
        case ADC: t = jj_consume_token(ADC); break;
        case SUB: t = jj_consume_token(SUB); break;
        case SBC: t = jj_consume_token(SBC); break;
        case AND: t = jj_consume_token(AND); break;
        case OR: t = jj_consume_token(OR); break;
        case EOR: t = jj_consume_token(EOR); break;
        case MUL: t = jj_consume_token(MUL); break;
        case MULS: t = jj_consume_token(MULS); break;
        case MULSU: t = jj_consume_token(MULSU); break;
        case FMUL: t = jj_consume_token(FMUL); break;
        case FMULS: t = jj_consume_token(FMULS); break;
        case FMULSU: t = jj_consume_token(FMULSU); break;
        case CPSE: t = jj_consume_token(CPSE); break;
        case CP: t = jj_consume_token(CP); break;
        case CPC: t = jj_consume_token(CPC); break;
        case MOV: t = jj_consume_token(MOV); break;
        case MOVW: t = jj_consume_token(MOVW); break;
        default:
            jj_la1[8] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
        return t;
    }

    // single-register instruction: "op r1"
    public void InstrGPR() throws ParseException {
        Token t;
        SyntacticOperand.Register r1;
        t = OpcodeGPR();
        r1 = Register();
        module.addInstruction(t.image, t, r1);
    }

    public Token OpcodeGPR() throws ParseException {
        Token t;
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case COM: t = jj_consume_token(COM); break;
        case NEG: t = jj_consume_token(NEG); break;
        case INC: t = jj_consume_token(INC); break;
        case DEC: t = jj_consume_token(DEC); break;
        case TST: t = jj_consume_token(TST); break;
        case CLR: t = jj_consume_token(CLR); break;
        case SER: t = jj_consume_token(SER); break;
        case PUSH: t = jj_consume_token(PUSH); break;
        case POP: t = jj_consume_token(POP); break;
        case LSL: t = jj_consume_token(LSL); break;
        case LSR: t = jj_consume_token(LSR); break;
        case ROL: t = jj_consume_token(ROL); break;
        case ROR: t = jj_consume_token(ROR); break;
        case ASR: t = jj_consume_token(ASR); break;
        case SWAP: t = jj_consume_token(SWAP); break;
        default:
            jj_la1[9] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
        return t;
    }

    // register-immediate instruction: "op r1, const"
    public void InstrGPRIMM() throws ParseException {
        Token t;
        SyntacticOperand.Register r1;
        SyntacticOperand.Expr c1;
        t = OpcodeGPRIMM();
        r1 = Register();
        jj_consume_token(156);
        c1 = Const();
        module.addInstruction(t.image, t, r1, c1);
    }

    public Token OpcodeGPRIMM() throws ParseException {
        Token t;
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case ADIW: t = jj_consume_token(ADIW); break;
        case SUBI: t = jj_consume_token(SUBI); break;
        case SBCI: t = jj_consume_token(SBCI); break;
        case SBIW: t = jj_consume_token(SBIW); break;
        case ANDI: t = jj_consume_token(ANDI); break;
        case ORI: t = jj_consume_token(ORI); break;
        case SBR: t = jj_consume_token(SBR); break;
        case CBR: t = jj_consume_token(CBR); break;
        case CPI: t = jj_consume_token(CPI); break;
        case SBRC: t = jj_consume_token(SBRC); break;
        case SBRS: t = jj_consume_token(SBRS); break;
        case BST: t = jj_consume_token(BST); break;
        case BLD: t = jj_consume_token(BLD); break;
        default:
            jj_la1[10] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
        return t;
    }

    // single-immediate instruction (jumps, calls, branches): "op const"
    public void InstrIMM() throws ParseException {
        Token t;
        SyntacticOperand.Expr c1;
        t = OpcodeIMM();
        c1 = Const();
        module.addInstruction(t.image, t, c1);
    }

    public Token OpcodeIMM() throws ParseException {
        Token t;
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case RJMP: t = jj_consume_token(RJMP); break;
        case JMP: t = jj_consume_token(JMP); break;
        case RCALL: t = jj_consume_token(RCALL); break;
        case CALL: t = jj_consume_token(CALL); break;
        case BREQ: t = jj_consume_token(BREQ); break;
        case BRNE: t = jj_consume_token(BRNE); break;
        case BRCS: t = jj_consume_token(BRCS); break;
        case BRCC: t = jj_consume_token(BRCC); break;
        case BRSH: t = jj_consume_token(BRSH); break;
        case BRLO: t = jj_consume_token(BRLO); break;
        case BRMI: t = jj_consume_token(BRMI); break;
        case BRPL: t = jj_consume_token(BRPL); break;
        case BRGE: t = jj_consume_token(BRGE); break;
        case BRLT: t = jj_consume_token(BRLT); break;
        case BRHS: t = jj_consume_token(BRHS); break;
        case BRHC: t = jj_consume_token(BRHC); break;
        case BRTS: t = jj_consume_token(BRTS); break;
        case BRTC: t = jj_consume_token(BRTC); break;
        case BRVS: t = jj_consume_token(BRVS); break;
        case BRVC: t = jj_consume_token(BRVC); break;
        case BRIE: t = jj_consume_token(BRIE); break;
        case BRID: t = jj_consume_token(BRID); break;
        case BSET: t = jj_consume_token(BSET); break;
        case BCLR: t = jj_consume_token(BCLR); break;
        default:
            jj_la1[11] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
        return t;
    }

    // two-immediate instruction: "op const, const"
    public void InstrIMMIMM() throws ParseException {
        Token t;
        SyntacticOperand.Expr c1, c2;
        t = OpcodeIMMIMM();
        c1 = Const();
        jj_consume_token(156);
        c2 = Const();
        module.addInstruction(t.image, t, c1, c2);
    }

    public Token OpcodeIMMIMM() throws ParseException {
        Token t;
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case SBIC: t = jj_consume_token(SBIC); break;
        case SBIS: t = jj_consume_token(SBIS); break;
        case BRBS: t = jj_consume_token(BRBS); break;
        case BRBC: t = jj_consume_token(BRBC); break;
        case SBI: t = jj_consume_token(SBI); break;
        case CBI: t = jj_consume_token(CBI); break;
        default:
            jj_la1[12] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
        return t;
    }

    // load-family dispatch: LDI / LD variants / LDD / LDS / LPM-ELPM variants
    public void InstrLoad() throws ParseException {
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case LDI:
            InstrLDI();
            break;
        case LD:
            InstrLD_variant();
            break;
        case LDD:
            InstrLDD();
            break;
        case LDS:
            InstrLDS();
            break;
        case ELPM:
        case LPM:
            InstrLPM_variant();
            break;
        default:
            jj_la1[13] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
    }

    public void InstrLDI() throws ParseException {
        Token t;
        SyntacticOperand.Register r1;
        SyntacticOperand.Expr c1;
        t = jj_consume_token(LDI);
        r1 = Register();
        jj_consume_token(156);
        c1 = Const();
        module.addInstruction(t.image, t, r1, c1);
    }

    // distinguishes post-increment / pre-decrement / plain LD by lookahead
    public void InstrLD_variant() throws ParseException {
        if (jj_2_2(5)) {
            InstrLDPI();
        } else if (jj_2_3(4)) {
            InstrLDPD();
        } else {
            switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
            case LD:
                InstrLD();
                break;
            default:
                jj_la1[14] = jj_gen;
                jj_consume_token(-1);
                throw new ParseException();
            }
        }
    }

    public void InstrLD() throws ParseException {
        Token t;
        SyntacticOperand.Register r1, r2;
        t = jj_consume_token(LD);
        r1 = Register();
        jj_consume_token(156);
        r2 = Register();
        module.addInstruction("ld", t, r1, r2);
    }

    // "ld r1, r2+" -> recorded as the synthetic mnemonic "ldpi"
    public void InstrLDPI() throws ParseException {
        Token t;
        SyntacticOperand.Register r1, r2;
        t = jj_consume_token(LD);
        r1 = Register();
        jj_consume_token(156);
        r2 = Register();
        jj_consume_token(157);
        module.addInstruction("ldpi", t, r1, r2);
    }

    // "ld r1, -r2" -> recorded as the synthetic mnemonic "ldpd"
    public void InstrLDPD() throws ParseException {
        Token t;
        SyntacticOperand.Register r1, r2;
        t = jj_consume_token(LD);
        r1 = Register();
        jj_consume_token(156);
        jj_consume_token(158);
        r2 = Register();
        module.addInstruction("ldpd", t, r1, r2);
    }

    public void InstrLDD() throws ParseException {
        Token t;
        SyntacticOperand.Register r1, r2;
        SyntacticOperand.Expr c1;
        t = jj_consume_token(LDD);
        r1 = Register();
        jj_consume_token(156);
        r2 = Register();
        jj_consume_token(157);
        c1 = Const();
        module.addInstruction(t.image, t, r1, r2, c1);
    }

    public void InstrLDS() throws ParseException {
        Token t;
        SyntacticOperand.Register r1;
        SyntacticOperand.Expr c1;
        t = jj_consume_token(LDS);
        r1 = Register();
        jj_consume_token(156);
        c1 = Const();
        module.addInstruction(t.image, t, r1, c1);
    }

    // distinguishes LPM/ELPM with post-increment, with registers, or bare
    public void InstrLPM_variant() throws ParseException {
        if (jj_2_4(5)) {
            InstrLPMGPRGPRP();
        } else if (jj_2_5(3)) {
            InstrLPMGPRGPR();
        } else {
            switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
            case ELPM:
            case LPM:
                InstrLPMBARE();
                break;
            default:
                jj_la1[15] = jj_gen;
                jj_consume_token(-1);
                throw new ParseException();
            }
        }
    }

    // "lpm r1, r2" -> mnemonic suffixed with "d"
    public void InstrLPMGPRGPR() throws ParseException {
        Token t;
        SyntacticOperand.Register r1, r2;
        t = OpcodeLPM();
        r1 = Register();
        jj_consume_token(156);
        r2 = Register();
        module.addInstruction(t.image + "d", t, r1, r2);
    }

    // "lpm r1, r2+" -> mnemonic suffixed with "pi"
    public void InstrLPMGPRGPRP() throws ParseException {
        Token t;
        SyntacticOperand.Register r1, r2;
        t = OpcodeLPM();
        r1 = Register();
        jj_consume_token(156);
        r2 = Register();
        jj_consume_token(157);
        module.addInstruction(t.image + "pi", t, r1, r2);
    }

    public void InstrLPMBARE() throws ParseException {
        Token t;
        t = OpcodeLPM();
        module.addInstruction(t.image, t);
    }

    public Token OpcodeLPM() throws ParseException {
        Token t;
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case LPM: t = jj_consume_token(LPM); break;
        case ELPM: t = jj_consume_token(ELPM); break;
        default:
            jj_la1[16] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
        return t;
    }

    // store-family dispatch: ST variants / STD / STS
    public void InstrStore() throws ParseException {
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case ST:
            InstrST_variant();
            break;
        case STD:
            InstrSTD();
            break;
        case STS:
            InstrSTS();
            break;
        default:
            jj_la1[17] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
    }

    // distinguishes plain ST / post-increment / pre-decrement by lookahead
    public void InstrST_variant() throws ParseException {
        if (jj_2_6(3)) {
            InstrST();
        } else if (jj_2_7(3)) {
            InstrSTPI();
        } else {
            switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
            case ST:
                InstrSTPD();
                break;
            default:
                jj_la1[18] = jj_gen;
                jj_consume_token(-1);
                throw new ParseException();
            }
        }
    }

    public void InstrST() throws ParseException {
        Token t;
        SyntacticOperand.Register r1, r2;
        t = jj_consume_token(ST);
        r1 = Register();
        jj_consume_token(156);
        r2 = Register();
        module.addInstruction("st", t, r1, r2);
    }

    // "st r1+, r2" -> recorded as the synthetic mnemonic "stpi"
    public void InstrSTPI() throws ParseException {
        Token t;
        SyntacticOperand.Register r1, r2;
        t = jj_consume_token(ST);
        r1 = Register();
        jj_consume_token(157);
        jj_consume_token(156);
        r2 = Register();
        module.addInstruction("stpi", t, r1, r2);
    }

    // "st -r1, r2" -> recorded as the synthetic mnemonic "stpd"
    public void InstrSTPD() throws ParseException {
        Token t;
        SyntacticOperand.Register r1, r2;
        t = jj_consume_token(ST);
        jj_consume_token(158);
        r1 = Register();
        jj_consume_token(156);
        r2 = Register();
        module.addInstruction("stpd", t, r1, r2);
    }

    public void InstrSTD() throws ParseException {
        Token t;
        SyntacticOperand.Register r1, r2;
        SyntacticOperand.Expr c1;
        t = jj_consume_token(STD);
        r1 = Register();
        jj_consume_token(157);
        c1 = Const();
        jj_consume_token(156);
        r2 = Register();
        module.addInstruction(t.image, t, r1, c1, r2);
    }

    public void InstrSTS() throws ParseException {
        Token t;
        SyntacticOperand.Register r1;
        SyntacticOperand.Expr c1;
        t = jj_consume_token(STS);
        c1 = Const();
        jj_consume_token(156);
        r1 = Register();
        module.addInstruction(t.image, t, c1, r1);
    }

    // zero-operand instruction: "op"
    public void InstrBARE() throws ParseException {
        Token t;
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case IJMP: t = jj_consume_token(IJMP); break;
        case ICALL: t = jj_consume_token(ICALL); break;
        case RET: t = jj_consume_token(RET); break;
        case RETI: t = jj_consume_token(RETI); break;
        case SEC: t = jj_consume_token(SEC); break;
        case CLC: t = jj_consume_token(CLC); break;
        case SEN: t = jj_consume_token(SEN); break;
        case CLN: t = jj_consume_token(CLN); break;
        case SEZ: t = jj_consume_token(SEZ); break;
        case CLZ: t = jj_consume_token(CLZ); break;
        case SEI: t = jj_consume_token(SEI); break;
        case CLI: t = jj_consume_token(CLI); break;
        case SES: t = jj_consume_token(SES); break;
        case CLS: t = jj_consume_token(CLS); break;
        case SEV: t = jj_consume_token(SEV); break;
        case CLV: t = jj_consume_token(CLV); break;
        case SET: t = jj_consume_token(SET); break;
        case CLT: t = jj_consume_token(CLT); break;
        case SEH: t = jj_consume_token(SEH); break;
        case CLH: t = jj_consume_token(CLH); break;
        case NOP: t = jj_consume_token(NOP); break;
        case SLEEP: t = jj_consume_token(SLEEP); break;
        case WDR: t = jj_consume_token(WDR); break;
        case BREAK: t = jj_consume_token(BREAK); break;
        case SPM: t = jj_consume_token(SPM); break;
        case EIJMP: t = jj_consume_token(EIJMP); break;
        case EICALL: t = jj_consume_token(EICALL); break;
        default:
            jj_la1[19] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
        module.addInstruction(t.image, t);
    }

    // "in r1, const"
    public void InstrInput() throws ParseException {
        Token t;
        SyntacticOperand.Register r1;
        SyntacticOperand.Expr c1;
        t = jj_consume_token(IN);
        r1 = Register();
        jj_consume_token(156);
        c1 = Const();
        module.addInstruction(t.image, t, r1, c1);
    }

    // "out const, r1"
    public void InstrOutput() throws ParseException {
        Token t;
        SyntacticOperand.Register r1;
        SyntacticOperand.Expr c1;
        t = jj_consume_token(OUT);
        c1 = Const();
        jj_consume_token(156);
        r1 = Register();
        module.addInstruction(t.image, t, c1, r1);
    }

    public SyntacticOperand.Register Register() throws ParseException {
        Token tok;
        tok = jj_consume_token(IDENTIFIER);
        return module.newOperand(tok);
    }

    // label line: LABEL keyword, address, quoted name
    public void Label() throws ParseException {
        Token tok;
        Token v;
        jj_consume_token(LABEL);
        v = jj_consume_token(INTEGER_LITERAL);
        tok = jj_consume_token(STRING_LITERAL);
        jj_consume_token(154);
        rawModule.addQuotedLabelAt(v, tok);
    }

    // constant operand: a relative-address expression or a simple term;
    // jj_2_8(Integer.MAX_VALUE) = unbounded syntactic lookahead for RelExpr
    public SyntacticOperand.Expr Const() throws ParseException {
        Expr e;
        if (jj_2_8(2147483647)) {
            e = RelExpr();
        } else {
            switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
            case INTEGER_LITERAL:
            case IDENTIFIER:
                e = Term();
                break;
            default:
                jj_la1[20] = jj_gen;
                jj_consume_token(-1);
                throw new ParseException();
            }
        }
        return module.newOperand(e);
    }

    public Expr Term() throws ParseException {
        Token tok;
        Expr e;
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case IDENTIFIER:
            tok = jj_consume_token(IDENTIFIER);
            e = new Expr.Variable(tok);
            break;
        case INTEGER_LITERAL:
            tok = jj_consume_token(INTEGER_LITERAL);
            e = new Expr.Constant(tok);
            break;
        default:
            jj_la1[21] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
        return e;
    }

    // relative address: token 159, a +/- operator (BinOp), and an offset
    public Expr RelExpr() throws ParseException {
        Token ltok;
        Token op;
        Token rtok;
        ltok = jj_consume_token(159);
        op = BinOp();
        rtok = jj_consume_token(INTEGER_LITERAL);
        return new Expr.RelativeAddress(ltok, op, rtok);
    }

    public Token BinOp() throws ParseException {
        Token tok;
        switch (jj_ntk == -1 ? jj_ntk() : jj_ntk) {
        case 157:
            tok = jj_consume_token(157);
            break;
        case 158:
            tok = jj_consume_token(158);
            break;
        default:
            jj_la1[22] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
        }
        return tok;
    }

    /*
     * JavaCC lookahead entry points (jj_2_N): speculatively scan up to xla
     * tokens without consuming them; a LookaheadSuccess throw signals a match.
     */
    private boolean jj_2_1(int xla) {
        jj_la = xla;
        jj_lastpos = jj_scanpos = token;
        try {
            return !jj_3_1();
        } catch (LookaheadSuccess ls) {
            return true;
        } finally {
            jj_save(0, xla);
        }
    }

    private boolean jj_2_2(int xla) {
        jj_la = xla;
        jj_lastpos = jj_scanpos = token;
        try {
            return !jj_3_2();
        } catch (LookaheadSuccess ls) {
            return true;
        } finally {
            jj_save(1, xla);
        }
    }

    private boolean jj_2_3(int xla) {
        jj_la = xla;
        jj_lastpos = jj_scanpos = token;
        try {
            return !jj_3_3();
        } catch (LookaheadSuccess ls) {
            return true;
        } finally {
            jj_save(2, xla);
        }
    }

    private boolean jj_2_4(int xla) {
        jj_la = xla;
        jj_lastpos = jj_scanpos = token;
        try {
            return !jj_3_4();
        } catch (LookaheadSuccess ls) {
            return true;
        } finally {
            jj_save(3, xla);
        }
    }

    private boolean jj_2_5(int xla) {
        jj_la = xla;
        jj_lastpos = jj_scanpos = token;
        try {
            return !jj_3_5();
        } catch (LookaheadSuccess ls) {
            return true;
        } finally {
            jj_save(4, xla);
        }
    }

    private boolean jj_2_6(int xla) {
        jj_la = xla;
        jj_lastpos = jj_scanpos = token;
        try {
            return !jj_3_6();
        } catch (LookaheadSuccess ls) {
            return true;
        } finally {
            jj_save(5, xla);
        }
    }

    private boolean jj_2_7(int xla) {
        jj_la = xla;
        jj_lastpos = jj_scanpos = token;
        try {
            return !jj_3_7();
        } catch (LookaheadSuccess ls) {
            return true;
        } finally {
            jj_save(6, xla);
        }
    }

    private boolean jj_2_8(int xla) {
        jj_la = xla;
        jj_lastpos = jj_scanpos = token;
        try {
            return !jj_3_8();
        } catch (LookaheadSuccess ls) {
            return true;
        } finally {
            jj_save(7, xla);
        }
    }

    /*
     * Generated lookahead scanners (jj_3_N / jj_3R_N): return true on
     * mismatch. The '||' chains short-circuit on the first failed token.
     * NOTE(review): jj_3R_12 scans raw token kinds 100 and 86 -- presumably
     * LPM/ELPM per the grammar above; confirm in ObjDumpParserConstants.
     */
    private boolean jj_3_3() {
        return jj_3R_6();
    }

    private boolean jj_3R_8() {
        return jj_3R_12() || jj_3R_11() || jj_scan_token(156);
    }

    private boolean jj_3_2() {
        return jj_3R_5();
    }

    private boolean jj_3R_6() {
        return jj_scan_token(LD) || jj_3R_11() || jj_scan_token(156) || jj_scan_token(158);
    }

    private boolean jj_3R_10() {
        return jj_scan_token(ST) || jj_3R_11() || jj_scan_token(157);
    }

    private boolean jj_3_1() {
        return jj_3R_4();
    }

    private boolean jj_3R_11() {
        return jj_scan_token(IDENTIFIER);
    }

    private boolean jj_3R_12() {
        Token xsp;
        xsp = jj_scanpos;
        if (jj_scan_token(100)) {
            jj_scanpos = xsp;
            if (jj_scan_token(86)) return true;
        }
        return false;
    }

    private boolean jj_3_5() {
        return jj_3R_8();
    }

    private boolean jj_3_4() {
        return jj_3R_7();
    }

    private boolean jj_3_8() {
        return jj_scan_token(159);
    }

    private boolean jj_3R_5() {
        if (jj_scan_token(LD)) return true;
        if (jj_3R_11()) return true;
        if (jj_scan_token(156)) return true;
        return jj_3R_11() || jj_scan_token(157);
    }

    private boolean jj_3R_9() {
        return jj_scan_token(ST) || jj_3R_11() || jj_scan_token(156);
    }

    private boolean jj_3R_4() {
        return jj_scan_token(INTEGER_LITERAL) || jj_scan_token(INTEGER_LITERAL) || jj_scan_token(INTEGER_LITERAL);
    }

    private boolean jj_3_7() {
        return jj_3R_10();
    }

    private boolean jj_3R_7() {
        if (jj_3R_12()) return true;
        if (jj_3R_11()) return true;
        return jj_scan_token(156) || jj_3R_11() || jj_scan_token(157);
    }

    private boolean jj_3_6() {
        return jj_3R_9();
    }

    /* Generated parser state: token stream, lookahead cursors and the
     * first-set bitmask tables (jj_la1_*) used for error reporting. */
    public ObjDumpParserTokenManager token_source;
    public Token token, jj_nt;
    private int jj_ntk;
    private Token jj_scanpos, jj_lastpos;
    private int jj_la;
    private int jj_gen;
    // per-choice-point generation stamps; 23 choice points in this grammar
    private final int[] jj_la1 = new int[23];
    private static int[] jj_la1_0;
    private static int[] jj_la1_1;
    private static int[] jj_la1_2;
    private static int[] jj_la1_3;
    private static int[] jj_la1_4;
    static {
        jj_la1_0();
        jj_la1_1();
        jj_la1_2();
        jj_la1_3();
        jj_la1_4();
    }

    private static void jj_la1_0() {
        jj_la1_0 = new int[] { 0x8000000, 0x20000000, 0x10000000, 0x40000200, 0x40000200, 0x4000000, 0x200, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x200, 0x200, 0x0, };
    }

    private static void jj_la1_1() {
        jj_la1_1 = new int[] { 0x0, 0x0, 0x0, 0x0, 0x0, 0xfffffff8, 0x0, 0xfffffff8, 0x58, 0x100, 0x4a0, 0xffff6200, 0x1800, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8000, 0x0, 0x0, 0x0, };
    }

    private static void jj_la1_2() {
        jj_la1_2 = new int[] { 0x0, 0x0, 0x0, 0x0, 0x0, 0xffffffff, 0x0, 0xffffffff, 0x7858000, 0x40084200, 0x20012, 0x80000005, 0x8, 0x400000, 0x0, 0x400000, 0x400000, 0x0, 0x0, 0x18303de0, 0x0, 0x0, 0x0, };
    }

    private static void jj_la1_3() {
        jj_la1_3 = new int[] { 0x0, 0x0, 0x0, 0x0, 0x0, 0xffffffff, 0x0, 0xffffffff, 0x2004f80, 0x1861060, 0xc4008000, 0x480000, 0x38000000, 0x1f, 0x1, 0x10, 0x10, 0x0, 0x0, 0x302000, 0x0, 0x0, 0x0, };
    }

    private static void jj_la1_4() {
        jj_la1_4 = new int[] { 0x0, 0x0, 0x0, 0x0, 0x0, 0x1fffff, 0x0, 0x1fffff, 0x10000, 0xc0040, 0x20003, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe000, 0x2000, 0x101fbc, 0x200000, 0x200000, 0x60000000, };
    }

    // memoization slots for the 8 jj_2_N lookahead routines
    private final JJCalls[] jj_2_rtns = new JJCalls[8];
    private boolean jj_rescan = false;
    private int jj_gc = 0;

    public ObjDumpParser(ObjDumpParserTokenManager tm) {
        token_source = tm;
        token = new Token();
        jj_ntk = -1;
        jj_gen = 0;
        for (int i = 0; i < 23; i++) jj_la1[i] = -1;
        for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
    }

    // consume and return the next token, which must be of the given kind;
    // otherwise restore the previous token and raise a ParseException
    private Token jj_consume_token(int kind) throws ParseException {
        Token oldToken;
        if ((oldToken = token).next != null) token = token.next;
        else token = token.next = token_source.getNextToken();
        jj_ntk = -1;
        if (token.kind == kind) {
            jj_gen++;
            if (++jj_gc > 100) {
                // periodically drop stale lookahead memoization entries
                jj_gc = 0;
                for (int i = 0; i < jj_2_rtns.length; i++) {
                    JJCalls c = jj_2_rtns[i];
                    while (c != null) {
                        if (c.gen < jj_gen) c.first = null;
                        c = c.next;
                    }
                }
            }
            return token;
        }
        token = oldToken;
        jj_kind = kind;
        throw generateParseException();
    }

    // control-flow exception used to abort a successful bounded lookahead
    private static class LookaheadSuccess extends Error {
        private static final long serialVersionUID = 1L;
    }

    private final LookaheadSuccess jj_ls = new LookaheadSuccess();

    // advance the lookahead cursor one token; true = mismatch
    private boolean jj_scan_token(int kind) {
        if (jj_scanpos == jj_lastpos) {
            jj_la--;
            if (jj_scanpos.next == null) {
                jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.getNextToken();
            } else {
                jj_lastpos = jj_scanpos = jj_scanpos.next;
            }
        } else {
            jj_scanpos = jj_scanpos.next;
        }
        if (jj_rescan) {
            int i = 0;
            Token tok = token;
            while (tok != null && tok != jj_scanpos) {
                i++;
                tok = tok.next;
            }
            if (tok != null) jj_add_error_token(kind, i);
        }
        if (jj_scanpos.kind != kind) return true;
        if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
        return false;
    }

    // peek at the kind of the next token without consuming it
    private int jj_ntk() {
        if ((jj_nt = token.next) == null)
            return jj_ntk = (token.next = token_source.getNextToken()).kind;
        else
            return jj_ntk = jj_nt.kind;
    }

    /* Error-reporting bookkeeping: expected-token sequences collected while
     * re-scanning failed lookaheads. */
    private Vector<int[]> jj_expentries = new Vector<int[]>();
    private int[] jj_expentry;
    private int jj_kind = -1;
    private int[] jj_lasttokens = new int[100];
    private int jj_endpos;

    private void jj_add_error_token(int kind, int pos) {
        if (pos >= 100) return;
        if (pos == jj_endpos + 1) {
            jj_lasttokens[jj_endpos++] = kind;
        } else if (jj_endpos != 0) {
            jj_expentry = new int[jj_endpos];
            System.arraycopy(jj_lasttokens, 0, jj_expentry, 0, jj_endpos);
            boolean exists = false;
            // deduplicate against previously recorded expected sequences
            for (Enumeration<int[]> e = jj_expentries.elements(); e.hasMoreElements();) {
                int[] oldentry = e.nextElement();
                if (oldentry.length == jj_expentry.length) {
                    exists = true;
                    for (int i = 0; i < jj_expentry.length; i++) {
                        if (oldentry[i] != jj_expentry[i]) {
                            exists = false;
                            break;
                        }
                    }
                    if (exists) break;
                }
            }
            if (!exists) jj_expentries.addElement(jj_expentry);
            if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
        }
    }

    // build a ParseException listing every token kind that was legal at the
    // failed choice point (reconstructed from the jj_la1_* bitmask tables;
    // 160 = total token kinds, 23 = number of choice points)
    public ParseException generateParseException() {
        jj_expentries.removeAllElements();
        boolean[] la1tokens = new boolean[160];
        for (int i = 0; i < 160; i++) {
            la1tokens[i] = false;
        }
        if (jj_kind >= 0) {
            la1tokens[jj_kind] = true;
            jj_kind = -1;
        }
        for (int i = 0; i < 23; i++) {
            if (jj_la1[i] == jj_gen) {
                for (int j = 0; j < 32; j++) {
                    if ((jj_la1_0[i] & 1 << j) != 0) {
                        la1tokens[j] = true;
                    }
                    if ((jj_la1_1[i] & 1 << j) != 0) {
                        la1tokens[32 + j] = true;
                    }
                    if ((jj_la1_2[i] & 1 << j) != 0) {
                        la1tokens[64 + j] = true;
                    }
                    if ((jj_la1_3[i] & 1 << j) != 0) {
                        la1tokens[96 + j] = true;
                    }
                    if ((jj_la1_4[i] & 1 << j) != 0) {
                        la1tokens[128 + j] = true;
                    }
                }
            }
        }
        for (int i = 0; i < 160; i++) {
            if (la1tokens[i]) {
                jj_expentry = new int[1];
                jj_expentry[0] = i;
                jj_expentries.addElement(jj_expentry);
            }
        }
        jj_endpos = 0;
        jj_rescan_token();
        jj_add_error_token(0, 0);
        int[][] exptokseq = new int[jj_expentries.size()][];
        for (int i = 0; i < jj_expentries.size(); i++) {
            exptokseq[i] = jj_expentries.elementAt(i);
        }
        return new ParseException(token, exptokseq, tokenImage);
    }

    // replay recorded lookaheads to collect expected-token info for errors
    private void jj_rescan_token() {
        jj_rescan = true;
        for (int i = 0; i < 8; i++) {
            try {
                JJCalls p = jj_2_rtns[i];
                do {
                    if (p.gen > jj_gen) {
                        jj_la = p.arg;
                        jj_lastpos = jj_scanpos = p.first;
                        switch (i) {
                        case 0: jj_3_1(); break;
                        case 1: jj_3_2(); break;
                        case 2: jj_3_3(); break;
                        case 3: jj_3_4(); break;
                        case 4: jj_3_5(); break;
                        case 5: jj_3_6(); break;
                        case 6: jj_3_7(); break;
                        case 7: jj_3_8(); break;
                        }
                    }
                    p = p.next;
                } while (p != null);
            } catch (LookaheadSuccess ls) {
            }
        }
        jj_rescan = false;
    }

    // record a lookahead attempt for later error-reporting replay
    private void jj_save(int index, int xla) {
        JJCalls p = jj_2_rtns[index];
        while (p.gen > jj_gen) {
            if (p.next == null) {
                p = p.next = new JJCalls();
                break;
            }
            p = p.next;
        }
        p.gen = jj_gen + xla - jj_la;
        p.first = token;
        p.arg = xla;
    }

    // memoization record for one lookahead invocation
    static class JJCalls {
        int gen;
        Token first;
        int arg;
        JJCalls next;
    }

}
package com.kamikaze.pfordelta; import java.nio.IntBuffer; /** * PForDeltaUnpack128WIthIntBuffer is almost the same as PForDeltaUnpack128, except that it use IntBuffer as input instead of int[]. */ public class PForDeltaUnpack128WIthIntBuffer{ static public void unpack(int[] out, IntBuffer in, int bits) { switch (bits) { case 0: unpack0(out, in); break; case 1: unpack1(out, in); break; case 2: unpack2(out, in); break; case 3: unpack3(out, in); break; case 4: unpack4(out, in); break; case 5: unpack5(out, in); break; case 6: unpack6(out, in); break; case 7: unpack7(out, in); break; case 8: unpack8(out, in); break; case 9: unpack9(out, in); break; case 10: unpack10(out, in); break; case 11: unpack11(out, in); break; case 12: unpack12(out, in); break; case 13: unpack13(out, in); break; case 16: unpack16(out, in); break; case 20: unpack20(out, in); break; case 28: unpack28(out, in); break; default: break; } } static private void unpack0(int[] out, IntBuffer in) { } static private void unpack1(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 1; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 1) & mask; out[2+outOffset] = (curInputValue0 >>> 2) & mask; out[3+outOffset] = (curInputValue0 >>> 3) & mask; out[4+outOffset] = (curInputValue0 >>> 4) & mask; out[5+outOffset] = (curInputValue0 >>> 5) & mask; out[6+outOffset] = (curInputValue0 >>> 6) & mask; out[7+outOffset] = (curInputValue0 >>> 7) & mask; out[8+outOffset] = (curInputValue0 >>> 8) & mask; out[9+outOffset] = (curInputValue0 >>> 9) & mask; out[10+outOffset] = (curInputValue0 >>> 10) & mask; out[11+outOffset] = (curInputValue0 >>> 11) & mask; out[12+outOffset] = (curInputValue0 >>> 12) & mask; out[13+outOffset] = (curInputValue0 >>> 13) & mask; out[14+outOffset] = (curInputValue0 >>> 14) & mask; out[15+outOffset] = (curInputValue0 >>> 15) & mask; out[16+outOffset] = (curInputValue0 >>> 16) & mask; out[17+outOffset] = 
(curInputValue0 >>> 17) & mask; out[18+outOffset] = (curInputValue0 >>> 18) & mask; out[19+outOffset] = (curInputValue0 >>> 19) & mask; out[20+outOffset] = (curInputValue0 >>> 20) & mask; out[21+outOffset] = (curInputValue0 >>> 21) & mask; out[22+outOffset] = (curInputValue0 >>> 22) & mask; out[23+outOffset] = (curInputValue0 >>> 23) & mask; out[24+outOffset] = (curInputValue0 >>> 24) & mask; out[25+outOffset] = (curInputValue0 >>> 25) & mask; out[26+outOffset] = (curInputValue0 >>> 26) & mask; out[27+outOffset] = (curInputValue0 >>> 27) & mask; out[28+outOffset] = (curInputValue0 >>> 28) & mask; out[29+outOffset] = (curInputValue0 >>> 29) & mask; out[30+outOffset] = (curInputValue0 >>> 30) & mask; out[31+outOffset] = curInputValue0 >>> 31; outOffset += 32; } } static private void unpack2(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 3; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 2) & mask; out[2+outOffset] = (curInputValue0 >>> 4) & mask; out[3+outOffset] = (curInputValue0 >>> 6) & mask; out[4+outOffset] = (curInputValue0 >>> 8) & mask; out[5+outOffset] = (curInputValue0 >>> 10) & mask; out[6+outOffset] = (curInputValue0 >>> 12) & mask; out[7+outOffset] = (curInputValue0 >>> 14) & mask; out[8+outOffset] = (curInputValue0 >>> 16) & mask; out[9+outOffset] = (curInputValue0 >>> 18) & mask; out[10+outOffset] = (curInputValue0 >>> 20) & mask; out[11+outOffset] = (curInputValue0 >>> 22) & mask; out[12+outOffset] = (curInputValue0 >>> 24) & mask; out[13+outOffset] = (curInputValue0 >>> 26) & mask; out[14+outOffset] = (curInputValue0 >>> 28) & mask; out[15+outOffset] = curInputValue0 >>> 30; out[16+outOffset] = curInputValue1 & mask; out[17+outOffset] = (curInputValue1 >>> 2) & mask; out[18+outOffset] = (curInputValue1 >>> 4) & mask; out[19+outOffset] = (curInputValue1 >>> 6) & mask; out[20+outOffset] = (curInputValue1 >>> 8) & mask; 
out[21+outOffset] = (curInputValue1 >>> 10) & mask; out[22+outOffset] = (curInputValue1 >>> 12) & mask; out[23+outOffset] = (curInputValue1 >>> 14) & mask; out[24+outOffset] = (curInputValue1 >>> 16) & mask; out[25+outOffset] = (curInputValue1 >>> 18) & mask; out[26+outOffset] = (curInputValue1 >>> 20) & mask; out[27+outOffset] = (curInputValue1 >>> 22) & mask; out[28+outOffset] = (curInputValue1 >>> 24) & mask; out[29+outOffset] = (curInputValue1 >>> 26) & mask; out[30+outOffset] = (curInputValue1 >>> 28) & mask; out[31+outOffset] = curInputValue1 >>> 30; outOffset += 32; } } static private void unpack3(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 7; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); int curInputValue2 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 3) & mask; out[2+outOffset] = (curInputValue0 >>> 6) & mask; out[3+outOffset] = (curInputValue0 >>> 9) & mask; out[4+outOffset] = (curInputValue0 >>> 12) & mask; out[5+outOffset] = (curInputValue0 >>> 15) & mask; out[6+outOffset] = (curInputValue0 >>> 18) & mask; out[7+outOffset] = (curInputValue0 >>> 21) & mask; out[8+outOffset] = (curInputValue0 >>> 24) & mask; out[9+outOffset] = (curInputValue0 >>> 27) & mask; out[10+outOffset] = ((curInputValue0 >>> 30) | (curInputValue1 << 2)) & mask; out[11+outOffset] = (curInputValue1 >>> 1) & mask; out[12+outOffset] = (curInputValue1 >>> 4) & mask; out[13+outOffset] = (curInputValue1 >>> 7) & mask; out[14+outOffset] = (curInputValue1 >>> 10) & mask; out[15+outOffset] = (curInputValue1 >>> 13) & mask; out[16+outOffset] = (curInputValue1 >>> 16) & mask; out[17+outOffset] = (curInputValue1 >>> 19) & mask; out[18+outOffset] = (curInputValue1 >>> 22) & mask; out[19+outOffset] = (curInputValue1 >>> 25) & mask; out[20+outOffset] = (curInputValue1 >>> 28) & mask; out[21+outOffset] = ((curInputValue1 >>> 31) | (curInputValue2 << 1)) & mask; out[22+outOffset] = 
(curInputValue2 >>> 2) & mask; out[23+outOffset] = (curInputValue2 >>> 5) & mask; out[24+outOffset] = (curInputValue2 >>> 8) & mask; out[25+outOffset] = (curInputValue2 >>> 11) & mask; out[26+outOffset] = (curInputValue2 >>> 14) & mask; out[27+outOffset] = (curInputValue2 >>> 17) & mask; out[28+outOffset] = (curInputValue2 >>> 20) & mask; out[29+outOffset] = (curInputValue2 >>> 23) & mask; out[30+outOffset] = (curInputValue2 >>> 26) & mask; out[31+outOffset] = curInputValue2 >>> 29; outOffset += 32; } } static private void unpack4(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 15; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); int curInputValue2 = in.get(); int curInputValue3 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 4) & mask; out[2+outOffset] = (curInputValue0 >>> 8) & mask; out[3+outOffset] = (curInputValue0 >>> 12) & mask; out[4+outOffset] = (curInputValue0 >>> 16) & mask; out[5+outOffset] = (curInputValue0 >>> 20) & mask; out[6+outOffset] = (curInputValue0 >>> 24) & mask; out[7+outOffset] = curInputValue0 >>> 28; out[8+outOffset] = curInputValue1 & mask; out[9+outOffset] = (curInputValue1 >>> 4) & mask; out[10+outOffset] = (curInputValue1 >>> 8) & mask; out[11+outOffset] = (curInputValue1 >>> 12) & mask; out[12+outOffset] = (curInputValue1 >>> 16) & mask; out[13+outOffset] = (curInputValue1 >>> 20) & mask; out[14+outOffset] = (curInputValue1 >>> 24) & mask; out[15+outOffset] = curInputValue1 >>> 28; out[16+outOffset] = curInputValue2 & mask; out[17+outOffset] = (curInputValue2 >>> 4) & mask; out[18+outOffset] = (curInputValue2 >>> 8) & mask; out[19+outOffset] = (curInputValue2 >>> 12) & mask; out[20+outOffset] = (curInputValue2 >>> 16) & mask; out[21+outOffset] = (curInputValue2 >>> 20) & mask; out[22+outOffset] = (curInputValue2 >>> 24) & mask; out[23+outOffset] = curInputValue2 >>> 28; out[24+outOffset] = curInputValue3 & mask; out[25+outOffset] = 
(curInputValue3 >>> 4) & mask; out[26+outOffset] = (curInputValue3 >>> 8) & mask; out[27+outOffset] = (curInputValue3 >>> 12) & mask; out[28+outOffset] = (curInputValue3 >>> 16) & mask; out[29+outOffset] = (curInputValue3 >>> 20) & mask; out[30+outOffset] = (curInputValue3 >>> 24) & mask; out[31+outOffset] = curInputValue3 >>> 28; outOffset += 32; } } static private void unpack5(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 31; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); int curInputValue2 = in.get(); int curInputValue3 = in.get(); int curInputValue4 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 5) & mask; out[2+outOffset] = (curInputValue0 >>> 10) & mask; out[3+outOffset] = (curInputValue0 >>> 15) & mask; out[4+outOffset] = (curInputValue0 >>> 20) & mask; out[5+outOffset] = (curInputValue0 >>> 25) & mask; out[6+outOffset] = ((curInputValue0 >>> 30) | (curInputValue1 << 2)) & mask; out[7+outOffset] = (curInputValue1 >>> 3) & mask; out[8+outOffset] = (curInputValue1 >>> 8) & mask; out[9+outOffset] = (curInputValue1 >>> 13) & mask; out[10+outOffset] = (curInputValue1 >>> 18) & mask; out[11+outOffset] = (curInputValue1 >>> 23) & mask; out[12+outOffset] = ((curInputValue1 >>> 28) | (curInputValue2 << 4)) & mask; out[13+outOffset] = (curInputValue2 >>> 1) & mask; out[14+outOffset] = (curInputValue2 >>> 6) & mask; out[15+outOffset] = (curInputValue2 >>> 11) & mask; out[16+outOffset] = (curInputValue2 >>> 16) & mask; out[17+outOffset] = (curInputValue2 >>> 21) & mask; out[18+outOffset] = (curInputValue2 >>> 26) & mask; out[19+outOffset] = ((curInputValue2 >>> 31) | (curInputValue3 << 1)) & mask; out[20+outOffset] = (curInputValue3 >>> 4) & mask; out[21+outOffset] = (curInputValue3 >>> 9) & mask; out[22+outOffset] = (curInputValue3 >>> 14) & mask; out[23+outOffset] = (curInputValue3 >>> 19) & mask; out[24+outOffset] = (curInputValue3 >>> 24) & mask; out[25+outOffset] = 
((curInputValue3 >>> 29) | (curInputValue4 << 3)) & mask; out[26+outOffset] = (curInputValue4 >>> 2) & mask; out[27+outOffset] = (curInputValue4 >>> 7) & mask; out[28+outOffset] = (curInputValue4 >>> 12) & mask; out[29+outOffset] = (curInputValue4 >>> 17) & mask; out[30+outOffset] = (curInputValue4 >>> 22) & mask; out[31+outOffset] = curInputValue4 >>> 27; outOffset += 32; } } static private void unpack6(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 63; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); int curInputValue2 = in.get(); int curInputValue3 = in.get(); int curInputValue4 = in.get(); int curInputValue5 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 6) & mask; out[2+outOffset] = (curInputValue0 >>> 12) & mask; out[3+outOffset] = (curInputValue0 >>> 18) & mask; out[4+outOffset] = (curInputValue0 >>> 24) & mask; out[5+outOffset] = ((curInputValue0 >>> 30) | (curInputValue1 << 2)) & mask; out[6+outOffset] = (curInputValue1 >>> 4) & mask; out[7+outOffset] = (curInputValue1 >>> 10) & mask; out[8+outOffset] = (curInputValue1 >>> 16) & mask; out[9+outOffset] = (curInputValue1 >>> 22) & mask; out[10+outOffset] = ((curInputValue1 >>> 28) | (curInputValue2 << 4)) & mask; out[11+outOffset] = (curInputValue2 >>> 2) & mask; out[12+outOffset] = (curInputValue2 >>> 8) & mask; out[13+outOffset] = (curInputValue2 >>> 14) & mask; out[14+outOffset] = (curInputValue2 >>> 20) & mask; out[15+outOffset] = curInputValue2 >>> 26; out[16+outOffset] = curInputValue3 & mask; out[17+outOffset] = (curInputValue3 >>> 6) & mask; out[18+outOffset] = (curInputValue3 >>> 12) & mask; out[19+outOffset] = (curInputValue3 >>> 18) & mask; out[20+outOffset] = (curInputValue3 >>> 24) & mask; out[21+outOffset] = ((curInputValue3 >>> 30) | (curInputValue4 << 2)) & mask; out[22+outOffset] = (curInputValue4 >>> 4) & mask; out[23+outOffset] = (curInputValue4 >>> 10) & mask; out[24+outOffset] = 
(curInputValue4 >>> 16) & mask; out[25+outOffset] = (curInputValue4 >>> 22) & mask; out[26+outOffset] = ((curInputValue4 >>> 28) | (curInputValue5 << 4)) & mask; out[27+outOffset] = (curInputValue5 >>> 2) & mask; out[28+outOffset] = (curInputValue5 >>> 8) & mask; out[29+outOffset] = (curInputValue5 >>> 14) & mask; out[30+outOffset] = (curInputValue5 >>> 20) & mask; out[31+outOffset] = curInputValue5 >>> 26; outOffset += 32; } } static private void unpack7(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 127; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); int curInputValue2 = in.get(); int curInputValue3 = in.get(); int curInputValue4 = in.get(); int curInputValue5 = in.get(); int curInputValue6 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 7) & mask; out[2+outOffset] = (curInputValue0 >>> 14) & mask; out[3+outOffset] = (curInputValue0 >>> 21) & mask; out[4+outOffset] = ((curInputValue0 >>> 28) | (curInputValue1 << 4)) & mask; out[5+outOffset] = (curInputValue1 >>> 3) & mask; out[6+outOffset] = (curInputValue1 >>> 10) & mask; out[7+outOffset] = (curInputValue1 >>> 17) & mask; out[8+outOffset] = (curInputValue1 >>> 24) & mask; out[9+outOffset] = ((curInputValue1 >>> 31) | (curInputValue2 << 1)) & mask; out[10+outOffset] = (curInputValue2 >>> 6) & mask; out[11+outOffset] = (curInputValue2 >>> 13) & mask; out[12+outOffset] = (curInputValue2 >>> 20) & mask; out[13+outOffset] = ((curInputValue2 >>> 27) | (curInputValue3 << 5)) & mask; out[14+outOffset] = (curInputValue3 >>> 2) & mask; out[15+outOffset] = (curInputValue3 >>> 9) & mask; out[16+outOffset] = (curInputValue3 >>> 16) & mask; out[17+outOffset] = (curInputValue3 >>> 23) & mask; out[18+outOffset] = ((curInputValue3 >>> 30) | (curInputValue4 << 2)) & mask; out[19+outOffset] = (curInputValue4 >>> 5) & mask; out[20+outOffset] = (curInputValue4 >>> 12) & mask; out[21+outOffset] = (curInputValue4 >>> 19) & mask; 
out[22+outOffset] = ((curInputValue4 >>> 26) | (curInputValue5 << 6)) & mask; out[23+outOffset] = (curInputValue5 >>> 1) & mask; out[24+outOffset] = (curInputValue5 >>> 8) & mask; out[25+outOffset] = (curInputValue5 >>> 15) & mask; out[26+outOffset] = (curInputValue5 >>> 22) & mask; out[27+outOffset] = ((curInputValue5 >>> 29) | (curInputValue6 << 3)) & mask; out[28+outOffset] = (curInputValue6 >>> 4) & mask; out[29+outOffset] = (curInputValue6 >>> 11) & mask; out[30+outOffset] = (curInputValue6 >>> 18) & mask; out[31+outOffset] = curInputValue6 >>> 25; outOffset += 32; } } static private void unpack8(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 255; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); int curInputValue2 = in.get(); int curInputValue3 = in.get(); int curInputValue4 = in.get(); int curInputValue5 = in.get(); int curInputValue6 = in.get(); int curInputValue7 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 8) & mask; out[2+outOffset] = (curInputValue0 >>> 16) & mask; out[3+outOffset] = curInputValue0 >>> 24; out[4+outOffset] = curInputValue1 & mask; out[5+outOffset] = (curInputValue1 >>> 8) & mask; out[6+outOffset] = (curInputValue1 >>> 16) & mask; out[7+outOffset] = curInputValue1 >>> 24; out[8+outOffset] = curInputValue2 & mask; out[9+outOffset] = (curInputValue2 >>> 8) & mask; out[10+outOffset] = (curInputValue2 >>> 16) & mask; out[11+outOffset] = curInputValue2 >>> 24; out[12+outOffset] = curInputValue3 & mask; out[13+outOffset] = (curInputValue3 >>> 8) & mask; out[14+outOffset] = (curInputValue3 >>> 16) & mask; out[15+outOffset] = curInputValue3 >>> 24; out[16+outOffset] = curInputValue4 & mask; out[17+outOffset] = (curInputValue4 >>> 8) & mask; out[18+outOffset] = (curInputValue4 >>> 16) & mask; out[19+outOffset] = curInputValue4 >>> 24; out[20+outOffset] = curInputValue5 & mask; out[21+outOffset] = (curInputValue5 >>> 8) & mask; out[22+outOffset] = 
(curInputValue5 >>> 16) & mask; out[23+outOffset] = curInputValue5 >>> 24; out[24+outOffset] = curInputValue6 & mask; out[25+outOffset] = (curInputValue6 >>> 8) & mask; out[26+outOffset] = (curInputValue6 >>> 16) & mask; out[27+outOffset] = curInputValue6 >>> 24; out[28+outOffset] = curInputValue7 & mask; out[29+outOffset] = (curInputValue7 >>> 8) & mask; out[30+outOffset] = (curInputValue7 >>> 16) & mask; out[31+outOffset] = curInputValue7 >>> 24; outOffset += 32; } } static private void unpack9(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 511; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); int curInputValue2 = in.get(); int curInputValue3 = in.get(); int curInputValue4 = in.get(); int curInputValue5 = in.get(); int curInputValue6 = in.get(); int curInputValue7 = in.get(); int curInputValue8 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 9) & mask; out[2+outOffset] = (curInputValue0 >>> 18) & mask; out[3+outOffset] = ((curInputValue0 >>> 27) | (curInputValue1 << 5)) & mask; out[4+outOffset] = (curInputValue1 >>> 4) & mask; out[5+outOffset] = (curInputValue1 >>> 13) & mask; out[6+outOffset] = (curInputValue1 >>> 22) & mask; out[7+outOffset] = ((curInputValue1 >>> 31) | (curInputValue2 << 1)) & mask; out[8+outOffset] = (curInputValue2 >>> 8) & mask; out[9+outOffset] = (curInputValue2 >>> 17) & mask; out[10+outOffset] = ((curInputValue2 >>> 26) | (curInputValue3 << 6)) & mask; out[11+outOffset] = (curInputValue3 >>> 3) & mask; out[12+outOffset] = (curInputValue3 >>> 12) & mask; out[13+outOffset] = (curInputValue3 >>> 21) & mask; out[14+outOffset] = ((curInputValue3 >>> 30) | (curInputValue4 << 2)) & mask; out[15+outOffset] = (curInputValue4 >>> 7) & mask; out[16+outOffset] = (curInputValue4 >>> 16) & mask; out[17+outOffset] = ((curInputValue4 >>> 25) | (curInputValue5 << 7)) & mask; out[18+outOffset] = (curInputValue5 >>> 2) & mask; out[19+outOffset] = 
(curInputValue5 >>> 11) & mask; out[20+outOffset] = (curInputValue5 >>> 20) & mask; out[21+outOffset] = ((curInputValue5 >>> 29) | (curInputValue6 << 3)) & mask; out[22+outOffset] = (curInputValue6 >>> 6) & mask; out[23+outOffset] = (curInputValue6 >>> 15) & mask; out[24+outOffset] = ((curInputValue6 >>> 24) | (curInputValue7 << 8)) & mask; out[25+outOffset] = (curInputValue7 >>> 1) & mask; out[26+outOffset] = (curInputValue7 >>> 10) & mask; out[27+outOffset] = (curInputValue7 >>> 19) & mask; out[28+outOffset] = ((curInputValue7 >>> 28) | (curInputValue8 << 4)) & mask; out[29+outOffset] = (curInputValue8 >>> 5) & mask; out[30+outOffset] = (curInputValue8 >>> 14) & mask; out[31+outOffset] = curInputValue8 >>> 23; outOffset += 32; } } static private void unpack10(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 1023; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); int curInputValue2 = in.get(); int curInputValue3 = in.get(); int curInputValue4 = in.get(); int curInputValue5 = in.get(); int curInputValue6 = in.get(); int curInputValue7 = in.get(); int curInputValue8 = in.get(); int curInputValue9 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 10) & mask; out[2+outOffset] = (curInputValue0 >>> 20) & mask; out[3+outOffset] = ((curInputValue0 >>> 30) | (curInputValue1 << 2)) & mask; out[4+outOffset] = (curInputValue1 >>> 8) & mask; out[5+outOffset] = (curInputValue1 >>> 18) & mask; out[6+outOffset] = ((curInputValue1 >>> 28) | (curInputValue2 << 4)) & mask; out[7+outOffset] = (curInputValue2 >>> 6) & mask; out[8+outOffset] = (curInputValue2 >>> 16) & mask; out[9+outOffset] = ((curInputValue2 >>> 26) | (curInputValue3 << 6)) & mask; out[10+outOffset] = (curInputValue3 >>> 4) & mask; out[11+outOffset] = (curInputValue3 >>> 14) & mask; out[12+outOffset] = ((curInputValue3 >>> 24) | (curInputValue4 << 8)) & mask; out[13+outOffset] = (curInputValue4 >>> 2) & mask; 
out[14+outOffset] = (curInputValue4 >>> 12) & mask; out[15+outOffset] = curInputValue4 >>> 22; out[16+outOffset] = curInputValue5 & mask; out[17+outOffset] = (curInputValue5 >>> 10) & mask; out[18+outOffset] = (curInputValue5 >>> 20) & mask; out[19+outOffset] = ((curInputValue5 >>> 30) | (curInputValue6 << 2)) & mask; out[20+outOffset] = (curInputValue6 >>> 8) & mask; out[21+outOffset] = (curInputValue6 >>> 18) & mask; out[22+outOffset] = ((curInputValue6 >>> 28) | (curInputValue7 << 4)) & mask; out[23+outOffset] = (curInputValue7 >>> 6) & mask; out[24+outOffset] = (curInputValue7 >>> 16) & mask; out[25+outOffset] = ((curInputValue7 >>> 26) | (curInputValue8 << 6)) & mask; out[26+outOffset] = (curInputValue8 >>> 4) & mask; out[27+outOffset] = (curInputValue8 >>> 14) & mask; out[28+outOffset] = ((curInputValue8 >>> 24) | (curInputValue9 << 8)) & mask; out[29+outOffset] = (curInputValue9 >>> 2) & mask; out[30+outOffset] = (curInputValue9 >>> 12) & mask; out[31+outOffset] = curInputValue9 >>> 22; outOffset += 32; } } static private void unpack11(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 2047; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); int curInputValue2 = in.get(); int curInputValue3 = in.get(); int curInputValue4 = in.get(); int curInputValue5 = in.get(); int curInputValue6 = in.get(); int curInputValue7 = in.get(); int curInputValue8 = in.get(); int curInputValue9 = in.get(); int curInputValue10 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 11) & mask; out[2+outOffset] = ((curInputValue0 >>> 22) | (curInputValue1 << 10)) & mask; out[3+outOffset] = (curInputValue1 >>> 1) & mask; out[4+outOffset] = (curInputValue1 >>> 12) & mask; out[5+outOffset] = ((curInputValue1 >>> 23) | (curInputValue2 << 9)) & mask; out[6+outOffset] = (curInputValue2 >>> 2) & mask; out[7+outOffset] = (curInputValue2 >>> 13) & mask; out[8+outOffset] = ((curInputValue2 >>> 24) | 
(curInputValue3 << 8)) & mask; out[9+outOffset] = (curInputValue3 >>> 3) & mask; out[10+outOffset] = (curInputValue3 >>> 14) & mask; out[11+outOffset] = ((curInputValue3 >>> 25) | (curInputValue4 << 7)) & mask; out[12+outOffset] = (curInputValue4 >>> 4) & mask; out[13+outOffset] = (curInputValue4 >>> 15) & mask; out[14+outOffset] = ((curInputValue4 >>> 26) | (curInputValue5 << 6)) & mask; out[15+outOffset] = (curInputValue5 >>> 5) & mask; out[16+outOffset] = (curInputValue5 >>> 16) & mask; out[17+outOffset] = ((curInputValue5 >>> 27) | (curInputValue6 << 5)) & mask; out[18+outOffset] = (curInputValue6 >>> 6) & mask; out[19+outOffset] = (curInputValue6 >>> 17) & mask; out[20+outOffset] = ((curInputValue6 >>> 28) | (curInputValue7 << 4)) & mask; out[21+outOffset] = (curInputValue7 >>> 7) & mask; out[22+outOffset] = (curInputValue7 >>> 18) & mask; out[23+outOffset] = ((curInputValue7 >>> 29) | (curInputValue8 << 3)) & mask; out[24+outOffset] = (curInputValue8 >>> 8) & mask; out[25+outOffset] = (curInputValue8 >>> 19) & mask; out[26+outOffset] = ((curInputValue8 >>> 30) | (curInputValue9 << 2)) & mask; out[27+outOffset] = (curInputValue9 >>> 9) & mask; out[28+outOffset] = (curInputValue9 >>> 20) & mask; out[29+outOffset] = ((curInputValue9 >>> 31) | (curInputValue10 << 1)) & mask; out[30+outOffset] = (curInputValue10 >>> 10) & mask; out[31+outOffset] = curInputValue10 >>> 21; outOffset += 32; } } static private void unpack12(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 4095; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); int curInputValue2 = in.get(); int curInputValue3 = in.get(); int curInputValue4 = in.get(); int curInputValue5 = in.get(); int curInputValue6 = in.get(); int curInputValue7 = in.get(); int curInputValue8 = in.get(); int curInputValue9 = in.get(); int curInputValue10 = in.get(); int curInputValue11 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 12) & 
mask; out[2+outOffset] = ((curInputValue0 >>> 24) | (curInputValue1 << 8)) & mask; out[3+outOffset] = (curInputValue1 >>> 4) & mask; out[4+outOffset] = (curInputValue1 >>> 16) & mask; out[5+outOffset] = ((curInputValue1 >>> 28) | (curInputValue2 << 4)) & mask; out[6+outOffset] = (curInputValue2 >>> 8) & mask; out[7+outOffset] = curInputValue2 >>> 20; out[8+outOffset] = curInputValue3 & mask; out[9+outOffset] = (curInputValue3 >>> 12) & mask; out[10+outOffset] = ((curInputValue3 >>> 24) | (curInputValue4 << 8)) & mask; out[11+outOffset] = (curInputValue4 >>> 4) & mask; out[12+outOffset] = (curInputValue4 >>> 16) & mask; out[13+outOffset] = ((curInputValue4 >>> 28) | (curInputValue5 << 4)) & mask; out[14+outOffset] = (curInputValue5 >>> 8) & mask; out[15+outOffset] = curInputValue5 >>> 20; out[16+outOffset] = curInputValue6 & mask; out[17+outOffset] = (curInputValue6 >>> 12) & mask; out[18+outOffset] = ((curInputValue6 >>> 24) | (curInputValue7 << 8)) & mask; out[19+outOffset] = (curInputValue7 >>> 4) & mask; out[20+outOffset] = (curInputValue7 >>> 16) & mask; out[21+outOffset] = ((curInputValue7 >>> 28) | (curInputValue8 << 4)) & mask; out[22+outOffset] = (curInputValue8 >>> 8) & mask; out[23+outOffset] = curInputValue8 >>> 20; out[24+outOffset] = curInputValue9 & mask; out[25+outOffset] = (curInputValue9 >>> 12) & mask; out[26+outOffset] = ((curInputValue9 >>> 24) | (curInputValue10 << 8)) & mask; out[27+outOffset] = (curInputValue10 >>> 4) & mask; out[28+outOffset] = (curInputValue10 >>> 16) & mask; out[29+outOffset] = ((curInputValue10 >>> 28) | (curInputValue11 << 4)) & mask; out[30+outOffset] = (curInputValue11 >>> 8) & mask; out[31+outOffset] = curInputValue11 >>> 20; outOffset += 32; } } static private void unpack13(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 8191; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); int curInputValue2 = in.get(); int curInputValue3 = in.get(); int curInputValue4 = in.get(); 
int curInputValue5 = in.get(); int curInputValue6 = in.get(); int curInputValue7 = in.get(); int curInputValue8 = in.get(); int curInputValue9 = in.get(); int curInputValue10 = in.get(); int curInputValue11 = in.get(); int curInputValue12 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = (curInputValue0 >>> 13) & mask; out[2+outOffset] = ((curInputValue0 >>> 26) | (curInputValue1 << 6)) & mask; out[3+outOffset] = (curInputValue1 >>> 7) & mask; out[4+outOffset] = ((curInputValue1 >>> 20) | (curInputValue2 << 12)) & mask; out[5+outOffset] = (curInputValue2 >>> 1) & mask; out[6+outOffset] = (curInputValue2 >>> 14) & mask; out[7+outOffset] = ((curInputValue2 >>> 27) | (curInputValue3 << 5)) & mask; out[8+outOffset] = (curInputValue3 >>> 8) & mask; out[9+outOffset] = ((curInputValue3 >>> 21) | (curInputValue4 << 11)) & mask; out[10+outOffset] = (curInputValue4 >>> 2) & mask; out[11+outOffset] = (curInputValue4 >>> 15) & mask; out[12+outOffset] = ((curInputValue4 >>> 28) | (curInputValue5 << 4)) & mask; out[13+outOffset] = (curInputValue5 >>> 9) & mask; out[14+outOffset] = ((curInputValue5 >>> 22) | (curInputValue6 << 10)) & mask; out[15+outOffset] = (curInputValue6 >>> 3) & mask; out[16+outOffset] = (curInputValue6 >>> 16) & mask; out[17+outOffset] = ((curInputValue6 >>> 29) | (curInputValue7 << 3)) & mask; out[18+outOffset] = (curInputValue7 >>> 10) & mask; out[19+outOffset] = ((curInputValue7 >>> 23) | (curInputValue8 << 9)) & mask; out[20+outOffset] = (curInputValue8 >>> 4) & mask; out[21+outOffset] = (curInputValue8 >>> 17) & mask; out[22+outOffset] = ((curInputValue8 >>> 30) | (curInputValue9 << 2)) & mask; out[23+outOffset] = (curInputValue9 >>> 11) & mask; out[24+outOffset] = ((curInputValue9 >>> 24) | (curInputValue10 << 8)) & mask; out[25+outOffset] = (curInputValue10 >>> 5) & mask; out[26+outOffset] = (curInputValue10 >>> 18) & mask; out[27+outOffset] = ((curInputValue10 >>> 31) | (curInputValue11 << 1)) & mask; out[28+outOffset] = 
(curInputValue11 >>> 12) & mask; out[29+outOffset] = ((curInputValue11 >>> 25) | (curInputValue12 << 7)) & mask; out[30+outOffset] = (curInputValue12 >>> 6) & mask; out[31+outOffset] = curInputValue12 >>> 19; outOffset += 32; } } static private void unpack16(int[] out, IntBuffer in) { int outOffset = 0; final int mask = 65535; for(int i=0; i<4; ++i){ int curInputValue0 = in.get(); int curInputValue1 = in.get(); int curInputValue2 = in.get(); int curInputValue3 = in.get(); int curInputValue4 = in.get(); int curInputValue5 = in.get(); int curInputValue6 = in.get(); int curInputValue7 = in.get(); int curInputValue8 = in.get(); int curInputValue9 = in.get(); int curInputValue10 = in.get(); int curInputValue11 = in.get(); int curInputValue12 = in.get(); int curInputValue13 = in.get(); int curInputValue14 = in.get(); int curInputValue15 = in.get(); out[0+outOffset] = curInputValue0 & mask; out[1+outOffset] = curInputValue0 >>> 16; out[2+outOffset] = curInputValue1 & mask; out[3+outOffset] = curInputValue1 >>> 16; out[4+outOffset] = curInputValue2 & mask; out[5+outOffset] = curInputValue2 >>> 16; out[6+outOffset] = curInputValue3 & mask; out[7+outOffset] = curInputValue3 >>> 16; out[8+outOffset] = curInputValue4 & mask; out[9+outOffset] = curInputValue4 >>> 16; out[10+outOffset] = curInputValue5 & mask; out[11+outOffset] = curInputValue5 >>> 16; out[12+outOffset] = curInputValue6 & mask; out[13+outOffset] = curInputValue6 >>> 16; out[14+outOffset] = curInputValue7 & mask; out[15+outOffset] = curInputValue7 >>> 16; out[16+outOffset] = curInputValue8 & mask; out[17+outOffset] = curInputValue8 >>> 16; out[18+outOffset] = curInputValue9 & mask; out[19+outOffset] = curInputValue9 >>> 16; out[20+outOffset] = curInputValue10 & mask; out[21+outOffset] = curInputValue10 >>> 16; out[22+outOffset] = curInputValue11 & mask; out[23+outOffset] = curInputValue11 >>> 16; out[24+outOffset] = curInputValue12 & mask; out[25+outOffset] = curInputValue12 >>> 16; out[26+outOffset] = 
// --- tail of the preceding 16-bit unpack routine (its head lies above this chunk):
// stores the remaining low/high half-words of input words 13-15, then closes the
// per-iteration loop and the method.
curInputValue13 & mask; out[27+outOffset] = curInputValue13 >>> 16;
out[28+outOffset] = curInputValue14 & mask; out[29+outOffset] = curInputValue14 >>> 16;
out[30+outOffset] = curInputValue15 & mask; out[31+outOffset] = curInputValue15 >>> 16;
outOffset += 32;
}
}

/**
 * Unpacks 128 20-bit integers from {@code in} into {@code out}.
 * Each of the 4 loop iterations consumes 20 packed 32-bit input words and
 * produces 32 output values; fields that straddle a word boundary are
 * reassembled by OR-ing the high bits of one word with the low bits of the
 * next. {@code mask} is 0xFFFFF (2^20 - 1).
 */
static private void unpack20(int[] out, IntBuffer in) {
    int outOffset = 0;
    final int mask = 1048575; // 2^20 - 1
    for(int i=0; i<4; ++i){
        int curInputValue0 = in.get();
        int curInputValue1 = in.get();
        int curInputValue2 = in.get();
        int curInputValue3 = in.get();
        int curInputValue4 = in.get();
        int curInputValue5 = in.get();
        int curInputValue6 = in.get();
        int curInputValue7 = in.get();
        int curInputValue8 = in.get();
        int curInputValue9 = in.get();
        int curInputValue10 = in.get();
        int curInputValue11 = in.get();
        int curInputValue12 = in.get();
        int curInputValue13 = in.get();
        int curInputValue14 = in.get();
        int curInputValue15 = in.get();
        int curInputValue16 = in.get();
        int curInputValue17 = in.get();
        int curInputValue18 = in.get();
        int curInputValue19 = in.get();
        // 8 output values are extracted from every 5 input words (5*32 = 8*20 bits)
        out[0+outOffset] = curInputValue0 & mask;
        out[1+outOffset] = ((curInputValue0 >>> 20) | (curInputValue1 << 12)) & mask;
        out[2+outOffset] = (curInputValue1 >>> 8) & mask;
        out[3+outOffset] = ((curInputValue1 >>> 28) | (curInputValue2 << 4)) & mask;
        out[4+outOffset] = ((curInputValue2 >>> 16) | (curInputValue3 << 16)) & mask;
        out[5+outOffset] = (curInputValue3 >>> 4) & mask;
        out[6+outOffset] = ((curInputValue3 >>> 24) | (curInputValue4 << 8)) & mask;
        out[7+outOffset] = curInputValue4 >>> 12;
        out[8+outOffset] = curInputValue5 & mask;
        out[9+outOffset] = ((curInputValue5 >>> 20) | (curInputValue6 << 12)) & mask;
        out[10+outOffset] = (curInputValue6 >>> 8) & mask;
        out[11+outOffset] = ((curInputValue6 >>> 28) | (curInputValue7 << 4)) & mask;
        out[12+outOffset] = ((curInputValue7 >>> 16) | (curInputValue8 << 16)) & mask;
        out[13+outOffset] = (curInputValue8 >>> 4) & mask;
        out[14+outOffset] = ((curInputValue8 >>> 24) | (curInputValue9 << 8)) & mask;
        out[15+outOffset] = curInputValue9 >>> 12;
        out[16+outOffset] = curInputValue10 & mask;
        out[17+outOffset] = ((curInputValue10 >>> 20) | (curInputValue11 << 12)) & mask;
        out[18+outOffset] = (curInputValue11 >>> 8) & mask;
        out[19+outOffset] = ((curInputValue11 >>> 28) | (curInputValue12 << 4)) & mask;
        out[20+outOffset] = ((curInputValue12 >>> 16) | (curInputValue13 << 16)) & mask;
        out[21+outOffset] = (curInputValue13 >>> 4) & mask;
        out[22+outOffset] = ((curInputValue13 >>> 24) | (curInputValue14 << 8)) & mask;
        out[23+outOffset] = curInputValue14 >>> 12;
        out[24+outOffset] = curInputValue15 & mask;
        out[25+outOffset] = ((curInputValue15 >>> 20) | (curInputValue16 << 12)) & mask;
        out[26+outOffset] = (curInputValue16 >>> 8) & mask;
        out[27+outOffset] = ((curInputValue16 >>> 28) | (curInputValue17 << 4)) & mask;
        out[28+outOffset] = ((curInputValue17 >>> 16) | (curInputValue18 << 16)) & mask;
        out[29+outOffset] = (curInputValue18 >>> 4) & mask;
        out[30+outOffset] = ((curInputValue18 >>> 24) | (curInputValue19 << 8)) & mask;
        out[31+outOffset] = curInputValue19 >>> 12;
        outOffset += 32;
    }
}

/**
 * Unpacks 128 28-bit integers from {@code in} into {@code out}.
 * Each of the 4 loop iterations consumes 28 packed 32-bit input words and
 * produces 32 output values (28*32 = 32*28 bits). {@code mask} is
 * 0xFFFFFFF (2^28 - 1).
 */
static private void unpack28(int[] out, IntBuffer in) {
    int outOffset = 0;
    final int mask = 268435455; // 2^28 - 1
    for(int i=0; i<4; ++i){
        int curInputValue0 = in.get();
        int curInputValue1 = in.get();
        int curInputValue2 = in.get();
        int curInputValue3 = in.get();
        int curInputValue4 = in.get();
        int curInputValue5 = in.get();
        int curInputValue6 = in.get();
        int curInputValue7 = in.get();
        int curInputValue8 = in.get();
        int curInputValue9 = in.get();
        int curInputValue10 = in.get();
        int curInputValue11 = in.get();
        int curInputValue12 = in.get();
        int curInputValue13 = in.get();
        int curInputValue14 = in.get();
        int curInputValue15 = in.get();
        int curInputValue16 = in.get();
        int curInputValue17 = in.get();
        int curInputValue18 = in.get();
        int curInputValue19 = in.get();
        int curInputValue20 = in.get();
        int curInputValue21 = in.get();
        int curInputValue22 = in.get();
        int curInputValue23 = in.get();
        int curInputValue24 = in.get();
        int curInputValue25 = in.get();
        int curInputValue26 = in.get();
        int curInputValue27 = in.get();
        // 8 output values are extracted from every 7 input words (7*32 = 8*28 bits)
        out[0+outOffset] = curInputValue0 & mask;
        out[1+outOffset] = ((curInputValue0 >>> 28) | (curInputValue1 << 4)) & mask;
        out[2+outOffset] = ((curInputValue1 >>> 24) | (curInputValue2 << 8)) & mask;
        out[3+outOffset] = ((curInputValue2 >>> 20) | (curInputValue3 << 12)) & mask;
        out[4+outOffset] = ((curInputValue3 >>> 16) | (curInputValue4 << 16)) & mask;
        out[5+outOffset] = ((curInputValue4 >>> 12) | (curInputValue5 << 20)) & mask;
        out[6+outOffset] = ((curInputValue5 >>> 8) | (curInputValue6 << 24)) & mask;
        out[7+outOffset] = curInputValue6 >>> 4;
        out[8+outOffset] = curInputValue7 & mask;
        out[9+outOffset] = ((curInputValue7 >>> 28) | (curInputValue8 << 4)) & mask;
        out[10+outOffset] = ((curInputValue8 >>> 24) | (curInputValue9 << 8)) & mask;
        out[11+outOffset] = ((curInputValue9 >>> 20) | (curInputValue10 << 12)) & mask;
        out[12+outOffset] = ((curInputValue10 >>> 16) | (curInputValue11 << 16)) & mask;
        out[13+outOffset] = ((curInputValue11 >>> 12) | (curInputValue12 << 20)) & mask;
        out[14+outOffset] = ((curInputValue12 >>> 8) | (curInputValue13 << 24)) & mask;
        out[15+outOffset] = curInputValue13 >>> 4;
        out[16+outOffset] = curInputValue14 & mask;
        out[17+outOffset] = ((curInputValue14 >>> 28) | (curInputValue15 << 4)) & mask;
        out[18+outOffset] = ((curInputValue15 >>> 24) | (curInputValue16 << 8)) & mask;
        out[19+outOffset] = ((curInputValue16 >>> 20) | (curInputValue17 << 12)) & mask;
        out[20+outOffset] = ((curInputValue17 >>> 16) | (curInputValue18 << 16)) & mask;
        out[21+outOffset] = ((curInputValue18 >>> 12) | (curInputValue19 << 20)) & mask;
        out[22+outOffset] = ((curInputValue19 >>> 8) | (curInputValue20 << 24)) & mask;
        out[23+outOffset] = curInputValue20 >>> 4;
        out[24+outOffset] = curInputValue21 & mask;
        out[25+outOffset] = ((curInputValue21 >>> 28) | (curInputValue22 << 4)) & mask;
        out[26+outOffset] = ((curInputValue22 >>> 24) | (curInputValue23 << 8)) & mask;
        out[27+outOffset] = ((curInputValue23 >>> 20) | (curInputValue24 << 12)) & mask;
        out[28+outOffset] = ((curInputValue24 >>> 16) | (curInputValue25 << 16)) & mask;
        out[29+outOffset] = ((curInputValue25 >>> 12) | (curInputValue26 << 20)) & mask;
        out[30+outOffset] = ((curInputValue26 >>> 8) | (curInputValue27 << 24)) & mask;
        out[31+outOffset] = curInputValue27 >>> 4;
        outOffset += 32;
    }
}
}
/**
 * Copyright 2005-2015 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.rice.krad.uif.view;

import org.apache.commons.lang.StringUtils;
import org.kuali.rice.core.api.CoreApiServiceLocator;
import org.kuali.rice.core.api.config.property.ConfigurationService;
import org.kuali.rice.kim.api.KimConstants;
import org.kuali.rice.kim.api.identity.Person;
import org.kuali.rice.krad.bo.DataObjectAuthorizerBase;
import org.kuali.rice.krad.datadictionary.AttributeSecurity;
import org.kuali.rice.krad.datadictionary.parse.BeanTag;
import org.kuali.rice.krad.datadictionary.parse.BeanTagAttribute;
import org.kuali.rice.krad.uif.component.Component;
import org.kuali.rice.krad.uif.component.ComponentSecurity;
import org.kuali.rice.krad.uif.component.DataBinding;
import org.kuali.rice.krad.uif.container.CollectionGroup;
import org.kuali.rice.krad.uif.container.Group;
import org.kuali.rice.krad.uif.element.Action;
import org.kuali.rice.krad.uif.field.DataField;
import org.kuali.rice.krad.uif.field.DataFieldSecurity;
import org.kuali.rice.krad.uif.field.Field;
import org.kuali.rice.krad.uif.field.FieldSecurity;
import org.kuali.rice.krad.uif.util.ObjectPropertyUtils;
import org.kuali.rice.krad.uif.widget.Widget;
import org.kuali.rice.krad.util.KRADConstants;
import org.kuali.rice.krad.util.KRADUtils;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * Implementation of {@link ViewAuthorizer} that verifies authorization with KIM permission checks
 *
 * <p>
 * Each permission goes through one of the isAuthorized methods provided by
 * {@link org.kuali.rice.krad.bo.DataObjectAuthorizer}, these in turn call {@link #addPermissionDetails(Object, java.util.Map)}
 * and {@link #addRoleQualification(Object, java.util.Map)} for building the permission and role maps to send with
 * the permission check. Subclasses can override these methods to add additional attributes
 * </p>
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
@BeanTag(name = "viewAuthorizer")
public class ViewAuthorizerBase extends DataObjectAuthorizerBase implements ViewAuthorizer {
    private static final long serialVersionUID = -2687378084630965412L;
    private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(ViewAuthorizerBase.class);

    private ConfigurationService configurationService;
    private RequestAuthorizationCache requestAuthorizationCache;

    /**
     * Removes the edit action flag when the user is not authorized to edit the view.
     *
     * @see ViewAuthorizer#getActionFlags(org.kuali.rice.krad.uif.view.View, org.kuali.rice.krad.uif.view.ViewModel,
     * org.kuali.rice.kim.api.identity.Person, java.util.Set<java.lang.String>)
     */
    public Set<String> getActionFlags(View view, ViewModel model, Person user, Set<String> actions) {
        if (actions.contains(KRADConstants.KUALI_ACTION_CAN_EDIT) && !canEditView(view, model, user)) {
            actions.remove(KRADConstants.KUALI_ACTION_CAN_EDIT);
        }

        return actions;
    }

    /**
     * Filters the supplied edit modes down to those the user is authorized for
     * (an edit mode is removed only when a 'Use View' permission exists for it and
     * the user fails that check).
     *
     * @see ViewAuthorizer#getEditModes(org.kuali.rice.krad.uif.view.View, org.kuali.rice.krad.uif.view.ViewModel,
     * org.kuali.rice.kim.api.identity.Person, java.util.Set<java.lang.String>)
     */
    public Set<String> getEditModes(View view, ViewModel model, Person user, Set<String> editModes) {
        Set<String> unauthorizedEditModes = new HashSet<String>();

        Object dataObjectForContext = getDataObjectContext(view, model);

        // loop through supplied editModes and make sure KIM permission exists
        for (String editMode : editModes) {
            Map<String, String> additionalPermissionDetails = new HashMap<String, String>();
            additionalPermissionDetails.put(KimConstants.AttributeConstants.EDIT_MODE, editMode);
            additionalPermissionDetails.put(KimConstants.AttributeConstants.VIEW_ID, view.getId());

            boolean exists = permissionExistsByTemplate(dataObjectForContext, KRADConstants.KRAD_NAMESPACE,
                    KimConstants.PermissionTemplateNames.USE_VIEW, additionalPermissionDetails);
            if (exists) {
                boolean authorized = isAuthorizedByTemplate(dataObjectForContext, KRADConstants.KRAD_NAMESPACE,
                        KimConstants.PermissionTemplateNames.USE_VIEW, user.getPrincipalId(),
                        additionalPermissionDetails, null);
                if (!authorized) {
                    unauthorizedEditModes.add(editMode);
                }
            }
        }

        editModes.removeAll(unauthorizedEditModes);

        return editModes;
    }

    /**
     * Checks for an open view permission for the view id, and if found verifies the user has that permission
     *
     * @see ViewAuthorizer#canOpenView(View, ViewModel, org.kuali.rice.kim.api.identity.Person)
     */
    public boolean canOpenView(View view, ViewModel model, Person user) {
        Map<String, String> additionalPermissionDetails = new HashMap<String, String>();
        additionalPermissionDetails.put(KimConstants.AttributeConstants.NAMESPACE_CODE, view.getNamespaceCode());
        additionalPermissionDetails.put(KimConstants.AttributeConstants.VIEW_ID, model.getViewId());

        if (permissionExistsByTemplate(model, KRADConstants.KRAD_NAMESPACE,
                KimConstants.PermissionTemplateNames.OPEN_VIEW, additionalPermissionDetails)) {
            return isAuthorizedByTemplate(model, KRADConstants.KRAD_NAMESPACE,
                    KimConstants.PermissionTemplateNames.OPEN_VIEW, user.getPrincipalId(),
                    additionalPermissionDetails, null);
        }

        // no permission configured: open is allowed by default
        return true;
    }

    /**
     * Checks for an edit view permission for the view id, and if found verifies the user has that permission
     *
     * @see ViewAuthorizer#canEditView(org.kuali.rice.krad.uif.view.View, org.kuali.rice.krad.uif.view.ViewModel,
     * org.kuali.rice.kim.api.identity.Person)
     */
    public boolean canEditView(View view, ViewModel model, Person user) {
        Map<String, String> additionalPermissionDetails = new HashMap<String, String>();
        additionalPermissionDetails.put(KimConstants.AttributeConstants.NAMESPACE_CODE, view.getNamespaceCode());
        additionalPermissionDetails.put(KimConstants.AttributeConstants.VIEW_ID, model.getViewId());

        if (permissionExistsByTemplate(model, KRADConstants.KRAD_NAMESPACE,
                KimConstants.PermissionTemplateNames.EDIT_VIEW, additionalPermissionDetails)) {
            return isAuthorizedByTemplate(model, KRADConstants.KRAD_NAMESPACE,
                    KimConstants.PermissionTemplateNames.EDIT_VIEW, user.getPrincipalId(),
                    additionalPermissionDetails, null);
        }

        // no permission configured: edit is allowed by default
        return true;
    }

    /**
     * @see ViewAuthorizer#canUnmaskField(org.kuali.rice.krad.uif.view.View, org.kuali.rice.krad.uif.view.ViewModel,
     * org.kuali.rice.krad.uif.field.DataField, java.lang.String, org.kuali.rice.kim.api.identity.Person)
     */
    public boolean canUnmaskField(View view, ViewModel model, DataField field, String propertyName, Person user) {
        if (field.getDataFieldSecurity() == null) {
            return true;
        }

        // check mask authz flag is set
        AttributeSecurity attributeSecurity = field.getDataFieldSecurity().getAttributeSecurity();
        if (attributeSecurity == null || !attributeSecurity.isMask()) {
            return true;
        }

        // for non-production environments the ability to unmask can be disabled by a system parameter
        if (isNonProductionEnvAndUnmaskingTurnedOff()) {
            return false;
        }

        Object dataObjectForContext = getDataObjectContext(view, model);

        Map<String, String> permissionDetails = new HashMap<String, String>();
        permissionDetails = KRADUtils.getNamespaceAndComponentSimpleName(dataObjectForContext.getClass());
        permissionDetails.put(KimConstants.AttributeConstants.PROPERTY_NAME, propertyName);
        // TODO: check for namespace, component, attribute override on attribute security

        if (field.getComponentSecurity().getAdditionalPermissionDetails() != null) {
            permissionDetails.putAll(field.getComponentSecurity().getAdditionalPermissionDetails());
        }

        Map<String, String> roleQualifications = new HashMap<String, String>();
        if (field.getComponentSecurity().getAdditionalRoleQualifiers() != null) {
            roleQualifications.putAll(field.getComponentSecurity().getAdditionalRoleQualifiers());
        }

        return isAuthorizedByTemplate(dataObjectForContext, KRADConstants.KNS_NAMESPACE,
                KimConstants.PermissionTemplateNames.FULL_UNMASK_FIELD, user.getPrincipalId(), permissionDetails,
                roleQualifications);
    }

    /**
     * @see ViewAuthorizer#canPartialUnmaskField(org.kuali.rice.krad.uif.view.View, org.kuali.rice.krad.uif.view.ViewModel,
     * org.kuali.rice.krad.uif.field.DataField, java.lang.String, org.kuali.rice.kim.api.identity.Person)
     */
    public boolean canPartialUnmaskField(View view, ViewModel model, DataField field, String propertyName,
            Person user) {
        if (field.getDataFieldSecurity() == null) {
            return true;
        }

        // check partial mask authz flag is set
        AttributeSecurity attributeSecurity = field.getDataFieldSecurity().getAttributeSecurity();
        if (attributeSecurity == null || !attributeSecurity.isPartialMask()) {
            return true;
        }

        // for non-production environments the ability to unmask can be disabled by a system parameter
        if (isNonProductionEnvAndUnmaskingTurnedOff()) {
            return false;
        }

        Object dataObjectForContext = getDataObjectContext(view, model);

        Map<String, String> permissionDetails = new HashMap<String, String>();
        permissionDetails = KRADUtils.getNamespaceAndComponentSimpleName(dataObjectForContext.getClass());
        permissionDetails.put(KimConstants.AttributeConstants.PROPERTY_NAME, propertyName);
        // TODO: check for namespace, component, attribute override on attribute security

        if (field.getComponentSecurity().getAdditionalPermissionDetails() != null) {
            permissionDetails.putAll(field.getComponentSecurity().getAdditionalPermissionDetails());
        }

        Map<String, String> roleQualifications = new HashMap<String, String>();
        if (field.getComponentSecurity().getAdditionalRoleQualifiers() != null) {
            roleQualifications.putAll(field.getComponentSecurity().getAdditionalRoleQualifiers());
        }

        return isAuthorizedByTemplate(dataObjectForContext, KRADConstants.KNS_NAMESPACE,
                KimConstants.PermissionTemplateNames.PARTIAL_UNMASK_FIELD, user.getPrincipalId(), permissionDetails,
                roleQualifications);
    }

    /**
     * @see ViewAuthorizer#canEditField(org.kuali.rice.krad.uif.view.View, org.kuali.rice.krad.uif.view.ViewModel,
     * org.kuali.rice.krad.uif.field.Field, java.lang.String, org.kuali.rice.kim.api.identity.Person)
     */
    public boolean canEditField(View view, ViewModel model, Field field, String propertyName, Person user) {
        ComponentSecurity componentSecurity = field.getComponentSecurity();

        // check component security exists
        if (componentSecurity == null) {
            return true;
        }

        // first check hide flag is set (lower precedence)
        if (componentSecurity.isEditAuthz() == null && !isDataFieldAttributeSecurityHide(field)) {
            return true;
        }

        // then check edit authz is set (higher precedence)
        if (componentSecurity.isEditAuthz() != null && !componentSecurity.isEditAuthz().booleanValue()) {
            return true;
        }

        return isAuthorizedByTemplate(view, field, model, KimConstants.PermissionTemplateNames.EDIT_FIELD, user,
                null, null, false);
    }

    /**
     * @see ViewAuthorizer#canViewField(org.kuali.rice.krad.uif.view.View, org.kuali.rice.krad.uif.view.ViewModel,
     * org.kuali.rice.krad.uif.field.Field, java.lang.String, org.kuali.rice.kim.api.identity.Person)
     */
    public boolean canViewField(View view, ViewModel model, Field field, String propertyName, Person user) {
        ComponentSecurity componentSecurity = field.getComponentSecurity();

        // check component security exists
        if (componentSecurity == null) {
            return true;
        }

        // first check hide flag is set (lower precedence)
        if (componentSecurity.isViewAuthz() == null && !isDataFieldAttributeSecurityHide(field)) {
            return true;
        }

        // then check view authz is set (higher precedence)
        if (componentSecurity.isViewAuthz() != null && !componentSecurity.isViewAuthz().booleanValue()) {
            return true;
        }

        return isAuthorizedByTemplate(view, field, model, KimConstants.PermissionTemplateNames.VIEW_FIELD, user,
                null, null, false);
    }

    /**
     * @see ViewAuthorizer#canEditGroup(org.kuali.rice.krad.uif.view.View, org.kuali.rice.krad.uif.view.ViewModel,
     * org.kuali.rice.krad.uif.container.Group, java.lang.String, org.kuali.rice.kim.api.identity.Person)
     */
    public boolean canEditGroup(View view, ViewModel model, Group group, String groupId, Person user) {
        ComponentSecurity componentSecurity = group.getComponentSecurity();

        // check component security exists
        if (componentSecurity == null) {
            return true;
        }

        // check edit group authz flag is set
        if (componentSecurity.isEditAuthz() == null || !componentSecurity.isEditAuthz().booleanValue()) {
            return true;
        }

        return isAuthorizedByTemplate(view, group, model, KimConstants.PermissionTemplateNames.EDIT_GROUP, user,
                null, null, false);
    }

    /**
     * @see ViewAuthorizer#canViewGroup(org.kuali.rice.krad.uif.view.View, org.kuali.rice.krad.uif.view.ViewModel,
     * org.kuali.rice.krad.uif.container.Group, java.lang.String, org.kuali.rice.kim.api.identity.Person)
     */
    public boolean canViewGroup(View view, ViewModel model, Group group, String groupId, Person user) {
        ComponentSecurity componentSecurity = group.getComponentSecurity();

        // check component security exists
        if (componentSecurity == null) {
            return true;
        }

        // check view group authz flag is set
        if (componentSecurity.isViewAuthz() == null || !componentSecurity.isViewAuthz().booleanValue()) {
            return true;
        }

        return isAuthorizedByTemplate(view, group, model, KimConstants.PermissionTemplateNames.VIEW_GROUP, user,
                null, null, false);
    }

    /**
     * @see ViewAuthorizer#canEditWidget(org.kuali.rice.krad.uif.view.View, org.kuali.rice.krad.uif.view.ViewModel,
     * org.kuali.rice.krad.uif.widget.Widget, java.lang.String, org.kuali.rice.kim.api.identity.Person)
     */
    public boolean canEditWidget(View view, ViewModel model, Widget widget, String widgetId, Person user) {
        ComponentSecurity componentSecurity = widget.getComponentSecurity();

        // check component security exists
        if (componentSecurity == null) {
            return true;
        }

        // check edit widget authz flag is set
        if (componentSecurity.isEditAuthz() == null || !componentSecurity.isEditAuthz().booleanValue()) {
            return true;
        }

        return isAuthorizedByTemplate(view, widget, model, KimConstants.PermissionTemplateNames.EDIT_WIDGET, user,
                null, null, false);
    }

    /**
     * @see ViewAuthorizer#canViewWidget(org.kuali.rice.krad.uif.view.View, org.kuali.rice.krad.uif.view.ViewModel,
     * org.kuali.rice.krad.uif.widget.Widget, java.lang.String, org.kuali.rice.kim.api.identity.Person)
     */
    public boolean canViewWidget(View view, ViewModel model, Widget widget, String widgetId, Person user) {
        ComponentSecurity componentSecurity = widget.getComponentSecurity();

        // check component security exists
        if (componentSecurity == null) {
            return true;
        }

        // check view widget authz flag is set
        if (componentSecurity.isViewAuthz() == null || !componentSecurity.isViewAuthz().booleanValue()) {
            return true;
        }

        return isAuthorizedByTemplate(view, widget, model, KimConstants.PermissionTemplateNames.VIEW_WIDGET, user,
                null, null, false);
    }

    /**
     * @see ViewAuthorizer#canPerformAction(org.kuali.rice.krad.uif.view.View, org.kuali.rice.krad.uif.view.ViewModel,
     * org.kuali.rice.krad.uif.element.Action, java.lang.String, java.lang.String, org.kuali.rice.kim.api.identity.Person)
     */
    public boolean canPerformAction(View view, ViewModel model, Action action, String actionEvent, String actionId,
            Person user) {
        // check action authz flag is set
        if ((action.getActionSecurity() == null) || !action.getActionSecurity().isPerformActionAuthz()) {
            return true;
        }

        Map<String, String> additionalPermissionDetails = new HashMap<String, String>();
        if (StringUtils.isNotBlank(actionEvent)) {
            additionalPermissionDetails.put(KimConstants.AttributeConstants.ACTION_EVENT, actionEvent);
        }

        return isAuthorizedByTemplate(view, action, model, KimConstants.PermissionTemplateNames.PERFORM_ACTION,
                user, additionalPermissionDetails, null, false);
    }

    /**
     * Checks whether the user may edit the given collection line; authorized unless
     * the collection group security requests an edit-line authz check that fails.
     */
    public boolean canEditLine(View view, ViewModel model, CollectionGroup collectionGroup,
            String collectionPropertyName, Object line, Person user) {
        // check edit line authz flag is set
        if ((collectionGroup.getCollectionGroupSecurity() == null) || !collectionGroup.getCollectionGroupSecurity()
                .isEditLineAuthz()) {
            return true;
        }

        return isAuthorizedByTemplate(view, collectionGroup, model, KimConstants.PermissionTemplateNames.EDIT_LINE,
                user, null, null, false);
    }

    /**
     * Checks whether the user may view the given collection line; authorized unless
     * the collection group security requests a view-line authz check that fails.
     */
    public boolean canViewLine(View view, ViewModel model, CollectionGroup collectionGroup,
            String collectionPropertyName, Object line, Person user) {
        // check view line authz flag is set
        if ((collectionGroup.getCollectionGroupSecurity() == null) || !collectionGroup.getCollectionGroupSecurity()
                .isViewLineAuthz()) {
            return true;
        }

        return isAuthorizedByTemplate(view, collectionGroup, model, KimConstants.PermissionTemplateNames.VIEW_LINE,
                user, null, null, false);
    }

    /**
     * Checks whether the user may edit a field within a collection line; group id and
     * collection property name are added as permission details for the check.
     */
    public boolean canEditLineField(View view, ViewModel model, CollectionGroup collectionGroup,
            String collectionPropertyName, Object line, Field field, String propertyName, Person user) {
        FieldSecurity fieldSecurity = field.getFieldSecurity();

        // check field security exists
        if (fieldSecurity == null) {
            return true;
        }

        // first check hide flag is set (lower precedence)
        if (fieldSecurity.isEditInLineAuthz() == null && !isDataFieldAttributeSecurityHide(field)) {
            return true;
        }

        // then check edit line field authz flag is set (higher precedence)
        if (fieldSecurity.isEditInLineAuthz() != null && !fieldSecurity.isEditInLineAuthz().booleanValue()) {
            return true;
        }

        Map<String, String> additionalPermissionDetails = new HashMap<String, String>();
        additionalPermissionDetails.put(KimConstants.AttributeConstants.GROUP_ID, collectionGroup.getId());
        additionalPermissionDetails.put(KimConstants.AttributeConstants.COLLECTION_PROPERTY_NAME,
                collectionGroup.getPropertyName());

        return isAuthorizedByTemplate(view, field, model, KimConstants.PermissionTemplateNames.EDIT_LINE_FIELD,
                user, additionalPermissionDetails, null, false);
    }

    /**
     * Checks whether the user may view a field within a collection line; group id and
     * collection property name are added as permission details for the check.
     */
    public boolean canViewLineField(View view, ViewModel model, CollectionGroup collectionGroup,
            String collectionPropertyName, Object line, Field field, String propertyName, Person user) {
        FieldSecurity fieldSecurity = field.getFieldSecurity();

        // check field security exists
        if (fieldSecurity == null) {
            return true;
        }

        // first check hide flag is set (lower precedence)
        if (fieldSecurity.isViewInLineAuthz() == null && !isDataFieldAttributeSecurityHide(field)) {
            return true;
        }

        // then check view line field authz flag is set (higher precedence)
        if (fieldSecurity.isViewInLineAuthz() != null && !fieldSecurity.isViewInLineAuthz().booleanValue()) {
            return true;
        }

        Map<String, String> additionalPermissionDetails = new HashMap<String, String>();
        additionalPermissionDetails.put(KimConstants.AttributeConstants.GROUP_ID, collectionGroup.getId());
        additionalPermissionDetails.put(KimConstants.AttributeConstants.COLLECTION_PROPERTY_NAME,
                collectionGroup.getPropertyName());

        return isAuthorizedByTemplate(view, field, model, KimConstants.PermissionTemplateNames.VIEW_LINE_FIELD,
                user, additionalPermissionDetails, null, false);
    }

    /**
     * Checks whether the user may perform an action on a collection line; group id,
     * collection property name and (when present) the action event are added as
     * permission details for the check.
     */
    public boolean canPerformLineAction(View view, ViewModel model, CollectionGroup collectionGroup,
            String collectionPropertyName, Object line, Action action, String actionEvent, String actionId,
            Person user) {
        // check perform line action authz flag is set
        if ((action.getActionSecurity() == null) || !action.getActionSecurity().isPerformLineActionAuthz()) {
            return true;
        }

        Map<String, String> additionalPermissionDetails = new HashMap<String, String>();
        additionalPermissionDetails.put(KimConstants.AttributeConstants.GROUP_ID, collectionGroup.getId());
        additionalPermissionDetails.put(KimConstants.AttributeConstants.COLLECTION_PROPERTY_NAME,
                collectionGroup.getPropertyName());
        if (StringUtils.isNotBlank(actionEvent)) {
            additionalPermissionDetails.put(KimConstants.AttributeConstants.ACTION_EVENT, actionEvent);
        }

        return isAuthorizedByTemplate(view, action, model, KimConstants.PermissionTemplateNames.PERFORM_LINE_ACTION,
                user, additionalPermissionDetails, null, false);
    }

    /**
     * Retrieves the object from the model that is used as the context for permission checks
     *
     * <p>
     * Used to derive namespace and component details. Subclasses can override to return the object to be used
     * </p>
     *
     * @param view view instance the permission checks are being done for
     * @param model model object containing the data and from which the data object should be pulled
     * @return data object instance to use
     */
    protected Object getDataObjectContext(View view, ViewModel model) {
        Object dataObject = model;

        if (StringUtils.isNotBlank(view.getDefaultBindingObjectPath())) {
            Object defaultObject = ObjectPropertyUtils.getPropertyValue(model,
                    view.getDefaultBindingObjectPath());
            if (defaultObject != null) {
                dataObject = defaultObject;
            }
        }

        return dataObject;
    }

    /**
     * Builds the permission details map for a field which includes the component namespace, component name, and
     * field id, in addition to property name for data binding fields
     *
     * @param view view instance the field belongs to
     * @param dataObject default object from the data model (used for subclasses to build details)
     * @param field field instance the details are being built for
     * @return permission details for the field
     */
    protected Map<String, String> getFieldPermissionDetails(View view, Object dataObject, Field field) {
        Map<String, String> permissionDetails = new HashMap<String, String>();

        permissionDetails.put(KimConstants.AttributeConstants.NAMESPACE_CODE, view.getNamespaceCode());
        permissionDetails.put(KimConstants.AttributeConstants.VIEW_ID, view.getId());
        permissionDetails.put(KimConstants.AttributeConstants.FIELD_ID, field.getId());

        if (field instanceof DataBinding) {
            permissionDetails.put(KimConstants.AttributeConstants.PROPERTY_NAME,
                    ((DataBinding) field).getPropertyName());
        }

        return permissionDetails;
    }

    /**
     * Builds the permission details map for a group which includes the component namespace, component name, and
     * group id, in addition to property name for collection groups
     *
     * @param view view instance the group belongs to
     * @param dataObject default object from the data model (used for subclasses to build details)
     * @param group group instance the details are being built for
     * @return permission details for the group
     */
    protected Map<String, String> getGroupPermissionDetails(View view, Object dataObject, Group group) {
        Map<String, String> permissionDetails = new HashMap<String, String>();

        permissionDetails.put(KimConstants.AttributeConstants.NAMESPACE_CODE, view.getNamespaceCode());
        permissionDetails.put(KimConstants.AttributeConstants.VIEW_ID, view.getId());
        permissionDetails.put(KimConstants.AttributeConstants.GROUP_ID, group.getId());

        if (group instanceof CollectionGroup) {
            permissionDetails.put(KimConstants.AttributeConstants.COLLECTION_PROPERTY_NAME,
                    ((CollectionGroup) group).getPropertyName());
        }

        return permissionDetails;
    }

    /**
     * Builds the permission details map for a widget which includes the namespace, view id, and
     * widget id
     *
     * @param view view instance the widget belongs to
     * @param dataObject default object from the data model (used for subclasses to build details)
     * @param widget group instance the details are being built for
     * @return permission details for group
     */
    protected Map<String, String> getWidgetPermissionDetails(View view, Object dataObject, Widget widget) {
        Map<String, String> permissionDetails = new HashMap<String, String>();

        permissionDetails.put(KimConstants.AttributeConstants.NAMESPACE_CODE, view.getNamespaceCode());
        permissionDetails.put(KimConstants.AttributeConstants.VIEW_ID, view.getId());
        permissionDetails.put(KimConstants.AttributeConstants.WIDGET_ID, widget.getId());

        return permissionDetails;
    }

    /**
     * Builds the permission details map for an action which includes the namespace, view id, and
     * action id and event
     *
     * @param view view instance the widget belongs to
     * @param dataObject default object from the data model (used for subclasses to build details)
     * @param action action instance the details are being built for
     * @return permission details for action
     */
    protected Map<String, String> getActionPermissionDetails(View view, Object dataObject, Action action) {
        Map<String, String> permissionDetails = new HashMap<String, String>();

        permissionDetails.put(KimConstants.AttributeConstants.NAMESPACE_CODE, view.getNamespaceCode());
        permissionDetails.put(KimConstants.AttributeConstants.VIEW_ID, view.getId());
        permissionDetails.put(KimConstants.AttributeConstants.FIELD_ID, action.getId());

        return permissionDetails;
    }

    /**
     * Performs a permission check for the given template name in the context of the given view and component
     *
     * <p>
     * First standard permission details are added based on the type of component the permission check is being
     * done for.
     * Then the {@link ComponentSecurity} of the given component is used to pick up additional permission details and
     * role qualifiers.
     * </p>
     *
     * @param view view instance the component belongs to
     * @param component component instance the permission check is being done for
     * @param model object containing the views data
     * @param permissionTemplateName template name for the permission to check
     * @param user user to perform the authorization for
     * @param additionalPermissionDetails additional key/value pairs to pass with the permission details
     * @param additionalRoleQualifications additional key/value paris to pass with the role qualifiers
     * @param checkPermissionExistence boolean indicating whether the existence of the permission should be checked
     * before performing the authorization
     * @return whether or not the user has authorization; this will be the case if the user has been
     * granted the permission or checkPermissionExistence is true and the permission does not exist
     */
    protected boolean isAuthorizedByTemplate(View view, Component component, ViewModel model,
            String permissionTemplateName, Person user, Map<String, String> additionalPermissionDetails,
            Map<String, String> additionalRoleQualifications, boolean checkPermissionExistence) {
        Map<String, String> permissionDetails = new HashMap<String, String>();
        Map<String, String> roleQualifications = new HashMap<String, String>();

        if (additionalPermissionDetails != null) {
            permissionDetails.putAll(additionalPermissionDetails);
        }

        if (additionalRoleQualifications != null) {
            roleQualifications.putAll(additionalRoleQualifications);
        }

        Object dataObjectForContext = getDataObjectContext(view, model);

        // add permission details depending on the type of component
        if (component instanceof Field) {
            permissionDetails.putAll(getFieldPermissionDetails(view, dataObjectForContext, (Field) component));
        } else if (component instanceof Group) {
            permissionDetails.putAll(getGroupPermissionDetails(view, dataObjectForContext, (Group) component));
        } else if (component instanceof Widget) {
            permissionDetails.putAll(getWidgetPermissionDetails(view, dataObjectForContext, (Widget) component));
        } else if (component instanceof Action) {
            permissionDetails.putAll(getActionPermissionDetails(view, dataObjectForContext, (Action) component));
        }

        // pick up additional attributes and overrides from component security
        ComponentSecurity componentSecurity = component.getComponentSecurity();

        // add configured overrides
        if (componentSecurity != null) {
            if (StringUtils.isNotBlank(componentSecurity.getNamespaceAttribute())) {
                permissionDetails.put(KimConstants.AttributeConstants.NAMESPACE_CODE,
                        componentSecurity.getNamespaceAttribute());
            }

            if (StringUtils.isNotBlank(componentSecurity.getComponentAttribute())) {
                permissionDetails.put(KimConstants.AttributeConstants.COMPONENT_NAME,
                        componentSecurity.getComponentAttribute());
            }

            if (StringUtils.isNotBlank(componentSecurity.getIdAttribute())) {
                if (component instanceof Field) {
                    permissionDetails.put(KimConstants.AttributeConstants.FIELD_ID,
                            componentSecurity.getIdAttribute());
                } else if (component instanceof Group) {
                    permissionDetails.put(KimConstants.AttributeConstants.GROUP_ID,
                            componentSecurity.getIdAttribute());
                } else if (component instanceof Widget) {
                    permissionDetails.put(KimConstants.AttributeConstants.WIDGET_ID,
                            componentSecurity.getIdAttribute());
                }
            }

            if (componentSecurity.getAdditionalPermissionDetails() != null) {
                permissionDetails.putAll(componentSecurity.getAdditionalPermissionDetails());
            }

            if (componentSecurity.getAdditionalRoleQualifiers() != null) {
                roleQualifications.putAll(componentSecurity.getAdditionalRoleQualifiers());
            }
        }

        boolean result = true;
        if (!checkPermissionExistence || (checkPermissionExistence && permissionExistsByTemplate(
                dataObjectForContext, KRADConstants.KRAD_NAMESPACE, permissionTemplateName, permissionDetails))) {
            result = isAuthorizedByTemplate(dataObjectForContext, KRADConstants.KRAD_NAMESPACE,
                    permissionTemplateName, user.getPrincipalId(), permissionDetails, roleQualifications);

            if (LOG.isDebugEnabled()) {
                LOG.debug("Performed permission check for: " + permissionTemplateName + " and got result: "
                        + result);
            }
        }

        return result;
    }

    /**
     * Indicates whether the environment is non production and unmasking is not enabled by system parameter
     *
     * @return true if unmasking is turned off, false if unmasking is allowed
     */
    private boolean isNonProductionEnvAndUnmaskingTurnedOff() {
        return !getConfigurationService().getPropertyValueAsString(KRADConstants.PROD_ENVIRONMENT_CODE_KEY).
                equalsIgnoreCase(getConfigurationService().getPropertyValueAsString(KRADConstants.ENVIRONMENT_KEY))
                && !getConfigurationService().getPropertyValueAsBoolean(
                        KRADConstants.ENABLE_NONPRODUCTION_UNMASKING);
    }

    /**
     * Determines whether {@code AttributeSecurity} is set on the {@code DataField} and if it is, whether its hide
     * attribute is enabled.
     *
     * @param field the field to check for the hide attribute
     *
     * @return true if the hide attribute is enabled, false otherwise
     */
    private boolean isDataFieldAttributeSecurityHide(Field field) {
        if (field instanceof DataField) {
            DataField dataField = (DataField) field;
            DataFieldSecurity dataFieldSecurity = dataField.getDataFieldSecurity();

            if (dataFieldSecurity == null) {
                return false;
            }

            if (dataFieldSecurity.getAttributeSecurity() == null || !dataFieldSecurity.getAttributeSecurity()
                    .isHide()) {
                return false;
            }

            return true;
        } else {
            return false;
        }
    }

    /**
     * Returns the injected configuration service, falling back to the core service locator
     * when none has been set.
     */
    @BeanTagAttribute(name="configurationService",type= BeanTagAttribute.AttributeType.SINGLEBEAN)
    protected ConfigurationService getConfigurationService() {
        if (configurationService == null) {
            return CoreApiServiceLocator.getKualiConfigurationService();
        }
        return configurationService;
    }

    public void setConfigurationService(ConfigurationService configurationService) {
        this.configurationService = configurationService;
    }

    protected RequestAuthorizationCache getRequestAuthorizationCache() {
        return requestAuthorizationCache;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setRequestAuthorizationCache(RequestAuthorizationCache requestAuthorizationCache) {
        this.requestAuthorizationCache = requestAuthorizationCache;
    }
}
/*
 * Copyright 2008-2013 Bas Leijdekkers
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.siyeh.ig.psiutils;

import com.intellij.openapi.util.Comparing;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.DirectClassInheritorsSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Query;
import com.siyeh.HardcodedMethodConstants;
import consulo.java.module.util.JavaClassNames;
import org.jetbrains.annotations.NonNls;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.*;

/**
 * Utility that, given a variable or a method, computes the weakest (most general)
 * classes that could be used as its declared/return type while keeping every visible
 * usage valid. Used by the "type may be weakened" inspection.
 */
public class WeakestTypeFinder {

    // utility class: no instances
    private WeakestTypeFinder() {}

    /**
     * Computes the set of weakest classes that could replace the type of
     * {@code variableOrMethod} (a {@link PsiVariable}'s type or a {@link PsiMethod}'s
     * return type) based on every reference found in its use scope.
     *
     * @param variableOrMethod a {@link PsiVariable} or {@link PsiMethod}; anything else throws
     * @param useRighthandTypeAsWeakestTypeInAssignments whether assignments should pin the
     *        type to the right-hand side's type
     * @param useParameterizedTypeForCollectionMethods whether calls like {@code Map.get}
     *        should use the collection's type argument instead of the raw parameter type
     * @return the weakest candidate classes, or an empty collection when no weakening is possible
     * @throws IllegalArgumentException when {@code variableOrMethod} is neither a variable nor a method
     */
    @Nonnull
    public static Collection<PsiClass> calculateWeakestClassesNecessary(@Nonnull PsiElement variableOrMethod,
            boolean useRighthandTypeAsWeakestTypeInAssignments, boolean useParameterizedTypeForCollectionMethods) {
        final PsiType variableOrMethodType;
        if (variableOrMethod instanceof PsiVariable) {
            final PsiVariable variable = (PsiVariable)variableOrMethod;
            variableOrMethodType = variable.getType();
        }
        else if (variableOrMethod instanceof PsiMethod) {
            final PsiMethod method = (PsiMethod)variableOrMethod;
            variableOrMethodType = method.getReturnType();
            // a void method has no return type to weaken
            if (PsiType.VOID.equals(variableOrMethodType)) {
                return Collections.emptyList();
            }
        }
        else {
            throw new IllegalArgumentException("PsiMethod or PsiVariable expected: " + variableOrMethod);
        }
        // only class types (not primitives/arrays) can be weakened
        if (!(variableOrMethodType instanceof PsiClassType)) {
            return Collections.emptyList();
        }
        final PsiClassType variableOrMethodClassType = (PsiClassType)variableOrMethodType;
        final PsiClass variableOrMethodClass = variableOrMethodClassType.resolve();
        if (variableOrMethodClass == null) {
            return Collections.emptyList();
        }
        Set<PsiClass> weakestTypeClasses = new HashSet<PsiClass>();
        final GlobalSearchScope scope = variableOrMethod.getResolveScope();
        final JavaPsiFacade facade = JavaPsiFacade.getInstance(variableOrMethod.getProject());
        final PsiClass lowerBoundClass;
        if (variableOrMethod instanceof PsiResourceVariable) {
            // try-with-resources variables can never be weakened below AutoCloseable,
            // and the implicit close() call must stay resolvable
            lowerBoundClass = facade.findClass(JavaClassNames.JAVA_LANG_AUTO_CLOSEABLE, scope);
            if (lowerBoundClass == null || variableOrMethodClass.equals(lowerBoundClass)) {
                return Collections.emptyList();
            }
            weakestTypeClasses.add(lowerBoundClass);
            final PsiResourceVariable resourceVariable = (PsiResourceVariable)variableOrMethod;
            // synthesize the implicit "var.close()" call so it is checked like a real usage
            @NonNls final String methodCallText = resourceVariable.getName() + ".close()";
            final PsiMethodCallExpression methodCallExpression =
                (PsiMethodCallExpression)facade.getElementFactory().createExpressionFromText(methodCallText,
                    resourceVariable.getParent());
            if (!findWeakestType(methodCallExpression, weakestTypeClasses)) {
                return Collections.emptyList();
            }
        }
        else {
            // ordinary variables/methods are bounded below by java.lang.Object
            lowerBoundClass = facade.findClass(JavaClassNames.JAVA_LANG_OBJECT, scope);
            if (lowerBoundClass == null || variableOrMethodClass.equals(lowerBoundClass)) {
                return Collections.emptyList();
            }
            weakestTypeClasses.add(lowerBoundClass);
        }
        final Query<PsiReference> query = ReferencesSearch.search(variableOrMethod, variableOrMethod.getUseScope());
        boolean hasUsages = false;
        // every usage narrows the candidate set; an empty result at any point aborts
        for (PsiReference reference : query) {
            if (reference == null) {
                continue;
            }
            hasUsages = true;
            PsiElement referenceElement = reference.getElement();
            PsiElement referenceParent = referenceElement.getParent();
            // fold "x.foo" into the enclosing call so the call expression is analyzed
            if (referenceParent instanceof PsiMethodCallExpression) {
                referenceElement = referenceParent;
                referenceParent = referenceElement.getParent();
            }
            final PsiElement referenceGrandParent = referenceParent.getParent();
            if (referenceParent instanceof PsiExpressionList) {
                // usage as a method-call argument
                if (!(referenceGrandParent instanceof PsiMethodCallExpression)) {
                    return Collections.emptyList();
                }
                final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)referenceGrandParent;
                if (!findWeakestType(referenceElement, methodCallExpression,
                        useParameterizedTypeForCollectionMethods, weakestTypeClasses)) {
                    return Collections.emptyList();
                }
            }
            else if (referenceGrandParent instanceof PsiMethodCallExpression) {
                // usage as the qualifier of a method call
                final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)referenceGrandParent;
                if (!findWeakestType(methodCallExpression, weakestTypeClasses)) {
                    return Collections.emptyList();
                }
            }
            else if (referenceParent instanceof PsiAssignmentExpression) {
                final PsiAssignmentExpression assignmentExpression = (PsiAssignmentExpression)referenceParent;
                if (!findWeakestType(referenceElement, assignmentExpression,
                        useRighthandTypeAsWeakestTypeInAssignments, weakestTypeClasses)) {
                    return Collections.emptyList();
                }
            }
            else if (referenceParent instanceof PsiVariable) {
                // used as the initializer of another variable: that variable's type constrains us
                final PsiVariable variable = (PsiVariable)referenceParent;
                final PsiType type = variable.getType();
                if (!checkType(type, weakestTypeClasses)) {
                    return Collections.emptyList();
                }
            }
            else if (referenceParent instanceof PsiForeachStatement) {
                final PsiForeachStatement foreachStatement = (PsiForeachStatement)referenceParent;
                if (!Comparing.equal(foreachStatement.getIteratedValue(), referenceElement)) {
                    return Collections.emptyList();
                }
                // iterated in a for-each: java.lang.Iterable is sufficient
                final PsiClass javaLangIterableClass = facade.findClass(JavaClassNames.JAVA_LANG_ITERABLE, scope);
                if (javaLangIterableClass == null) {
                    return Collections.emptyList();
                }
                checkClass(javaLangIterableClass, weakestTypeClasses);
            }
            else if (referenceParent instanceof PsiReturnStatement) {
                // returned from a method: the method's declared return type constrains us
                final PsiMethod containingMethod = PsiTreeUtil.getParentOfType(referenceParent, PsiMethod.class);
                if (containingMethod == null) {
                    return Collections.emptyList();
                }
                final PsiType type = containingMethod.getReturnType();
                if (!checkType(type, weakestTypeClasses)) {
                    return Collections.emptyList();
                }
            }
            else if (referenceParent instanceof PsiReferenceExpression) {
                // field access, method call is handled above.
                final PsiReferenceExpression referenceExpression = (PsiReferenceExpression)referenceParent;
                final PsiElement target = referenceExpression.resolve();
                if (!(target instanceof PsiField)) {
                    return Collections.emptyList();
                }
                final PsiField field = (PsiField)target;
                final PsiClass containingClass = field.getContainingClass();
                checkClass(containingClass, weakestTypeClasses);
            }
            else if (referenceParent instanceof PsiArrayInitializerExpression) {
                final PsiArrayInitializerExpression arrayInitializerExpression =
                    (PsiArrayInitializerExpression)referenceParent;
                if (!findWeakestType(arrayInitializerExpression, weakestTypeClasses)) {
                    return Collections.emptyList();
                }
            }
            else if (referenceParent instanceof PsiThrowStatement) {
                final PsiThrowStatement throwStatement = (PsiThrowStatement)referenceParent;
                if (!findWeakestType(throwStatement, variableOrMethodClass, weakestTypeClasses)) {
                    return Collections.emptyList();
                }
            }
            else if (referenceParent instanceof PsiConditionalExpression) {
                final PsiConditionalExpression conditionalExpression = (PsiConditionalExpression)referenceParent;
                final PsiExpression condition = conditionalExpression.getCondition();
                // used as the boolean condition: cannot be weakened
                if (referenceElement.equals(condition)) {
                    return Collections.emptyList();
                }
                final PsiType type = ExpectedTypeUtils.findExpectedType(conditionalExpression, true);
                if (!checkType(type, weakestTypeClasses)) {
                    return Collections.emptyList();
                }
            }
            else if (referenceParent instanceof PsiBinaryExpression) {
                // strings only
                final PsiBinaryExpression binaryExpression = (PsiBinaryExpression)referenceParent;
                final PsiType type = binaryExpression.getType();
                if (variableOrMethodType.equals(type)) {
                    if (!checkType(type, weakestTypeClasses)) {
                        return Collections.emptyList();
                    }
                }
            }
            else if (referenceParent instanceof PsiSwitchStatement) {
                // only enums and primitives can be a switch expression
                return Collections.emptyList();
            }
            else if (referenceParent instanceof PsiPrefixExpression) {
                // only primitives and boxed types are the target of a prefix expression
                return Collections.emptyList();
            }
            else if (referenceParent instanceof PsiPostfixExpression) {
                // only primitives and boxed types are the target of a postfix expression
                return Collections.emptyList();
            }
            else if (referenceParent instanceof PsiIfStatement) {
                // only booleans and boxed Booleans are the condition of an if statement
                return Collections.emptyList();
            }
            else if (referenceParent instanceof PsiForStatement) {
                // only booleans and boxed Booleans are the condition of a for statement
                return Collections.emptyList();
            }
            else if (referenceParent instanceof PsiNewExpression) {
                final PsiNewExpression newExpression = (PsiNewExpression)referenceParent;
                final PsiExpression qualifier = newExpression.getQualifier();
                if (qualifier != null) {
                    // qualified inner-class instantiation: the outer class constrains the qualifier
                    final PsiType type = newExpression.getType();
                    if (!(type instanceof PsiClassType)) {
                        return Collections.emptyList();
                    }
                    final PsiClassType classType = (PsiClassType)type;
                    final PsiClass innerClass = classType.resolve();
                    if (innerClass == null) {
                        return Collections.emptyList();
                    }
                    final PsiClass outerClass = innerClass.getContainingClass();
                    if (outerClass != null) {
                        checkClass(outerClass, weakestTypeClasses);
                    }
                }
            }
            // already as strong as the declared type, or no candidates remain: nothing to suggest
            if (weakestTypeClasses.contains(variableOrMethodClass) || weakestTypeClasses.isEmpty()) {
                return Collections.emptyList();
            }
        }
        if (!hasUsages) {
            return Collections.emptyList();
        }
        weakestTypeClasses = filterAccessibleClasses(weakestTypeClasses, variableOrMethod);
        return weakestTypeClasses;
    }

    /**
     * Narrows the candidates based on a usage as a method-call argument: the declared
     * parameter type (or, for known collection accessors, the collection's type
     * argument) is the constraint.
     *
     * @return false when the usage rules out any weakening
     */
    private static boolean findWeakestType(PsiElement referenceElement, PsiMethodCallExpression methodCallExpression,
            boolean useParameterizedTypeForCollectionMethods, Set<PsiClass> weakestTypeClasses) {
        if (!(referenceElement instanceof PsiExpression)) {
            return false;
        }
        final JavaResolveResult resolveResult = methodCallExpression.resolveMethodGenerics();
        final PsiMethod method = (PsiMethod)resolveResult.getElement();
        if (method == null) {
            return false;
        }
        final PsiSubstitutor substitutor = resolveResult.getSubstitutor();
        final PsiExpressionList expressionList = methodCallExpression.getArgumentList();
        final PsiExpression[] expressions = expressionList.getExpressions();
        final int index = ArrayUtil.indexOf(expressions, referenceElement);
        if (index < 0) {
            return false;
        }
        final PsiParameterList parameterList = method.getParameterList();
        if (parameterList.getParametersCount() == 0) {
            return false;
        }
        final PsiParameter[] parameters = parameterList.getParameters();
        final PsiParameter parameter;
        final PsiType type;
        if (index < parameters.length) {
            parameter = parameters[index];
            type = parameter.getType();
        }
        else {
            // argument beyond the declared parameters must match a trailing varargs parameter
            parameter = parameters[parameters.length - 1];
            type = parameter.getType();
            if (!(type instanceof PsiEllipsisType)) {
                return false;
            }
        }
        if (!useParameterizedTypeForCollectionMethods) {
            return checkType(type, substitutor, weakestTypeClasses);
        }
        @NonNls final String methodName = method.getName();
        // for Map/Collection lookup methods whose parameter is declared as Object,
        // use the collection's actual type argument as the constraint instead
        if (HardcodedMethodConstants.REMOVE.equals(methodName) ||
            HardcodedMethodConstants.GET.equals(methodName) ||
            "containsKey".equals(methodName) ||
            "containsValue".equals(methodName) ||
            "contains".equals(methodName) ||
            HardcodedMethodConstants.INDEX_OF.equals(methodName) ||
            HardcodedMethodConstants.LAST_INDEX_OF.equals(methodName)) {
            final PsiClass containingClass = method.getContainingClass();
            if (InheritanceUtil.isInheritor(containingClass, JavaClassNames.JAVA_UTIL_MAP) ||
                InheritanceUtil.isInheritor(containingClass, JavaClassNames.JAVA_UTIL_COLLECTION)) {
                final PsiReferenceExpression methodExpression = methodCallExpression.getMethodExpression();
                final PsiExpression qualifier = methodExpression.getQualifierExpression();
                if (qualifier != null) {
                    final PsiType qualifierType = qualifier.getType();
                    if (qualifierType instanceof PsiClassType) {
                        final PsiClassType classType = (PsiClassType)qualifierType;
                        final PsiType[] parameterTypes = classType.getParameters();
                        if (parameterTypes.length > 0) {
                            final PsiType parameterType = parameterTypes[0];
                            final PsiExpression expression = expressions[index];
                            final PsiType expressionType = expression.getType();
                            if (expressionType == null || parameterType == null ||
                                !parameterType.isAssignableFrom(expressionType)) {
                                return false;
                            }
                            return checkType(parameterType, substitutor, weakestTypeClasses);
                        }
                    }
                }
            }
        }
        return checkType(type, substitutor, weakestTypeClasses);
    }

    /**
     * Records the class of {@code type} as a constraint, resolving type parameters
     * through {@code substitutor} first.
     */
    private static boolean checkType(@javax.annotation.Nullable PsiType type, @Nonnull PsiSubstitutor substitutor,
            @Nonnull Collection<PsiClass> weakestTypeClasses) {
        if (!(type instanceof PsiClassType)) {
            return false;
        }
        final PsiClassType classType = (PsiClassType)type;
        final PsiClass aClass = classType.resolve();
        if (aClass == null) {
            return false;
        }
        if (aClass instanceof PsiTypeParameter) {
            // substitute the type parameter and re-check the concrete type
            final PsiType substitution = substitutor.substitute((PsiTypeParameter)aClass);
            return checkType(substitution, weakestTypeClasses);
        }
        checkClass(aClass, weakestTypeClasses);
        return true;
    }

    /**
     * Narrows the candidates for a usage as the qualifier of a method call: the call
     * could resolve to any super-declaration of the method whose return type and
     * throws clause remain compatible, so those declaring classes are candidates.
     */
    private static boolean findWeakestType(PsiMethodCallExpression methodCallExpression,
            Set<PsiClass> weakestTypeClasses) {
        final PsiReferenceExpression methodExpression = methodCallExpression.getMethodExpression();
        final PsiElement target = methodExpression.resolve();
        if (!(target instanceof PsiMethod)) {
            return false;
        }
        final PsiMethod method = (PsiMethod)target;
        final PsiReferenceList throwsList = method.getThrowsList();
        final PsiClassType[] classTypes = throwsList.getReferencedTypes();
        final Collection<PsiClassType> thrownTypes = new HashSet<PsiClassType>(Arrays.asList(classTypes));
        final List<PsiMethod> superMethods = findAllSuperMethods(method);
        boolean checked = false;
        if (!superMethods.isEmpty()) {
            final PsiType expectedType = ExpectedTypeUtils.findExpectedType(methodCallExpression, false);
            for (PsiMethod superMethod : superMethods) {
                final PsiType returnType = superMethod.getReturnType();
                // a super-declaration is only usable if its return type still fits the call site
                if (expectedType != null && returnType != null && !expectedType.isAssignableFrom(returnType)) {
                    continue;
                }
                // ... and it must not introduce checked exceptions the call site cannot handle
                if (throwsIncompatibleException(superMethod, thrownTypes)) {
                    continue;
                }
                if (!PsiUtil.isAccessible(superMethod, methodCallExpression, null)) {
                    continue;
                }
                final PsiClass containingClass = superMethod.getContainingClass();
                checkClass(containingClass, weakestTypeClasses);
                checked = true;
            }
        }
        if (!checked) {
            final PsiType returnType = method.getReturnType();
            if (returnType instanceof PsiClassType) {
                final PsiClassType classType = (PsiClassType)returnType;
                final PsiClass aClass = classType.resolve();
                // a type-parameter return type would change meaning when weakened; bail out
                if (aClass instanceof PsiTypeParameter) {
                    return false;
                }
            }
            final PsiClass containingClass = method.getContainingClass();
            checkClass(containingClass, weakestTypeClasses);
        }
        return true;
    }

    /**
     * Returns all transitive super-declarations of {@code method}, most-general first.
     */
    private static List<PsiMethod> findAllSuperMethods(PsiMethod method) {
        // NOTE(review): raw "new ArrayList()" — should be new ArrayList<PsiMethod>() for type safety
        final List<PsiMethod> methods = findAllSuperMethods(method, new ArrayList());
        Collections.reverse(methods);
        return methods;
    }

    // recursive accumulator for findAllSuperMethods(PsiMethod)
    private static List<PsiMethod> findAllSuperMethods(PsiMethod method, List<PsiMethod> result) {
        final PsiMethod[] superMethods = method.findSuperMethods();
        Collections.addAll(result, superMethods);
        for (PsiMethod superMethod : superMethods) {
            findAllSuperMethods(superMethod, result);
        }
        return result;
    }

    /**
     * Narrows the candidates for a usage inside a plain ({@code =}) assignment.
     */
    private static boolean findWeakestType(PsiElement referenceElement, PsiAssignmentExpression assignmentExpression,
            boolean useRighthandTypeAsWeakestTypeInAssignments, Set<PsiClass> weakestTypeClasses) {
        final IElementType tokenType = assignmentExpression.getOperationTokenType();
        // compound assignments (+=, etc.) require the original type; only plain = is weakenable
        if (JavaTokenType.EQ != tokenType) {
            return false;
        }
        final PsiExpression lhs = assignmentExpression.getLExpression();
        final PsiExpression rhs = assignmentExpression.getRExpression();
        final PsiType lhsType = lhs.getType();
        if (referenceElement.equals(rhs)) {
            // element on the right: the left-hand side's type is the constraint
            if (!checkType(lhsType, weakestTypeClasses)) {
                return false;
            }
        }
        else if (useRighthandTypeAsWeakestTypeInAssignments) {
            if (rhs == null) {
                return false;
            }
            // NOTE(review): this condition is ALWAYS true — no expression is both a
            // PsiNewExpression and a PsiTypeCastExpression, so one of the negations always
            // holds. The intent was presumably && (skip the type check for new/cast RHS);
            // confirm against upstream before changing.
            if (!(rhs instanceof PsiNewExpression) || !(rhs instanceof PsiTypeCastExpression)) {
                final PsiType rhsType = rhs.getType();
                if (lhsType == null || lhsType.equals(rhsType)) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Narrows the candidates for a usage inside an array initializer: the array's
     * component type is the constraint.
     */
    private static boolean findWeakestType(PsiArrayInitializerExpression arrayInitializerExpression,
            Set<PsiClass> weakestTypeClasses) {
        final PsiType type = arrayInitializerExpression.getType();
        if (!(type instanceof PsiArrayType)) {
            return false;
        }
        final PsiArrayType arrayType = (PsiArrayType)type;
        final PsiType componentType = arrayType.getComponentType();
        return checkType(componentType, weakestTypeClasses);
    }

    /**
     * Narrows the candidates for a usage in a {@code throw}: the thrown value must stay
     * an unchecked exception, or match one of the enclosing method's declared throws types.
     */
    private static boolean findWeakestType(PsiThrowStatement throwStatement, PsiClass variableOrMethodClass,
            Set<PsiClass> weakestTypeClasses) {
        final PsiClassType runtimeExceptionType =
            TypeUtils.getType(JavaClassNames.JAVA_LANG_RUNTIME_EXCEPTION, throwStatement);
        final PsiClass runtimeExceptionClass = runtimeExceptionType.resolve();
        if (runtimeExceptionClass != null &&
            InheritanceUtil.isInheritorOrSelf(variableOrMethodClass, runtimeExceptionClass, true)) {
            // unchecked exception: RuntimeException itself is the weakest usable type
            if (!checkType(runtimeExceptionType, weakestTypeClasses)) {
                return false;
            }
        }
        else {
            final PsiMethod method = PsiTreeUtil.getParentOfType(throwStatement, PsiMethod.class);
            if (method == null) {
                return false;
            }
            final PsiReferenceList throwsList = method.getThrowsList();
            final PsiClassType[] referencedTypes = throwsList.getReferencedTypes();
            boolean checked = false;
            // checked exception: the first compatible declared throws type is the constraint
            for (PsiClassType referencedType : referencedTypes) {
                final PsiClass throwableClass = referencedType.resolve();
                if (throwableClass == null ||
                    !InheritanceUtil.isInheritorOrSelf(variableOrMethodClass, throwableClass, true)) {
                    continue;
                }
                if (!checkType(referencedType, weakestTypeClasses)) {
                    continue;
                }
                checked = true;
                break;
            }
            if (!checked) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns true when {@code method} declares a checked exception that is neither in
     * {@code exceptionTypes} (or a subtype of one) nor unchecked — i.e. switching to it
     * would force new checked exceptions on the call site.
     */
    private static boolean throwsIncompatibleException(PsiMethod method, Collection<PsiClassType> exceptionTypes) {
        final PsiReferenceList superThrowsList = method.getThrowsList();
        final PsiClassType[] superThrownTypes = superThrowsList.getReferencedTypes();
        outer:
        for (PsiClassType superThrownType : superThrownTypes) {
            if (exceptionTypes.contains(superThrownType)) {
                continue;
            }
            for (PsiClassType exceptionType : exceptionTypes) {
                if (InheritanceUtil.isInheritor(superThrownType, exceptionType.getCanonicalText())) {
                    continue outer;
                }
            }
            final PsiClass aClass = superThrownType.resolve();
            if (aClass == null) {
                return true;
            }
            // unchecked exceptions (RuntimeException/Error) never break compatibility
            if (!InheritanceUtil.isInheritor(aClass, JavaClassNames.JAVA_LANG_RUNTIME_EXCEPTION) &&
                !InheritanceUtil.isInheritor(aClass, JavaClassNames.JAVA_LANG_ERROR)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Records the class of {@code type} as a constraint; returns false when the type
     * is not a resolvable class type.
     */
    private static boolean checkType(@Nullable PsiType type, @Nonnull Collection<PsiClass> weakestTypeClasses) {
        if (!(type instanceof PsiClassType)) {
            return false;
        }
        final PsiClassType classType = (PsiClassType)type;
        final PsiClass aClass = classType.resolve();
        if (aClass == null) {
            return false;
        }
        checkClass(aClass, weakestTypeClasses);
        return true;
    }

    /**
     * Drops candidates that are not accessible from {@code context}, replacing each
     * inaccessible class with a visible inheritor when one exists.
     */
    public static Set<PsiClass> filterAccessibleClasses(Set<PsiClass> weakestTypeClasses, PsiElement context) {
        final Set<PsiClass> result = new HashSet<PsiClass>();
        for (PsiClass weakestTypeClass : weakestTypeClasses) {
            if (PsiUtil.isAccessible(weakestTypeClass, context, null)) {
                result.add(weakestTypeClass);
                continue;
            }
            final PsiClass visibleInheritor = getVisibleInheritor(weakestTypeClass, context);
            if (visibleInheritor != null) {
                result.add(visibleInheritor);
            }
        }
        return result;
    }

    /**
     * Finds an inheritor of {@code superClass} that is accessible from {@code context},
     * descending recursively through inaccessible inheritors.
     */
    @javax.annotation.Nullable
    private static PsiClass getVisibleInheritor(PsiClass superClass, PsiElement context) {
        final Query<PsiClass> search = DirectClassInheritorsSearch.search(superClass, context.getResolveScope());
        for (PsiClass aClass : search) {
            // NOTE(review): "superClass.isInheritor(aClass, true)" reads backwards for a
            // search that yields inheritors of superClass — confirm the intended direction
            // against the upstream implementation before touching it.
            if (superClass.isInheritor(aClass, true)) {
                if (PsiUtil.isAccessible(aClass, context, null)) {
                    return aClass;
                }
                else {
                    return getVisibleInheritor(aClass, context);
                }
            }
        }
        return null;
    }

    /**
     * Merges {@code aClass} into the candidate set, keeping only the most specific
     * common constraints: subclasses replace their superclasses, and unrelated classes
     * eliminate each other.
     */
    private static void checkClass(@javax.annotation.Nullable PsiClass aClass,
            @Nonnull Collection<PsiClass> weakestTypeClasses) {
        if (aClass == null) {
            return;
        }
        boolean shouldAdd = true;
        for (Iterator<PsiClass> iterator = weakestTypeClasses.iterator(); iterator.hasNext(); ) {
            final PsiClass weakestTypeClass = iterator.next();
            if (!weakestTypeClass.equals(aClass)) {
                if (aClass.isInheritor(weakestTypeClass, true)) {
                    // new class is more specific: it supersedes the existing candidate
                    iterator.remove();
                }
                else if (weakestTypeClass.isInheritor(aClass, true)) {
                    // existing candidate is already more specific: keep it, drop the new one
                    shouldAdd = false;
                }
                else {
                    // unrelated classes cannot both be satisfied
                    iterator.remove();
                    shouldAdd = false;
                }
            }
            else {
                shouldAdd = false;
            }
        }
        if (shouldAdd) {
            weakestTypeClasses.add(aClass);
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tez.mapreduce.client; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.TreeMap; import com.google.common.collect.Maps; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnsupportedFileSystemException; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.Text; import org.apache.hadoop.ipc.ProtocolSignature; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus; import org.apache.hadoop.mapreduce.ClusterMetrics; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.JobContext; import 
org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.JobStatus; import org.apache.hadoop.mapreduce.QueueAclsInfo; import org.apache.hadoop.mapreduce.QueueInfo; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.TaskCompletionEvent; import org.apache.hadoop.mapreduce.TaskReport; import org.apache.hadoop.mapreduce.TaskTrackerInfo; import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.mapreduce.TypeConverter; import org.apache.hadoop.mapreduce.protocol.ClientProtocol; import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo; import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader; import org.apache.hadoop.mapreduce.v2.LogParams; import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.Records; import org.apache.tez.client.MRTezClient; import org.apache.tez.common.TezUtils; import org.apache.tez.dag.api.DAG; import org.apache.tez.dag.api.DataSinkDescriptor; import org.apache.tez.dag.api.DataSourceDescriptor; import org.apache.tez.dag.api.Edge; import org.apache.tez.dag.api.InputDescriptor; import 
org.apache.tez.dag.api.OutputCommitterDescriptor; import org.apache.tez.dag.api.OutputDescriptor; import org.apache.tez.dag.api.ProcessorDescriptor; import org.apache.tez.dag.api.TezConfiguration; import org.apache.tez.dag.api.TezException; import org.apache.tez.dag.api.TezUncheckedException; import org.apache.tez.dag.api.UserPayload; import org.apache.tez.dag.api.Vertex; import org.apache.tez.dag.api.VertexLocationHint; import org.apache.tez.dag.api.TaskLocationHint; import org.apache.tez.dag.api.client.DAGStatus; import org.apache.tez.dag.api.client.MRDAGClient; import org.apache.tez.dag.library.vertexmanager.ShuffleVertexManager; import org.apache.tez.mapreduce.committer.MROutputCommitter; import org.apache.tez.mapreduce.hadoop.DeprecatedKeys; import org.apache.tez.mapreduce.hadoop.MRHelpers; import org.apache.tez.mapreduce.hadoop.MRInputHelpers; import org.apache.tez.mapreduce.hadoop.MRJobConfig; import org.apache.tez.mapreduce.hadoop.MultiStageMRConfToTezTranslator; import org.apache.tez.mapreduce.hadoop.MultiStageMRConfigUtil; import org.apache.tez.mapreduce.input.MRInput; import org.apache.tez.mapreduce.input.MRInputLegacy; import org.apache.tez.mapreduce.output.MROutputLegacy; import org.apache.tez.mapreduce.partition.MRPartitioner; import org.apache.tez.mapreduce.processor.map.MapProcessor; import org.apache.tez.mapreduce.processor.reduce.ReduceProcessor; import org.apache.tez.mapreduce.protos.MRRuntimeProtos; import org.apache.tez.runtime.library.api.TezRuntimeConfiguration; import org.apache.tez.runtime.library.conf.OrderedPartitionedKVEdgeConfig; import com.google.common.annotations.VisibleForTesting; /** * This class enables the current JobClient (0.22 hadoop) to run on YARN-TEZ. 
*/ @SuppressWarnings({ "unchecked" }) @LimitedPrivate("Mapreduce") public class YARNRunner implements ClientProtocol { private static final Log LOG = LogFactory.getLog(YARNRunner.class); private ResourceMgrDelegate resMgrDelegate; private ClientCache clientCache; private Configuration conf; private final FileContext defaultFileContext; final public static FsPermission DAG_FILE_PERMISSION = FsPermission.createImmutable((short) 0644); final public static int UTF8_CHUNK_SIZE = 16 * 1024; private final TezConfiguration tezConf; private MRTezClient tezClient; private MRDAGClient dagClient; /** * Yarn runner incapsulates the client interface of * yarn * @param conf the configuration object for the client */ public YARNRunner(Configuration conf) { this(conf, new ResourceMgrDelegate(new YarnConfiguration(conf))); } /** * Similar to {@link #YARNRunner(Configuration)} but allowing injecting * {@link ResourceMgrDelegate}. Enables mocking and testing. * @param conf the configuration object for the client * @param resMgrDelegate the resourcemanager client handle. */ public YARNRunner(Configuration conf, ResourceMgrDelegate resMgrDelegate) { this(conf, resMgrDelegate, new ClientCache(conf, resMgrDelegate)); } /** * Similar to {@link YARNRunner#YARNRunner(Configuration, ResourceMgrDelegate)} * but allowing injecting {@link ClientCache}. Enable mocking and testing. * @param conf the configuration object * @param resMgrDelegate the resource manager delegate * @param clientCache the client cache object. */ public YARNRunner(Configuration conf, ResourceMgrDelegate resMgrDelegate, ClientCache clientCache) { this.conf = conf; this.tezConf = new TezConfiguration(conf); try { this.resMgrDelegate = resMgrDelegate; this.clientCache = clientCache; this.defaultFileContext = FileContext.getFileContext(this.conf); } catch (UnsupportedFileSystemException ufe) { throw new RuntimeException("Error in instantiating YarnClient", ufe); } } @VisibleForTesting @Private /** * Used for testing mostly. 
* @param resMgrDelegate the resource manager delegate to set to. */ public void setResourceMgrDelegate(ResourceMgrDelegate resMgrDelegate) { this.resMgrDelegate = resMgrDelegate; } @Override public void cancelDelegationToken(Token<DelegationTokenIdentifier> arg0) throws IOException, InterruptedException { throw new UnsupportedOperationException("Use Token.renew instead"); } @Override public TaskTrackerInfo[] getActiveTrackers() throws IOException, InterruptedException { return resMgrDelegate.getActiveTrackers(); } @Override public JobStatus[] getAllJobs() throws IOException, InterruptedException { return resMgrDelegate.getAllJobs(); } @Override public TaskTrackerInfo[] getBlacklistedTrackers() throws IOException, InterruptedException { return resMgrDelegate.getBlacklistedTrackers(); } @Override public ClusterMetrics getClusterMetrics() throws IOException, InterruptedException { return resMgrDelegate.getClusterMetrics(); } @Override public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer) throws IOException, InterruptedException { // The token is only used for serialization. So the type information // mismatch should be fine. 
return resMgrDelegate.getDelegationToken(renewer); } @Override public String getFilesystemName() throws IOException, InterruptedException { return resMgrDelegate.getFilesystemName(); } @Override public JobID getNewJobID() throws IOException, InterruptedException { return resMgrDelegate.getNewJobID(); } @Override public QueueInfo getQueue(String queueName) throws IOException, InterruptedException { return resMgrDelegate.getQueue(queueName); } @Override public QueueAclsInfo[] getQueueAclsForCurrentUser() throws IOException, InterruptedException { return resMgrDelegate.getQueueAclsForCurrentUser(); } @Override public QueueInfo[] getQueues() throws IOException, InterruptedException { return resMgrDelegate.getQueues(); } @Override public QueueInfo[] getRootQueues() throws IOException, InterruptedException { return resMgrDelegate.getRootQueues(); } @Override public QueueInfo[] getChildQueues(String parent) throws IOException, InterruptedException { return resMgrDelegate.getChildQueues(parent); } @Override public String getStagingAreaDir() throws IOException, InterruptedException { return resMgrDelegate.getStagingAreaDir(); } @Override public String getSystemDir() throws IOException, InterruptedException { return resMgrDelegate.getSystemDir(); } @Override public long getTaskTrackerExpiryInterval() throws IOException, InterruptedException { return resMgrDelegate.getTaskTrackerExpiryInterval(); } private Map<String, LocalResource> createJobLocalResources( Configuration jobConf, String jobSubmitDir) throws IOException { // Setup LocalResources Map<String, LocalResource> localResources = new HashMap<String, LocalResource>(); Path jobConfPath = new Path(jobSubmitDir, MRJobConfig.JOB_CONF_FILE); URL yarnUrlForJobSubmitDir = ConverterUtils .getYarnUrlFromPath(defaultFileContext.getDefaultFileSystem() .resolvePath( defaultFileContext.makeQualified(new Path(jobSubmitDir)))); LOG.debug("Creating setup context, jobSubmitDir url is " + yarnUrlForJobSubmitDir); 
localResources.put(MRJobConfig.JOB_CONF_FILE, createApplicationResource(defaultFileContext, jobConfPath, LocalResourceType.FILE)); if (jobConf.get(MRJobConfig.JAR) != null) { Path jobJarPath = new Path(jobConf.get(MRJobConfig.JAR)); LocalResource rc = createApplicationResource(defaultFileContext, jobJarPath, LocalResourceType.FILE); // FIXME fix pattern support // String pattern = conf.getPattern(JobContext.JAR_UNPACK_PATTERN, // JobConf.UNPACK_JAR_PATTERN_DEFAULT).pattern(); // rc.setPattern(pattern); localResources.put(MRJobConfig.JOB_JAR, rc); } else { // Job jar may be null. For e.g, for pipes, the job jar is the hadoop // mapreduce jar itself which is already on the classpath. LOG.info("Job jar is not present. " + "Not adding any jar to the list of resources."); } // TODO gross hack for (String s : new String[] { MRJobConfig.JOB_SPLIT, MRJobConfig.JOB_SPLIT_METAINFO}) { localResources.put(s, createApplicationResource(defaultFileContext, new Path(jobSubmitDir, s), LocalResourceType.FILE)); } MRApps.setupDistributedCache(jobConf, localResources); return localResources; } // FIXME isn't this a nice mess of a client? 
// read input, write splits, read splits again
/**
 * Reads the split meta-info written to {@code jobSubmitDir} and converts
 * each split's preferred host list into a {@link TaskLocationHint}, in
 * split order. The returned list drives locality-aware scheduling of the
 * initial (map) vertex.
 *
 * @return one location hint per input split
 * @throws IOException if the split meta-info cannot be read
 */
private List<TaskLocationHint> getMapLocationHintsFromInputSplits(JobID jobId,
    FileSystem fs, Configuration conf,
    String jobSubmitDir) throws IOException {
  TaskSplitMetaInfo[] splitsInfo =
      SplitMetaInfoReader.readSplitMetaInfo(jobId, fs, conf,
          new Path(jobSubmitDir));
  int splitsCount = splitsInfo.length;
  List<TaskLocationHint> locationHints =
      new ArrayList<TaskLocationHint>(splitsCount);
  for (int i = 0; i < splitsCount; ++i) {
    TaskLocationHint locationHint =
        TaskLocationHint.createTaskLocationHint(
            new HashSet<String>(
                Arrays.asList(splitsInfo[i].getLocations())), null
        );
    locationHints.add(locationHint);
  }
  return locationHints;
}

/**
 * Populates {@code environment} with the MR task environment for a map or
 * reduce stage, after warning about -Djava.library.path usage in the
 * corresponding java-opts settings (which breaks native-library loading).
 *
 * @param jobConf     stage configuration to read env/opts keys from
 * @param environment mutated in place with the task environment variables
 * @param isMap       true for the map stage, false for a reduce stage
 */
private void setupMapReduceEnv(Configuration jobConf,
    Map<String, String> environment, boolean isMap) throws IOException {

  if (isMap) {
    warnForJavaLibPath(
        jobConf.get(MRJobConfig.MAP_JAVA_OPTS,""),
        "map",
        MRJobConfig.MAP_JAVA_OPTS,
        MRJobConfig.MAP_ENV);
    warnForJavaLibPath(
        jobConf.get(MRJobConfig.MAPRED_MAP_ADMIN_JAVA_OPTS,""),
        "map",
        MRJobConfig.MAPRED_MAP_ADMIN_JAVA_OPTS,
        MRJobConfig.MAPRED_ADMIN_USER_ENV);
  } else {
    warnForJavaLibPath(
        jobConf.get(MRJobConfig.REDUCE_JAVA_OPTS,""),
        "reduce",
        MRJobConfig.REDUCE_JAVA_OPTS,
        MRJobConfig.REDUCE_ENV);
    warnForJavaLibPath(
        jobConf.get(MRJobConfig.MAPRED_REDUCE_ADMIN_JAVA_OPTS,""),
        "reduce",
        MRJobConfig.MAPRED_REDUCE_ADMIN_JAVA_OPTS,
        MRJobConfig.MAPRED_ADMIN_USER_ENV);
  }

  MRHelpers.updateEnvBasedOnMRTaskEnv(jobConf, environment, isMap);
}

/**
 * Builds the Tez {@link Vertex} for one MR stage. Stage 0 becomes the map
 * vertex (MapProcessor + MRInput data source); the last stage becomes the
 * final reduce vertex and additionally gets an MROutput data sink; any
 * stage in between becomes an intermediate-reduce vertex.
 *
 * @param stageConf         raw configuration for this stage (mutated: sets
 *                          MROUTPUT_FILE_NAME_PREFIX to "part")
 * @param jobLocalResources job-wide local resources copied onto every task
 * @param locations         per-task location hints (null for reduce stages
 *                          — see createDAG)
 * @param stageNum          0-based stage index
 * @param totalStages       total number of stages in the pipeline
 * @return the configured vertex, not yet added to a DAG
 */
private Vertex createVertexForStage(Configuration stageConf,
    Map<String, LocalResource> jobLocalResources,
    List<TaskLocationHint> locations, int stageNum, int totalStages)
    throws IOException {
  // stageNum starts from 0, goes till numStages - 1
  boolean isMap = false;
  if (stageNum == 0) {
    isMap = true;
  }

  // Parallelism comes straight from the MR conf: NUM_MAPS / NUM_REDUCES.
  int numTasks = isMap ? stageConf.getInt(MRJobConfig.NUM_MAPS, 0)
      : stageConf.getInt(MRJobConfig.NUM_REDUCES, 0);
  String processorName = isMap ?
      MapProcessor.class.getName() : ReduceProcessor.class.getName();
  String vertexName = null;
  if (isMap) {
    vertexName = MultiStageMRConfigUtil.getInitialMapVertexName();
  } else {
    if (stageNum == totalStages - 1) {
      vertexName = MultiStageMRConfigUtil.getFinalReduceVertexName();
    } else {
      vertexName = MultiStageMRConfigUtil
          .getIntermediateStageVertexName(stageNum);
    }
  }

  Resource taskResource = isMap ? MRHelpers.getResourceForMRMapper(stageConf)
      : MRHelpers.getResourceForMRReducer(stageConf);

  stageConf.set(MRJobConfig.MROUTPUT_FILE_NAME_PREFIX, "part");

  // The whole stage conf rides along as the processor's user payload.
  UserPayload vertexUserPayload = TezUtils.createUserPayloadFromConf(stageConf);
  Vertex vertex = Vertex.create(vertexName,
      ProcessorDescriptor.create(processorName).setUserPayload(vertexUserPayload),
      numTasks, taskResource);
  if (stageConf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_CONVERT_USER_PAYLOAD_TO_HISTORY_TEXT,
      TezRuntimeConfiguration.TEZ_RUNTIME_CONVERT_USER_PAYLOAD_TO_HISTORY_TEXT_DEFAULT)) {
    vertex.getProcessorDescriptor().setHistoryText(TezUtils.convertToHistoryText(stageConf));
  }

  if (isMap) {
    vertex.addDataSource("MRInput",
        configureMRInputWithLegacySplitsGenerated(stageConf, true));
  }
  // Map only jobs.
  // NOTE(review): last stage gets the sink even when it is also stage 0
  // (map-only job) — both branches above can apply to the same vertex.
  if (stageNum == totalStages -1) {
    OutputDescriptor od = OutputDescriptor.create(MROutputLegacy.class.getName())
        .setUserPayload(vertexUserPayload);
    if (stageConf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_CONVERT_USER_PAYLOAD_TO_HISTORY_TEXT,
        TezRuntimeConfiguration.TEZ_RUNTIME_CONVERT_USER_PAYLOAD_TO_HISTORY_TEXT_DEFAULT)) {
      od.setHistoryText(TezUtils.convertToHistoryText(stageConf));
    }
    vertex.addDataSink("MROutput", DataSinkDescriptor.create(od,
        OutputCommitterDescriptor.create(MROutputCommitter.class.getName()), null));
  }

  Map<String, String> taskEnv = new HashMap<String, String>();
  setupMapReduceEnv(stageConf, taskEnv, isMap);

  Map<String, LocalResource> taskLocalResources =
      new TreeMap<String, LocalResource>();
  // PRECOMMIT Remove split localization for reduce tasks if it's being set
  // here
  taskLocalResources.putAll(jobLocalResources);

  String taskJavaOpts = isMap ? MRHelpers.getJavaOptsForMRMapper(stageConf)
      : MRHelpers.getJavaOptsForMRReducer(stageConf);

  vertex.setTaskEnvironment(taskEnv)
      .addTaskLocalFiles(taskLocalResources)
      .setLocationHint(VertexLocationHint.create(locations))
      .setTaskLaunchCmdOpts(taskJavaOpts);

  // Reduce vertices use the ShuffleVertexManager for auto-parallelism /
  // slow-start behavior configured from the stage conf.
  if (!isMap) {
    vertex.setVertexManagerPlugin((ShuffleVertexManager.createConfigBuilder(stageConf).build()));
  }

  if (LOG.isDebugEnabled()) {
    LOG.debug("Adding vertex to DAG" + ", vertexName="
        + vertex.getName() + ", processor="
        + vertex.getProcessorDescriptor().getClassName() + ", parallelism="
        + vertex.getParallelism() + ", javaOpts=" + vertex.getTaskLaunchCmdOpts()
        + ", resources=" + vertex.getTaskResource()
    // TODO Add localResources and Environment
    );
  }

  return vertex;
}

/**
 * Translates the MR stage configurations into a linear Tez {@link DAG}:
 * one vertex per stage, connected by ordered-partitioned KV edges.
 * (Body continues below this view.)
 */
private DAG createDAG(FileSystem fs, JobID jobId, Configuration[] stageConfs,
    String jobSubmitDir, Credentials ts,
    Map<String, LocalResource> jobLocalResources) throws IOException {
  String jobName = stageConfs[0].get(MRJobConfig.JOB_NAME,
      YarnConfiguration.DEFAULT_APPLICATION_NAME);
  DAG dag = DAG.create(jobName);

  LOG.info("Number of stages: " + stageConfs.length);
  // Only the map (stage-0) vertex gets split-derived location hints;
  // reduce stages get null hints.
  List<TaskLocationHint> mapInputLocations =
      getMapLocationHintsFromInputSplits(
          jobId, fs, stageConfs[0], jobSubmitDir);
  List<TaskLocationHint> reduceInputLocations = null;

  Vertex[] vertices = new Vertex[stageConfs.length];

  // Create all vertices first, then wire them up linearly.
  for (int i = 0; i < stageConfs.length; i++) {
    vertices[i] = createVertexForStage(stageConfs[i], jobLocalResources,
        i == 0 ? mapInputLocations : reduceInputLocations, i,
        stageConfs.length);
  }

  for (int i = 0; i < vertices.length; i++) {
    dag.addVertex(vertices[i]);
    if (i > 0) {
      // Set edge conf based on Input conf (compression etc properties for MapReduce are
      // w.r.t Outputs - MAP_OUTPUT_COMPRESS for example)
      Map<String, String> partitionerConf = null;
      if (stageConfs[i-1] != null) {
        // Snapshot the upstream stage conf for the partitioner.
        partitionerConf = Maps.newHashMap();
        for (Map.Entry<String, String> entry : stageConfs[i - 1]) {
          partitionerConf.put(entry.getKey(), entry.getValue());
        }
      }
      // Ordered-partitioned KV edge keyed/valued per the upstream stage,
      // partitioned by the MR partitioner, consumed via the legacy input.
      OrderedPartitionedKVEdgeConfig edgeConf =
          OrderedPartitionedKVEdgeConfig.newBuilder(stageConfs[i - 1].get(
                  TezRuntimeConfiguration.TEZ_RUNTIME_KEY_CLASS),
              stageConfs[i - 1].get(TezRuntimeConfiguration.TEZ_RUNTIME_VALUE_CLASS),
              MRPartitioner.class.getName(), partitionerConf)
              .configureInput().useLegacyInput().done()
              .setFromConfiguration(stageConfs[i - 1]).build();
      Edge edge = Edge.create(vertices[i - 1], vertices[i], edgeConf.createDefaultEdgeProperty());
      dag.addEdge(edge);
    }
  }
  return dag;
}

/**
 * Copies this client's Tez configuration and renames every deprecated MR
 * key that has a DAG-AM equivalent (per {@code DeprecatedKeys}) to its Tez
 * key, unsetting the original so only the Tez name survives.
 *
 * @return a new TezConfiguration suitable for the DAG AppMaster
 */
private TezConfiguration getDAGAMConfFromMRConf() {
  TezConfiguration finalConf = new TezConfiguration(this.tezConf);
  Map<String, String> mrParamToDAGParamMap = DeprecatedKeys
      .getMRToDAGParamMap();

  for (Entry<String, String> entry : mrParamToDAGParamMap.entrySet()) {
    if (finalConf.get(entry.getKey()) != null) {
      finalConf.set(entry.getValue(), finalConf.get(entry.getKey()));
      finalConf.unset(entry.getKey());
      if (LOG.isDebugEnabled()) {
        LOG.debug("MR->DAG Translating MR key: " + entry.getKey()
            + " to Tez key: " + entry.getValue() + " with value "
            + finalConf.get(entry.getValue()));
      }
    }
  }
  return finalConf;
}

/**
 * Kills the Tez session application named by the conf key
 * {@code mapreduce.tez.session.tokill-application-id}, if set.
 * No-op when the key is absent. See the TEZ-604 hack note in submitJob.
 *
 * @throws IOException wrapping any YarnException from the kill RPC
 */
private void
maybeKillSession() throws IOException {
  String sessionAppIdToKill = conf.get("mapreduce.tez.session.tokill-application-id");
  if (sessionAppIdToKill != null) {
    ApplicationId killAppId = ConverterUtils.toApplicationId(sessionAppIdToKill);
    try {
      resMgrDelegate.killApplication(killAppId);
    } catch (YarnException e) {
      throw new IOException("Failed while killing Session AppId", e);
    }
  }
}

/**
 * Submits the MR job as a Tez DAG application: translates the MR confs to
 * Tez, localizes job resources, builds the DAG, assembles the AM launch
 * opts/env, and submits via MRTezClient. Returns the initial job status.
 * (Body continues past this view.)
 */
@Override
public JobStatus submitJob(JobID jobId, String jobSubmitDir, Credentials ts)
    throws IOException, InterruptedException {

  // HACK! TEZ-604. Get rid of this once Hive moves all of its tasks over to Tez native.
  maybeKillSession();

  ApplicationId appId = resMgrDelegate.getApplicationId();

  FileSystem fs = FileSystem.get(conf);
  // Loads the job.xml written by the user.
  JobConf jobConf = new JobConf(new TezConfiguration(conf));

  // Extract individual raw MR configs.
  Configuration[] stageConfs = MultiStageMRConfToTezTranslator.getStageConfs(jobConf);

  // Transform all confs to use Tez keys
  for (int i = 0; i < stageConfs.length; i++) {
    MRHelpers.translateMRConfToTez(stageConfs[i]);
  }

  // create inputs to tezClient.submit()

  // FIXME set up job resources
  Map<String, LocalResource> jobLocalResources =
      createJobLocalResources(stageConfs[0], jobSubmitDir);

  // FIXME createDAG should take the tezConf as a parameter, instead of using
  // MR keys.
  DAG dag = createDAG(fs, jobId, stageConfs, jobSubmitDir, ts,
      jobLocalResources);

  List<String> vargs = new LinkedList<String>();
  // admin command opts and user command opts
  String mrAppMasterAdminOptions = conf.get(MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS,
      MRJobConfig.DEFAULT_MR_AM_ADMIN_COMMAND_OPTS);
  warnForJavaLibPath(mrAppMasterAdminOptions, "app master",
      MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS, MRJobConfig.MR_AM_ADMIN_USER_ENV);
  vargs.add(mrAppMasterAdminOptions);

  // Add AM user command opts
  String mrAppMasterUserOptions = conf.get(MRJobConfig.MR_AM_COMMAND_OPTS,
      MRJobConfig.DEFAULT_MR_AM_COMMAND_OPTS);
  warnForJavaLibPath(mrAppMasterUserOptions, "app master",
      MRJobConfig.MR_AM_COMMAND_OPTS, MRJobConfig.MR_AM_ENV);
  vargs.add(mrAppMasterUserOptions);

  // Flatten admin + user opts into one space-separated launch-opts string.
  StringBuilder javaOpts = new StringBuilder();
  for (String varg : vargs) {
    javaOpts.append(varg).append(" ");
  }

  // Setup the CLASSPATH in environment
  // i.e. add { Hadoop jars, job jar, CWD } to classpath.
  Map<String, String> environment = new HashMap<String, String>();

  // Setup the environment variables for AM
  MRHelpers.updateEnvBasedOnMRAMEnv(conf, environment);
  // Serialize the env map as the comma-separated KEY=VALUE form the Tez
  // launch-env conf key expects.
  StringBuilder envStrBuilder = new StringBuilder();
  boolean first = true;
  for (Entry<String, String> entry : environment.entrySet()) {
    if (!first) {
      envStrBuilder.append(",");
    } else {
      first = false;
    }
    envStrBuilder.append(entry.getKey()).append("=").append(entry.getValue());
  }
  String envStr = envStrBuilder.toString();

  TezConfiguration dagAMConf = getDAGAMConfFromMRConf();
  dagAMConf.set(TezConfiguration.TEZ_AM_LAUNCH_CMD_OPTS, javaOpts.toString());
  if (envStr.length() > 0) {
    dagAMConf.set(TezConfiguration.TEZ_AM_LAUNCH_ENV, envStr);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Setting MR AM env to : " + envStr);
    }
  }

  // Submit to ResourceManager
  try {
    dagAMConf.set(TezConfiguration.TEZ_AM_STAGING_DIR,
        jobSubmitDir);

    // Set Tez parameters based on MR parameters.
    // Map MR queue / AM resource / retry settings onto their Tez AM keys.
    String queueName = jobConf.get(JobContext.QUEUE_NAME,
        YarnConfiguration.DEFAULT_QUEUE_NAME);
    dagAMConf.set(TezConfiguration.TEZ_QUEUE_NAME, queueName);

    int amMemMB = jobConf.getInt(MRJobConfig.MR_AM_VMEM_MB,
        MRJobConfig.DEFAULT_MR_AM_VMEM_MB);
    int amCores = jobConf.getInt(MRJobConfig.MR_AM_CPU_VCORES,
        MRJobConfig.DEFAULT_MR_AM_CPU_VCORES);
    dagAMConf.setInt(TezConfiguration.TEZ_AM_RESOURCE_MEMORY_MB, amMemMB);
    dagAMConf.setInt(TezConfiguration.TEZ_AM_RESOURCE_CPU_VCORES, amCores);

    dagAMConf.setInt(TezConfiguration.TEZ_AM_MAX_APP_ATTEMPTS,
        jobConf.getInt(MRJobConfig.MR_AM_MAX_ATTEMPTS,
            MRJobConfig.DEFAULT_MR_AM_MAX_ATTEMPTS));

    // Non-session submission: start, submit the DAG application, stop.
    tezClient = new MRTezClient("MapReduce", dagAMConf, false,
        jobLocalResources, ts);
    tezClient.start();
    tezClient.submitDAGApplication(appId, dag);
    tezClient.stop();
  } catch (TezException e) {
    throw new IOException(e);
  }

  return getJobStatus(jobId);
}

/**
 * Wraps a file at {@code p} as an application-visibility YARN
 * {@link LocalResource}, resolving its path against the default filesystem
 * and stamping size/mtime for localization consistency checks.
 */
private LocalResource createApplicationResource(FileContext fs, Path p,
    LocalResourceType type) throws IOException {
  LocalResource rsrc = Records.newRecord(LocalResource.class);
  FileStatus rsrcStat = fs.getFileStatus(p);
  rsrc.setResource(ConverterUtils.getYarnUrlFromPath(fs
      .getDefaultFileSystem().resolvePath(rsrcStat.getPath())));
  rsrc.setSize(rsrcStat.getLen());
  rsrc.setTimestamp(rsrcStat.getModificationTime());
  rsrc.setType(type);
  rsrc.setVisibility(LocalResourceVisibility.APPLICATION);
  return rsrc;
}

/** Pass-through to the ResourceManager delegate. */
@Override
public void setJobPriority(JobID arg0, String arg1) throws IOException,
    InterruptedException {
  resMgrDelegate.setJobPriority(arg0, arg1);
}

/** Pass-through to the ResourceManager delegate. */
@Override
public long getProtocolVersion(String arg0, long arg1) throws IOException {
  return resMgrDelegate.getProtocolVersion(arg0, arg1);
}

/** Not supported here; callers must renew via {@code Token.renew}. */
@Override
public long renewDelegationToken(Token<DelegationTokenIdentifier> arg0)
    throws IOException, InterruptedException {
  throw new UnsupportedOperationException("Use Token.renew instead");
}

/** Fetches counters through the per-job cached client. */
@Override
public Counters getJobCounters(JobID arg0) throws IOException,
    InterruptedException {
  return
  clientCache.getClient(arg0).getJobCounters(arg0);
}

/** History "done" dir prefix configured for the history server. */
@Override
public String getJobHistoryDir() throws IOException, InterruptedException {
  return JobHistoryUtils.getConfiguredHistoryServerDoneDirPrefix(conf);
}

/**
 * Reports job status by querying the Tez DAG client for the job's
 * application, lazily creating and caching the DAG client on first use,
 * and adapting the DAGStatus into an MR JobStatus.
 *
 * @throws IOException wrapping any TezException from the status RPC
 */
@Override
public JobStatus getJobStatus(JobID jobID) throws IOException,
    InterruptedException {
  String user = UserGroupInformation.getCurrentUser().getShortUserName();
  String jobFile = MRApps.getJobFile(conf, user, jobID);
  DAGStatus dagStatus;
  try {
    if(dagClient == null) {
      dagClient = MRTezClient.getDAGClient(TypeConverter.toYarn(jobID).getAppId(), tezConf, null);
    }
    dagStatus = dagClient.getDAGStatus(null);
    return new DAGJobStatus(dagClient.getApplicationReport(), dagStatus, jobFile);
  } catch (TezException e) {
    throw new IOException(e);
  }
}

/** Pass-through to the per-job cached client. */
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1,
    int arg2) throws IOException, InterruptedException {
  return clientCache.getClient(arg0).getTaskCompletionEvents(arg0, arg1, arg2);
}

/** Pass-through to the per-job cached client. */
@Override
public String[] getTaskDiagnostics(TaskAttemptID arg0) throws IOException,
    InterruptedException {
  return clientCache.getClient(arg0.getJobID()).getTaskDiagnostics(arg0);
}

/** Pass-through to the per-job cached client. */
@Override
public TaskReport[] getTaskReports(JobID jobID, TaskType taskType)
    throws IOException, InterruptedException {
  return clientCache.getClient(jobID)
      .getTaskReports(jobID, taskType);
}

/**
 * Kills the job's YARN application, but only while the job is still
 * RUNNING or PREP; otherwise this is a silent no-op.
 */
@Override
public void killJob(JobID arg0) throws IOException, InterruptedException {
  /* check if the status is not running, if not send kill to RM */
  JobStatus status = getJobStatus(arg0);
  if (status.getState() == JobStatus.State.RUNNING ||
      status.getState() == JobStatus.State.PREP) {
    try {
      resMgrDelegate.killApplication(TypeConverter.toYarn(arg0).getAppId());
    } catch (YarnException e) {
      throw new IOException(e);
    }
    return;
  }
}

/** Pass-through to the per-job cached client. */
@Override
public boolean killTask(TaskAttemptID arg0, boolean arg1) throws IOException,
    InterruptedException {
  return clientCache.getClient(arg0.getJobID()).killTask(arg0, arg1);
}

@Override
public AccessControlList
// Queue admin ACLs are not enforced through this client: everyone ("*").
getQueueAdmins(String arg0) throws IOException {
  return new AccessControlList("*");
}

/** Always reports RUNNING; there is no real JobTracker behind this client. */
@Override
public JobTrackerStatus getJobTrackerStatus() throws IOException,
    InterruptedException {
  return JobTrackerStatus.RUNNING;
}

/** Standard Hadoop IPC protocol-signature handshake. */
@Override
public ProtocolSignature getProtocolSignature(String protocol,
    long clientVersion, int clientMethodsHash) throws IOException {
  return ProtocolSignature.getProtocolSignature(this, protocol, clientVersion,
      clientMethodsHash);
}

/** Resolves log locations via the per-job cached client. */
@Override
public LogParams getLogFileParams(JobID jobID, TaskAttemptID taskAttemptID)
    throws IOException {
  try {
    return clientCache.getClient(jobID).getLogFilePath(jobID, taskAttemptID);
  } catch (YarnException e) {
    throw new IOException(e);
  }
}

/**
 * Logs a warning when {@code opts} sets -Djava.library.path, which can
 * break Hadoop native-library loading; the message points the user at the
 * env-based alternative ({@code envConf}).
 */
private static void warnForJavaLibPath(String opts, String component,
    String javaConf, String envConf) {
  if (opts != null && opts.contains("-Djava.library.path")) {
    LOG.warn("Usage of -Djava.library.path in " + javaConf + " can cause " +
        "programs to no longer function if hadoop native libraries " +
        "are used. These values should be set as part of the " +
        "LD_LIBRARY_PATH in the " + component + " JVM env using " +
        envConf + " config settings.");
  }
}

/**
 * Builds the MRInput data-source descriptor for the map vertex, choosing
 * MRInputLegacy or MRInput per {@code useLegacyInput}, with the conf
 * serialized into the input payload (no pre-generated splits proto —
 * splits are resolved from the localized split files).
 *
 * @throws TezUncheckedException wrapping any IOException from payload creation
 */
@Private
private static DataSourceDescriptor configureMRInputWithLegacySplitsGenerated(Configuration conf,
    boolean useLegacyInput) {
  InputDescriptor inputDescriptor;

  try {
    inputDescriptor = InputDescriptor.create(useLegacyInput ?
        MRInputLegacy.class
            .getName() : MRInput.class.getName())
        .setUserPayload(MRInputHelpersInternal.createMRInputPayload(conf, null));
  } catch (IOException e) {
    throw new TezUncheckedException(e);
  }

  DataSourceDescriptor dsd = DataSourceDescriptor.create(inputDescriptor, null, null);
  if (conf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_CONVERT_USER_PAYLOAD_TO_HISTORY_TEXT,
      TezRuntimeConfiguration.TEZ_RUNTIME_CONVERT_USER_PAYLOAD_TO_HISTORY_TEXT_DEFAULT)) {
    dsd.getInputDescriptor().setHistoryText(TezUtils.convertToHistoryText(conf));
  }

  return dsd;
}

/**
 * Exposes the protected MRInputHelpers.createMRInputPayload to this class
 * via a subclass, since the helper is not publicly accessible.
 */
private static class MRInputHelpersInternal extends MRInputHelpers {

  protected static UserPayload createMRInputPayload(Configuration conf,
      MRRuntimeProtos.MRSplitsProto mrSplitsProto) throws IOException {
    return MRInputHelpers.createMRInputPayload(conf, mrSplitsProto);
  }
}

}
/******************************************************************************* * * Pentaho Big Data * * Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.avroinput; import java.util.ArrayList; import java.util.List; import org.apache.avro.Schema; import org.apache.commons.vfs2.FileObject; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.Const; import 
org.pentaho.di.core.Props; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.hadoop.HadoopSpoonPlugin; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.TransPreviewFactory; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.textfileinput.TextFileInputMeta; import org.pentaho.di.ui.core.dialog.EnterNumberDialog; import org.pentaho.di.ui.core.dialog.EnterTextDialog; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.dialog.PreviewRowsDialog; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.trans.dialog.TransPreviewProgressDialog; import org.pentaho.di.ui.trans.step.BaseStepDialog; import org.pentaho.vfs.ui.VfsFileChooserDialog; /** * Dialog for the Avro input step. 
* * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) * @version $Revision$ */ public class AvroInputDialog extends BaseStepDialog implements StepDialogInterface { private static final Class<?> PKG = AvroInputMeta.class; private final AvroInputMeta m_currentMeta; private final AvroInputMeta m_originalMeta; private CTabFolder m_wTabFolder; private CTabItem m_wSourceTab; private CTabItem m_wSchemaTab; private CTabItem m_wFieldsTab; private CTabItem m_wVarsTab; /** various UI bits and pieces for the dialog */ private Label m_stepnameLabel; private Text m_stepnameText; private Button m_sourceInFileBut; private Button m_sourceInFieldBut; private Label m_defaultSchemaL; private Button m_schemaInFieldBut; private Label m_schemaInFieldIsPathL; private Button m_schemaInFieldIsPathBut; private Label m_cacheSchemasL; private Button m_cacheSchemasBut; private Label m_schemaFieldNameL; private CCombo m_schemaFieldNameText; private TextVar m_avroFilenameText; private Button m_avroFileBrowse; private TextVar m_schemaFilenameText; private Button m_schemaFileBrowse; private CCombo m_avroFieldNameText; private Button m_jsonEncodedBut; private Button m_missingFieldsBut; private Button m_getFields; private TableView m_fieldsView; private Button m_getLookupFieldsBut; private TableView m_lookupView; public AvroInputDialog( Shell parent, Object in, TransMeta tr, String name ) { super( parent, (BaseStepMeta) in, tr, name ); m_currentMeta = (AvroInputMeta) in; m_originalMeta = (AvroInputMeta) m_currentMeta.clone(); } public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX ); props.setLook( shell ); setShellImage( shell, m_currentMeta ); // used to listen to a text field (m_wStepname) ModifyListener lsMod = new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); } }; changed = m_currentMeta.hasChanged(); FormLayout formLayout = new FormLayout(); 
formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout( formLayout ); shell.setText( BaseMessages.getString( PKG, "AvroInputDialog.Shell.Title" ) ); int middle = props.getMiddlePct(); int margin = Const.MARGIN; // Stepname line m_stepnameLabel = new Label( shell, SWT.RIGHT ); m_stepnameLabel.setText( BaseMessages.getString( PKG, "AvroInputDialog.StepName.Label" ) ); props.setLook( m_stepnameLabel ); FormData fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( middle, -margin ); fd.top = new FormAttachment( 0, margin ); m_stepnameLabel.setLayoutData( fd ); m_stepnameText = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); m_stepnameText.setText( stepname ); props.setLook( m_stepnameText ); m_stepnameText.addModifyListener( lsMod ); // format the text field fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( 0, margin ); fd.right = new FormAttachment( 100, 0 ); m_stepnameText.setLayoutData( fd ); m_wTabFolder = new CTabFolder( shell, SWT.BORDER ); props.setLook( m_wTabFolder, Props.WIDGET_STYLE_TAB ); m_wTabFolder.setSimple( false ); // start of the source tab m_wSourceTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_wSourceTab.setText( BaseMessages.getString( PKG, "AvroInputDialog.SourceTab.Title" ) ); Composite wSourceComp = new Composite( m_wTabFolder, SWT.NONE ); props.setLook( wSourceComp ); FormLayout sourceLayout = new FormLayout(); sourceLayout.marginWidth = 3; sourceLayout.marginHeight = 3; wSourceComp.setLayout( sourceLayout ); // source in file line Label fileSourceL = new Label( wSourceComp, SWT.RIGHT ); props.setLook( fileSourceL ); fileSourceL.setText( BaseMessages.getString( PKG, "AvroInputDialog.FileSource.Label" ) ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, margin ); fd.right = new FormAttachment( middle, -margin ); fileSourceL.setLayoutData( fd ); m_sourceInFileBut = 
new Button( wSourceComp, SWT.CHECK ); props.setLook( m_sourceInFileBut ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( 0, margin ); m_sourceInFileBut.setLayoutData( fd ); m_sourceInFileBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { m_currentMeta.setChanged(); m_sourceInFieldBut.setSelection( !m_sourceInFileBut.getSelection() ); checkWidgets(); } } ); // source in field line Label fieldSourceL = new Label( wSourceComp, SWT.RIGHT ); props.setLook( fieldSourceL ); fieldSourceL.setText( BaseMessages.getString( PKG, "AvroInputDialog.FieldSource.Label" ) ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_sourceInFileBut, margin ); fd.right = new FormAttachment( middle, -margin ); fieldSourceL.setLayoutData( fd ); m_sourceInFieldBut = new Button( wSourceComp, SWT.CHECK ); props.setLook( m_sourceInFieldBut ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_sourceInFileBut, margin ); m_sourceInFieldBut.setLayoutData( fd ); m_sourceInFieldBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { m_currentMeta.setChanged(); m_sourceInFileBut.setSelection( !m_sourceInFieldBut.getSelection() ); checkWidgets(); } } ); // filename line Label filenameL = new Label( wSourceComp, SWT.RIGHT ); props.setLook( filenameL ); filenameL.setText( BaseMessages.getString( PKG, "AvroInputDialog.Filename.Label" ) ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_sourceInFieldBut, margin ); fd.right = new FormAttachment( middle, -margin ); filenameL.setLayoutData( fd ); m_avroFileBrowse = new Button( wSourceComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_avroFileBrowse ); m_avroFileBrowse.setText( BaseMessages.getString( PKG, 
"AvroInputDialog.Button.FileBrowse" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( m_sourceInFieldBut, 0 ); m_avroFileBrowse.setLayoutData( fd ); // add listener to pop up VFS browse dialog m_avroFileBrowse.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { try { String[] fileFilters = new String[] { "*" }; String[] fileFilterNames = new String[] { BaseMessages.getString( TextFileInputMeta.class, "System.FileType.AllFiles" ) }; // get current file FileObject rootFile = null; FileObject initialFile = null; FileObject defaultInitialFile = null; if ( m_avroFilenameText.getText() != null ) { String fname = transMeta.environmentSubstitute( m_avroFilenameText.getText() ); if ( !Const.isEmpty( fname ) ) { initialFile = KettleVFS.getFileObject( fname ); rootFile = initialFile.getFileSystem().getRoot(); } else { defaultInitialFile = KettleVFS.getFileObject( Spoon.getInstance().getLastFileOpened() ); } } else { defaultInitialFile = KettleVFS.getFileObject( "file:///c:/" ); } if ( rootFile == null ) { rootFile = defaultInitialFile.getFileSystem().getRoot(); } VfsFileChooserDialog fileChooserDialog = Spoon.getInstance().getVfsFileChooserDialog( rootFile, initialFile ); fileChooserDialog.defaultInitialFile = defaultInitialFile; FileObject selectedFile = fileChooserDialog.open( shell, null, HadoopSpoonPlugin.HDFS_SCHEME, true, null, fileFilters, fileFilterNames, VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE ); if ( selectedFile != null ) { m_avroFilenameText.setText( selectedFile.getURL().toString() ); } } catch ( Exception ex ) { logError( BaseMessages.getString( PKG, "AvroInputDialog.Error.KettleFileException" ), ex ); new ErrorDialog( shell, stepname, BaseMessages.getString( PKG, "AvroInputDialog.Error.KettleFileException" ), ex ); } } } ); m_avroFilenameText = new TextVar( transMeta, wSourceComp, SWT.SIMPLE | SWT.LEFT | SWT.BORDER ); props.setLook( m_avroFilenameText ); 
m_avroFilenameText.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); m_avroFilenameText.setToolTipText( transMeta.environmentSubstitute( m_avroFilenameText.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_sourceInFieldBut, margin ); fd.right = new FormAttachment( m_avroFileBrowse, -margin ); m_avroFilenameText.setLayoutData( fd ); Label avroFieldNameL = new Label( wSourceComp, SWT.RIGHT ); props.setLook( avroFieldNameL ); avroFieldNameL.setText( BaseMessages.getString( PKG, "AvroInputDialog.AvroField.Label" ) ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_avroFilenameText, margin ); fd.right = new FormAttachment( middle, -margin ); avroFieldNameL.setLayoutData( fd ); m_avroFieldNameText = new CCombo( wSourceComp, SWT.BORDER ); props.setLook( m_avroFieldNameText ); m_avroFieldNameText.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); m_avroFieldNameText.setToolTipText( transMeta.environmentSubstitute( m_avroFieldNameText.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_avroFilenameText, margin ); fd.right = new FormAttachment( 100, 0 ); m_avroFieldNameText.setLayoutData( fd ); // json encoded check box Label jsonL = new Label( wSourceComp, SWT.RIGHT ); props.setLook( jsonL ); jsonL.setText( BaseMessages.getString( PKG, "AvroInputDialog.JsonEncoded.Label" ) ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_avroFieldNameText, margin ); fd.right = new FormAttachment( middle, -margin ); jsonL.setLayoutData( fd ); jsonL.setToolTipText( BaseMessages.getString( PKG, "AvroInputDialog.JsonEncoded.TipText" ) ); m_jsonEncodedBut = new Button( wSourceComp, SWT.CHECK ); props.setLook( m_jsonEncodedBut ); fd = new FormData(); fd.right = 
new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_avroFieldNameText, margin ); m_jsonEncodedBut.setLayoutData( fd ); m_jsonEncodedBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { m_currentMeta.setChanged(); } } ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, 0 ); wSourceComp.setLayoutData( fd ); wSourceComp.layout(); m_wSourceTab.setControl( wSourceComp ); // -- start of the schema tab m_wSchemaTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_wSchemaTab.setText( BaseMessages.getString( PKG, "AvroInputDialog.SchemaTab.Title" ) ); Composite wSchemaComp = new Composite( m_wTabFolder, SWT.NONE ); props.setLook( wSchemaComp ); FormLayout schemaLayout = new FormLayout(); schemaLayout.marginWidth = 3; schemaLayout.marginHeight = 3; wSchemaComp.setLayout( schemaLayout ); // schema filename line m_defaultSchemaL = new Label( wSchemaComp, SWT.RIGHT ); props.setLook( m_defaultSchemaL ); m_defaultSchemaL.setText( BaseMessages.getString( PKG, "AvroInputDialog.SchemaFilename.Label" ) ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, margin ); fd.right = new FormAttachment( middle, -margin ); m_defaultSchemaL.setLayoutData( fd ); m_defaultSchemaL.setToolTipText( BaseMessages.getString( PKG, "AvroInputDialog.SchemaFilename.TipText" ) ); m_schemaFileBrowse = new Button( wSchemaComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_schemaFileBrowse ); m_schemaFileBrowse.setText( BaseMessages.getString( PKG, "AvroInputDialog.Button.FileBrowse" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( 0, 0 ); m_schemaFileBrowse.setLayoutData( fd ); // add listener to pop up VFS browse dialog m_schemaFileBrowse.addSelectionListener( new SelectionAdapter() { @Override public 
void widgetSelected( SelectionEvent e ) { try { String[] fileFilters = new String[] { "*" }; String[] fileFilterNames = new String[] { BaseMessages.getString( TextFileInputMeta.class, "System.FileType.AllFiles" ) }; // get current file FileObject rootFile = null; FileObject initialFile = null; FileObject defaultInitialFile = null; if ( m_schemaFilenameText.getText() != null ) { String fname = transMeta.environmentSubstitute( m_schemaFilenameText.getText() ); if ( !Const.isEmpty( fname ) ) { initialFile = KettleVFS.getFileObject( fname ); rootFile = initialFile.getFileSystem().getRoot(); } else { defaultInitialFile = KettleVFS.getFileObject( Spoon.getInstance().getLastFileOpened() ); } } else { defaultInitialFile = KettleVFS.getFileObject( "file:///c:/" ); } if ( rootFile == null ) { rootFile = defaultInitialFile.getFileSystem().getRoot(); } VfsFileChooserDialog fileChooserDialog = Spoon.getInstance().getVfsFileChooserDialog( rootFile, initialFile ); fileChooserDialog.defaultInitialFile = defaultInitialFile; FileObject selectedFile = fileChooserDialog.open( shell, null, HadoopSpoonPlugin.HDFS_SCHEME, true, null, fileFilters, fileFilterNames, VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE ); if ( selectedFile != null ) { m_schemaFilenameText.setText( selectedFile.getURL().toString() ); } } catch ( Exception ex ) { logError( BaseMessages.getString( PKG, "AvroInputDialog.Error.KettleFileException" ), ex ); new ErrorDialog( shell, stepname, BaseMessages.getString( PKG, "AvroInputDialog.Error.KettleFileException" ), ex ); } } } ); m_schemaFilenameText = new TextVar( transMeta, wSchemaComp, SWT.SIMPLE | SWT.LEFT | SWT.BORDER ); props.setLook( m_schemaFilenameText ); m_schemaFilenameText.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); m_avroFilenameText.setToolTipText( transMeta.environmentSubstitute( m_schemaFilenameText.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = 
new FormAttachment( 0, margin ); fd.right = new FormAttachment( m_schemaFileBrowse, -margin ); m_schemaFilenameText.setLayoutData( fd ); // Schema in field line Label schemaInFieldL = new Label( wSchemaComp, SWT.RIGHT ); props.setLook( schemaInFieldL ); schemaInFieldL.setText( BaseMessages.getString( PKG, "AvroInputDialog.SchemaInField.Label" ) ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_schemaFilenameText, margin ); fd.right = new FormAttachment( middle, -margin ); schemaInFieldL.setLayoutData( fd ); m_schemaInFieldBut = new Button( wSchemaComp, SWT.CHECK ); props.setLook( m_schemaInFieldBut ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_schemaFilenameText, margin ); m_schemaInFieldBut.setLayoutData( fd ); m_schemaInFieldBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { m_currentMeta.setChanged(); checkWidgets(); } } ); // schema is path line m_schemaInFieldIsPathL = new Label( wSchemaComp, SWT.RIGHT ); props.setLook( m_schemaInFieldIsPathL ); m_schemaInFieldIsPathL.setText( BaseMessages.getString( PKG, "AvroInputDialog.SchemaInFieldIsPath.Label" ) ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_schemaInFieldBut, margin ); fd.right = new FormAttachment( middle, -margin ); m_schemaInFieldIsPathL.setLayoutData( fd ); m_schemaInFieldIsPathBut = new Button( wSchemaComp, SWT.CHECK ); props.setLook( m_schemaInFieldIsPathBut ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_schemaInFieldBut, margin ); m_schemaInFieldIsPathBut.setLayoutData( fd ); // cache schemas line m_cacheSchemasL = new Label( wSchemaComp, SWT.RIGHT ); props.setLook( m_cacheSchemasL ); m_cacheSchemasL.setText( BaseMessages.getString( PKG, 
"AvroInputDialog.CacheSchemas.Label" ) ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_schemaInFieldIsPathBut, margin ); fd.right = new FormAttachment( middle, -margin ); m_cacheSchemasL.setLayoutData( fd ); m_cacheSchemasBut = new Button( wSchemaComp, SWT.CHECK ); props.setLook( m_cacheSchemasBut ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_schemaInFieldIsPathBut, margin ); m_cacheSchemasBut.setLayoutData( fd ); // schema field name line m_schemaFieldNameL = new Label( wSchemaComp, SWT.RIGHT ); props.setLook( m_schemaFieldNameL ); m_schemaFieldNameL.setText( BaseMessages.getString( PKG, "AvroInputDialog.SchemaFieldName.Label" ) ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_cacheSchemasBut, margin ); fd.right = new FormAttachment( middle, -margin ); m_schemaFieldNameL.setLayoutData( fd ); m_schemaFieldNameText = new CCombo( wSchemaComp, SWT.BORDER ); props.setLook( m_schemaFieldNameText ); m_schemaFieldNameText.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); m_schemaFieldNameText.setToolTipText( transMeta.environmentSubstitute( m_schemaFieldNameText.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_cacheSchemasBut, margin ); fd.right = new FormAttachment( 100, 0 ); m_schemaFieldNameText.setLayoutData( fd ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, 0 ); wSchemaComp.setLayoutData( fd ); wSchemaComp.layout(); m_wSchemaTab.setControl( wSchemaComp ); // -- start of the fields tab m_wFieldsTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_wFieldsTab.setText( BaseMessages.getString( PKG, "AvroInputDialog.FieldsTab.Title" ) ); 
Composite wFieldsComp = new Composite( m_wTabFolder, SWT.NONE ); props.setLook( wFieldsComp ); FormLayout fieldsLayout = new FormLayout(); fieldsLayout.marginWidth = 3; fieldsLayout.marginHeight = 3; wFieldsComp.setLayout( fieldsLayout ); // missing fields button Label missingFieldsLab = new Label( wFieldsComp, SWT.RIGHT ); props.setLook( missingFieldsLab ); missingFieldsLab.setText( BaseMessages.getString( PKG, "AvroInputDialog.MissingFields.Label" ) ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, margin ); fd.right = new FormAttachment( middle, -margin ); missingFieldsLab.setLayoutData( fd ); m_missingFieldsBut = new Button( wFieldsComp, SWT.CHECK ); props.setLook( m_missingFieldsBut ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( 0, margin ); m_missingFieldsBut.setLayoutData( fd ); // get fields button m_getFields = new Button( wFieldsComp, SWT.PUSH ); m_getFields.setText( BaseMessages.getString( PKG, "AvroInputDialog.Button.GetFields" ) ); props.setLook( m_getFields ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, 0 ); m_getFields.setLayoutData( fd ); m_getFields.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { // populate table from schema getFields(); } } ); wPreview = new Button( wFieldsComp, SWT.PUSH | SWT.CENTER ); wPreview.setText( BaseMessages.getString( PKG, "System.Button.Preview" ) ); props.setLook( wPreview ); fd = new FormData(); fd.right = new FormAttachment( m_getFields, margin ); fd.bottom = new FormAttachment( 100, 0 ); wPreview.setLayoutData( fd ); wPreview.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { preview(); } } ); // fields stuff final ColumnInfo[] colinf = new ColumnInfo[] { new ColumnInfo( BaseMessages.getString( PKG, 
"AvroInputDialog.Fields.FIELD_NAME" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( PKG, "AvroInputDialog.Fields.FIELD_PATH" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( PKG, "AvroInputDialog.Fields.FIELD_TYPE" ), ColumnInfo.COLUMN_TYPE_CCOMBO, false ), new ColumnInfo( BaseMessages.getString( PKG, "AvroInputDialog.Fields.FIELD_INDEXED" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), }; colinf[2].setComboValues( ValueMeta.getTypes() ); m_fieldsView = new TableView( transMeta, wFieldsComp, SWT.FULL_SELECTION | SWT.MULTI, colinf, 1, lsMod, props ); fd = new FormData(); fd.top = new FormAttachment( m_missingFieldsBut, margin * 2 ); fd.bottom = new FormAttachment( m_getFields, -margin * 2 ); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); m_fieldsView.setLayoutData( fd ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, 0 ); wFieldsComp.setLayoutData( fd ); wFieldsComp.layout(); m_wFieldsTab.setControl( wFieldsComp ); // -- start of the variables tab m_wVarsTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_wVarsTab.setText( BaseMessages.getString( PKG, "AvroInputDialog.VarsTab.Title" ) ); Composite wVarsComp = new Composite( m_wTabFolder, SWT.NONE ); props.setLook( wVarsComp ); FormLayout varsLayout = new FormLayout(); varsLayout.marginWidth = 3; varsLayout.marginHeight = 3; wVarsComp.setLayout( varsLayout ); // lookup fields (variables) tab final ColumnInfo[] colinf2 = new ColumnInfo[] { new ColumnInfo( BaseMessages.getString( PKG, "AvroInputDialog.Fields.LOOKUP_NAME" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( PKG, "AvroInputDialog.Fields.LOOKUP_VARIABLE" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( PKG, "AvroInputDialog.Fields.LOOKUP_DEFAULT_VALUE" ), ColumnInfo.COLUMN_TYPE_TEXT, 
false ), }; // get lookup fields but m_getLookupFieldsBut = new Button( wVarsComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_getLookupFieldsBut ); m_getLookupFieldsBut.setText( BaseMessages.getString( PKG, "AvroInputDialog.Button.GetLookupFields" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, -margin * 2 ); m_getLookupFieldsBut.setLayoutData( fd ); m_getLookupFieldsBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { // get incoming field names getIncomingFields(); } } ); m_lookupView = new TableView( transMeta, wVarsComp, SWT.FULL_SELECTION | SWT.MULTI, colinf2, 1, lsMod, props ); fd = new FormData(); fd.top = new FormAttachment( 0, margin * 2 ); fd.bottom = new FormAttachment( m_getLookupFieldsBut, -margin * 2 ); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); m_lookupView.setLayoutData( fd ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, 0 ); wVarsComp.setLayoutData( fd ); wVarsComp.layout(); m_wVarsTab.setControl( wVarsComp ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_stepnameText, margin ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, -50 ); m_wTabFolder.setLayoutData( fd ); populateFieldsCombo(); // Buttons inherited from BaseStepDialog wOK = new Button( shell, SWT.PUSH ); wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) ); wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) ); setButtonPositions( new Button[] { wOK, wCancel }, margin, m_wTabFolder ); // Add listeners lsCancel = new Listener() { public void handleEvent( Event e ) { cancel(); } }; lsOK = new Listener() { public void handleEvent( Event e ) { ok(); } }; wCancel.addListener( 
SWT.Selection, lsCancel ); wOK.addListener( SWT.Selection, lsOK ); lsDef = new SelectionAdapter() { @Override public void widgetDefaultSelected( SelectionEvent e ) { ok(); } }; m_stepnameText.addSelectionListener( lsDef ); // Detect X or ALT-F4 or something that kills this window... shell.addShellListener( new ShellAdapter() { @Override public void shellClosed( ShellEvent e ) { cancel(); } } ); m_wTabFolder.setSelection( 0 ); setSize(); getData(); shell.open(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return stepname; } protected void cancel() { stepname = null; m_currentMeta.setChanged( changed ); dispose(); } protected void ok() { if ( Const.isEmpty( m_stepnameText.getText() ) ) { return; } stepname = m_stepnameText.getText(); setMeta( m_currentMeta ); if ( !m_originalMeta.equals( m_currentMeta ) ) { m_currentMeta.setChanged(); changed = m_currentMeta.hasChanged(); } dispose(); } protected void setMeta( AvroInputMeta avroMeta ) { avroMeta.setFilename( m_avroFilenameText.getText() ); avroMeta.setSchemaFilename( m_schemaFilenameText.getText() ); avroMeta.setAvroIsJsonEncoded( m_jsonEncodedBut.getSelection() ); avroMeta.setAvroInField( m_sourceInFieldBut.getSelection() ); avroMeta.setAvroFieldName( m_avroFieldNameText.getText() ); avroMeta.setSchemaInField( m_schemaInFieldBut.getSelection() ); avroMeta.setSchemaInFieldIsPath( m_schemaInFieldIsPathBut.getSelection() ); avroMeta.setCacheSchemasInMemory( m_cacheSchemasBut.getSelection() ); avroMeta.setSchemaFieldName( m_schemaFieldNameText.getText() ); avroMeta.setDontComplainAboutMissingFields( m_missingFieldsBut.getSelection() ); int numNonEmpty = m_fieldsView.nrNonEmpty(); if ( numNonEmpty > 0 ) { List<AvroInputMeta.AvroField> outputFields = new ArrayList<AvroInputMeta.AvroField>(); for ( int i = 0; i < numNonEmpty; i++ ) { TableItem item = m_fieldsView.getNonEmpty( i ); AvroInputMeta.AvroField newField = new AvroInputMeta.AvroField(); newField.m_fieldName = 
item.getText( 1 ).trim(); newField.m_fieldPath = item.getText( 2 ).trim(); newField.m_kettleType = item.getText( 3 ).trim(); if ( !Const.isEmpty( item.getText( 4 ) ) ) { newField.m_indexedVals = AvroInputMeta.indexedValsList( item.getText( 4 ).trim() ); } outputFields.add( newField ); } avroMeta.setAvroFields( outputFields ); } numNonEmpty = m_lookupView.nrNonEmpty(); if ( numNonEmpty > 0 ) { List<AvroInputMeta.LookupField> varFields = new ArrayList<AvroInputMeta.LookupField>(); for ( int i = 0; i < numNonEmpty; i++ ) { TableItem item = m_lookupView.getNonEmpty( i ); AvroInputMeta.LookupField newField = new AvroInputMeta.LookupField(); boolean add = false; newField.m_fieldName = item.getText( 1 ).trim(); if ( !Const.isEmpty( item.getText( 2 ) ) ) { newField.m_variableName = item.getText( 2 ).trim(); add = true; if ( !Const.isEmpty( item.getText( 3 ) ) ) { newField.m_defaultValue = item.getText( 3 ).trim(); } } if ( add ) { varFields.add( newField ); } } avroMeta.setLookupFields( varFields ); } } protected void getFields() { if ( !Const.isEmpty( m_schemaFilenameText.getText() ) ) { // this schema overrides any that might be in a container file String sName = m_schemaFilenameText.getText(); sName = transMeta.environmentSubstitute( sName ); try { Schema s = AvroInputData.loadSchema( sName ); List<AvroInputMeta.AvroField> schemaFields = AvroInputData.getLeafFields( s ); setTableFields( schemaFields ); } catch ( Exception ex ) { logError( BaseMessages.getString( PKG, "AvroInputDialog.Error.KettleFileException" + " " + sName ), ex ); new ErrorDialog( shell, stepname, BaseMessages.getString( PKG, "AvroInputDialog.Error.KettleFileException" + " " + sName ), ex ); } } else { String avroFileName = m_avroFilenameText.getText(); avroFileName = transMeta.environmentSubstitute( avroFileName ); try { Schema s = AvroInputData.loadSchemaFromContainer( avroFileName ); List<AvroInputMeta.AvroField> schemaFields = AvroInputData.getLeafFields( s ); setTableFields( schemaFields ); } 
catch ( Exception ex ) { logError( BaseMessages.getString( PKG, "AvroInput.Error.UnableToLoadSchemaFromContainerFile" ), ex ); new ErrorDialog( shell, stepname, BaseMessages.getString( PKG, "AvroInput.Error.UnableToLoadSchemaFromContainerFile", avroFileName ), ex ); } } } protected void setTableFields( List<AvroInputMeta.AvroField> fields ) { m_fieldsView.clearAll(); for ( AvroInputMeta.AvroField f : fields ) { TableItem item = new TableItem( m_fieldsView.table, SWT.NONE ); if ( !Const.isEmpty( f.m_fieldName ) ) { item.setText( 1, f.m_fieldName ); } if ( !Const.isEmpty( f.m_fieldPath ) ) { item.setText( 2, f.m_fieldPath ); } if ( !Const.isEmpty( f.m_kettleType ) ) { item.setText( 3, f.m_kettleType ); } if ( f.m_indexedVals != null && f.m_indexedVals.size() > 0 ) { item.setText( 4, AvroInputMeta.indexedValsList( f.m_indexedVals ) ); } } m_fieldsView.removeEmptyRows(); m_fieldsView.setRowNums(); m_fieldsView.optWidth( true ); } protected void setVariableTableFields( List<AvroInputMeta.LookupField> fields ) { m_lookupView.clearAll(); for ( AvroInputMeta.LookupField f : fields ) { TableItem item = new TableItem( m_lookupView.table, SWT.NONE ); if ( !Const.isEmpty( f.m_fieldName ) ) { item.setText( 1, f.m_fieldName ); } if ( !Const.isEmpty( f.m_variableName ) ) { item.setText( 2, f.m_variableName ); } if ( !Const.isEmpty( f.m_defaultValue ) ) { item.setText( 3, f.m_defaultValue ); } } m_lookupView.removeEmptyRows(); m_lookupView.setRowNums(); m_lookupView.optWidth( true ); } protected void getData() { if ( !Const.isEmpty( m_currentMeta.getFilename() ) ) { m_avroFilenameText.setText( m_currentMeta.getFilename() ); } if ( !Const.isEmpty( m_currentMeta.getSchemaFilename() ) ) { m_schemaFilenameText.setText( m_currentMeta.getSchemaFilename() ); } if ( !Const.isEmpty( m_currentMeta.getAvroFieldName() ) ) { m_avroFieldNameText.setText( m_currentMeta.getAvroFieldName() ); } m_jsonEncodedBut.setSelection( m_currentMeta.getAvroIsJsonEncoded() ); m_sourceInFieldBut.setSelection( 
m_currentMeta.getAvroInField() ); if ( !m_currentMeta.getAvroInField() ) { m_sourceInFileBut.setSelection( true ); } m_schemaInFieldBut.setSelection( m_currentMeta.getSchemaInField() ); m_schemaInFieldIsPathBut.setSelection( m_currentMeta.getSchemaInFieldIsPath() ); m_cacheSchemasBut.setSelection( m_currentMeta.getCacheSchemasInMemory() ); m_missingFieldsBut.setSelection( m_currentMeta.getDontComplainAboutMissingFields() ); if ( !Const.isEmpty( m_currentMeta.getSchemaFieldName() ) ) { m_schemaFieldNameText.setText( m_currentMeta.getSchemaFieldName() ); } // fields if ( m_currentMeta.getAvroFields() != null && m_currentMeta.getAvroFields().size() > 0 ) { setTableFields( m_currentMeta.getAvroFields() ); } if ( m_currentMeta.getLookupFields() != null && m_currentMeta.getLookupFields().size() > 0 ) { setVariableTableFields( m_currentMeta.getLookupFields() ); } checkWidgets(); } private void checkWidgets() { boolean sifile = m_sourceInFileBut.getSelection(); m_avroFilenameText.setEnabled( sifile ); m_avroFileBrowse.setEnabled( sifile ); boolean sifield = m_sourceInFieldBut.getSelection(); if ( sifield ) { m_sourceInFileBut.setSelection( !sifield ); } m_avroFilenameText.setEnabled( !sifield ); m_avroFileBrowse.setEnabled( !sifield ); m_avroFieldNameText.setEnabled( sifield ); // } wPreview.setEnabled( m_sourceInFileBut.getSelection() ); if ( sifile ) { m_schemaInFieldBut.setSelection( false ); } m_schemaInFieldBut.setEnabled( !sifile ); boolean sField = m_schemaInFieldBut.getSelection(); m_schemaInFieldIsPathL.setEnabled( sField ); m_schemaInFieldIsPathBut.setEnabled( sField ); m_cacheSchemasL.setEnabled( sField ); m_cacheSchemasBut.setEnabled( sField ); m_schemaFieldNameL.setEnabled( sField ); m_schemaFieldNameText.setEnabled( sField ); if ( sField ) { m_defaultSchemaL.setText( BaseMessages.getString( PKG, "AvroInputDialog.DefaultSchemaFilename.Label" ) ); } else { m_defaultSchemaL.setText( BaseMessages.getString( PKG, "AvroInputDialog.SchemaFilename.Label" ) ); } } 
/**
 * Previews the output of this step.
 * <p>
 * Snapshots the current (possibly unsaved) dialog settings into a temporary
 * meta object, generates a one-step preview transformation around it, asks
 * the user for a preview row count, runs the preview, and shows the
 * resulting rows (plus the log text if the preview run produced errors).
 */
private void preview() {
  // Capture the current dialog state without touching the real meta.
  AvroInputMeta tempMeta = new AvroInputMeta();
  setMeta( tempMeta );

  TransMeta previewMeta =
      TransPreviewFactory.generatePreviewTransformation( transMeta, tempMeta, m_stepnameText.getText() );

  // NOTE(review): both return values are discarded; these two calls appear
  // to have no effect — confirm whether they are leftover and removable.
  transMeta.getVariable( "Internal.Transformation.Filename.Directory" );
  previewMeta.getVariable( "Internal.Transformation.Filename.Directory" );

  // Ask the user how many rows to preview.
  // NOTE(review): the dialog title reuses the CsvInputDialog message key —
  // presumably deliberate reuse; verify the key resolves for this plugin.
  EnterNumberDialog numberDialog =
      new EnterNumberDialog( shell, props.getDefaultPreviewSize(), BaseMessages.getString( PKG,
          "CsvInputDialog.PreviewSize.DialogTitle" ), BaseMessages.getString( PKG,
          "AvroInputDialog.PreviewSize.DialogMessage" ) );
  int previewSize = numberDialog.open();

  if ( previewSize > 0 ) {
    TransPreviewProgressDialog progressDialog =
        new TransPreviewProgressDialog( shell, previewMeta, new String[] { m_stepnameText.getText() },
            new int[] { previewSize } );
    progressDialog.open();

    Trans trans = progressDialog.getTrans();
    String loggingText = progressDialog.getLoggingText();

    // If the preview run itself hit errors, show the log so the user sees why.
    if ( !progressDialog.isCancelled() ) {
      if ( trans.getResult() != null && trans.getResult().getNrErrors() > 0 ) {
        EnterTextDialog etd =
            new EnterTextDialog( shell, BaseMessages.getString( PKG, "System.Dialog.PreviewError.Title" ),
                BaseMessages.getString( PKG, "System.Dialog.PreviewError.Message" ), loggingText, true );
        etd.setReadOnly();
        etd.open();
      }
    }

    // Show whatever rows were produced (also reached when the run was cancelled).
    PreviewRowsDialog prd =
        new PreviewRowsDialog( shell, transMeta, SWT.NONE, m_stepnameText.getText(),
            progressDialog.getPreviewRowsMeta( m_stepnameText.getText() ),
            progressDialog.getPreviewRows( m_stepnameText.getText() ), loggingText );
    prd.open();
  }
}

/**
 * Fills column 1 of the lookup (variables) table with the names of the
 * fields arriving from the previous step. Shows an error dialog if the
 * previous step's fields cannot be determined.
 */
private void getIncomingFields() {
  try {
    RowMetaInterface r = transMeta.getPrevStepFields( stepname );
    if ( r != null ) {
      // Copy the incoming field names into the first column of the table.
      BaseStepDialog.getFieldsFromPrevious( r, m_lookupView, 1, new int[] { 1 }, null, -1, -1, null );
    }
  } catch ( KettleException e ) {
    new ErrorDialog( shell, BaseMessages.getString( PKG, "System.Dialog.GetFieldsFailed.Title" ),
        BaseMessages.getString( PKG, "System.Dialog.GetFieldsFailed.Message" ), e );
  }
}
/**
 * Populates the "avro field name" and "schema field name" combo boxes with
 * the names of the fields produced by the previous step, so the user can
 * pick which incoming field carries the Avro data / the Avro schema.
 * Does nothing if this step cannot be found in the transformation; shows an
 * error dialog if the previous step's fields cannot be resolved.
 */
private void populateFieldsCombo() {
  StepMeta stepMeta = transMeta.findStep( stepname );
  if ( stepMeta != null ) {
    try {
      RowMetaInterface rowMeta = transMeta.getPrevStepFields( stepMeta );
      if ( rowMeta != null && rowMeta.size() > 0 ) {
        // Rebuild both combos from scratch with the incoming field names.
        m_avroFieldNameText.removeAll();
        m_schemaFieldNameText.removeAll();
        for ( int i = 0; i < rowMeta.size(); i++ ) {
          ValueMetaInterface vm = rowMeta.getValueMeta( i );
          String fieldName = vm.getName();
          m_avroFieldNameText.add( fieldName );
          m_schemaFieldNameText.add( fieldName );
        }
      }
    } catch ( KettleException ex ) {
      new ErrorDialog( shell, BaseMessages.getString( PKG, "System.Dialog.GetFieldsFailed.Title" ),
          BaseMessages.getString( PKG, "System.Dialog.GetFieldsFailed.Message" ), ex );
    }
  }
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.physical.impl.partitionsender;

import java.io.IOException;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicIntegerArray;

import org.apache.drill.common.expression.ErrorCollector;
import org.apache.drill.common.expression.ErrorCollectorImpl;
import org.apache.drill.common.expression.LogicalExpression;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.exception.ClassTransformationException;
import org.apache.drill.exec.exception.OutOfMemoryException;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.expr.ClassGenerator;
import org.apache.drill.exec.expr.CodeGenerator;
import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
import org.apache.drill.exec.ops.AccountingDataTunnel;
import org.apache.drill.exec.ops.ExchangeFragmentContext;
import org.apache.drill.exec.ops.MetricDef;
import org.apache.drill.exec.ops.OperatorStats;
import org.apache.drill.exec.ops.RootFragmentContext;
import org.apache.drill.exec.physical.MinorFragmentEndpoint;
import org.apache.drill.exec.physical.config.HashPartitionSender;
import org.apache.drill.exec.physical.impl.BaseRootExec;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
import org.apache.drill.exec.record.CloseableRecordBatch;
import org.apache.drill.exec.record.FragmentWritableBatch;
import org.apache.drill.exec.record.RecordBatch;
import org.apache.drill.exec.record.RecordBatch.IterOutcome;
import org.apache.drill.exec.record.VectorWrapper;
import org.apache.drill.exec.server.options.OptionManager;
import org.apache.drill.exec.vector.CopyUtil;

import com.carrotsearch.hppc.IntArrayList;
import com.google.common.annotations.VisibleForTesting;
import com.sun.codemodel.JExpr;
import com.sun.codemodel.JExpression;
import com.sun.codemodel.JType;

/**
 * Root exec that hash-partitions the incoming record batches and sends each
 * partition to the corresponding downstream receiver fragment over a data
 * tunnel. The partitioning work may be spread over several sending threads
 * (see the constructor's thread-count heuristic); each thread owns a
 * contiguous slice of the outgoing-batch range.
 */
public class PartitionSenderRootExec extends BaseRootExec {
  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PartitionSenderRootExec.class);
  private RecordBatch incoming;
  private HashPartitionSender operator;
  // Wraps the per-thread sub-partitioners; null until the first real schema arrives.
  private PartitionerDecorator partitioner;
  private ExchangeFragmentContext context;
  // Number of downstream receivers (one outgoing batch per receiver).
  private final int outGoingBatchCount;
  private final HashPartitionSender popConfig;
  private final double cost;
  // remainingReceivers[id] flips 0 -> 1 exactly once when receiver `id` finishes.
  private final AtomicIntegerArray remainingReceivers;
  // Count of receivers still alive; when it hits 0 we stop consuming input.
  private final AtomicInteger remaingReceiverCount;
  private boolean done = false;
  private boolean first = true;
  private boolean closeIncoming;

  long minReceiverRecordCount = Long.MAX_VALUE;
  long maxReceiverRecordCount = Long.MIN_VALUE;
  // Heuristic thread count, and the count actually used (capped by receiver count).
  protected final int numberPartitions;
  protected final int actualPartitions;

  // Receiver ids that finished before the partitioner existed; terminated
  // retroactively in createPartitioner().
  private IntArrayList terminations = new IntArrayList();

  public enum Metric implements MetricDef {
    BATCHES_SENT,
    RECORDS_SENT,
    MIN_RECORDS,
    MAX_RECORDS,
    N_RECEIVERS,
    BYTES_SENT,
    SENDING_THREADS_COUNT,
    COST;

    @Override
    public int metricId() {
      return ordinal();
    }
  }

  public PartitionSenderRootExec(RootFragmentContext context,
                                 RecordBatch incoming,
                                 HashPartitionSender operator) throws OutOfMemoryException {
    this(context, incoming, operator, false);
  }

  /**
   * @param context       fragment context used for options, tunnels and codegen
   * @param incoming      upstream batch to partition
   * @param operator      the HashPartitionSender physical operator config
   * @param closeIncoming if true, close() will also close the incoming batch
   */
  public PartitionSenderRootExec(RootFragmentContext context,
                                 RecordBatch incoming,
                                 HashPartitionSender operator,
                                 boolean closeIncoming) throws OutOfMemoryException {
    super(context, context.newOperatorContext(operator, null), operator);
    this.incoming = incoming;
    this.operator = operator;
    this.closeIncoming = closeIncoming;
    this.context = context;
    outGoingBatchCount = operator.getDestinations().size();
    popConfig = operator;
    remainingReceivers = new AtomicIntegerArray(outGoingBatchCount);
    remaingReceiverCount = new AtomicInteger(outGoingBatchCount);
    stats.setLongStat(Metric.N_RECEIVERS, outGoingBatchCount);
    // Algorithm to figure out number of threads to parallelize output
    // numberOfRows/sliceTarget/numReceivers/threadfactor
    this.cost = operator.getChild().getCost();
    final OptionManager optMgr = context.getOptions();
    long sliceTarget = optMgr.getOption(ExecConstants.SLICE_TARGET).num_val;
    int threadFactor = optMgr.getOption(PlannerSettings.PARTITION_SENDER_THREADS_FACTOR.getOptionName()).num_val.intValue();
    int tmpParts = 1;
    if ( sliceTarget != 0 && outGoingBatchCount != 0 ) {
      tmpParts = (int) Math.round((((cost / (sliceTarget*1.0)) / (outGoingBatchCount*1.0)) / (threadFactor*1.0)));
      if ( tmpParts < 1) {
        tmpParts = 1;
      }
    }
    // An explicitly configured thread count overrides the heuristic entirely.
    final int imposedThreads = optMgr.getOption(PlannerSettings.PARTITION_SENDER_SET_THREADS.getOptionName()).num_val.intValue();
    if (imposedThreads > 0 ) {
      this.numberPartitions = imposedThreads;
    } else {
      this.numberPartitions = Math.min(tmpParts, optMgr.getOption(PlannerSettings.PARTITION_SENDER_MAX_THREADS.getOptionName()).num_val.intValue());
    }
    logger.info("Preliminary number of sending threads is: " + numberPartitions);
    // Never use more threads than there are receivers.
    this.actualPartitions = outGoingBatchCount > numberPartitions ? numberPartitions : outGoingBatchCount;
    this.stats.setLongStat(Metric.SENDING_THREADS_COUNT, actualPartitions);
    this.stats.setDoubleStat(Metric.COST, this.cost);
  }

  /**
   * Pulls one batch from the upstream operator and reacts to its outcome:
   * NONE flushes/finishes, OK_NEW_SCHEMA (re)builds the partitioner and then
   * falls through to OK, OK partitions the batch, STOP/OUT_OF_MEMORY abort.
   *
   * @return true to be called again, false when this sender is finished
   */
  @Override
  public boolean innerNext() {
    IterOutcome out;
    if (!done) {
      out = next(incoming);
    } else {
      // All receivers have finished; tell upstream to stop producing.
      incoming.kill(true);
      out = IterOutcome.NONE;
    }

    logger.debug("Partitioner.next(): got next record batch with status {}", out);
    if (first && out == IterOutcome.OK) {
      // The very first data batch implicitly establishes the schema.
      out = IterOutcome.OK_NEW_SCHEMA;
    }
    switch(out){
      case NONE:
        try {
          // send any pending batches
          if(partitioner != null) {
            partitioner.flushOutgoingBatches(true, false);
          } else {
            // No data ever arrived; still send an empty terminal batch.
            sendEmptyBatch(true);
          }
        } catch (ExecutionException e) {
          incoming.kill(false);
          logger.error("Error while creating partitioning sender or flushing outgoing batches", e);
          context.getExecutorState().fail(e.getCause());
        }
        return false;

      case OUT_OF_MEMORY:
        throw new OutOfMemoryException();

      case STOP:
        if (partitioner != null) {
          partitioner.clear();
        }
        return false;

      case OK_NEW_SCHEMA:
        try {
          // send all existing batches
          if (partitioner != null) {
            partitioner.flushOutgoingBatches(false, true);
            partitioner.clear();
          }
          createPartitioner();

          if (first) {
            // Send an empty batch for fast schema
            first = false;
            sendEmptyBatch(false);
          }
        } catch (ExecutionException e) {
          incoming.kill(false);
          logger.error("Error while flushing outgoing batches", e);
          context.getExecutorState().fail(e.getCause());
          return false;
        } catch (SchemaChangeException e) {
          incoming.kill(false);
          logger.error("Error while setting up partitioner", e);
          context.getExecutorState().fail(e);
          return false;
        }
        // Intentional fall-through: after (re)building the partitioner, the
        // batch that carried the new schema still needs to be partitioned.
      case OK:
        try {
          partitioner.partitionBatch(incoming);
        } catch (ExecutionException e) {
          context.getExecutorState().fail(e.getCause());
          incoming.kill(false);
          return false;
        }
        for (VectorWrapper<?> v : incoming) {
          v.clear();
        }
        return true;
      case NOT_YET:
      default:
        throw new IllegalStateException();
    }
  }

  /**
   * Builds the generated sub-partitioners and assigns each a contiguous
   * slice of the outgoing-batch index range [startIndex, endIndex); the
   * first {@code longTail} slices get one extra batch so the whole range is
   * covered. Also replays any receiver terminations that arrived before the
   * partitioner existed. On any setup failure, all sub-partitioners are
   * cleared before the exception propagates.
   */
  @VisibleForTesting
  protected void createPartitioner() throws SchemaChangeException {
    final int divisor = Math.max(1, outGoingBatchCount/actualPartitions);
    final int longTail = outGoingBatchCount % actualPartitions;
    final List<Partitioner> subPartitioners = createClassInstances(actualPartitions);
    int startIndex = 0;
    int endIndex = 0;
    boolean success = false;
    try {
      for (int i = 0; i < actualPartitions; i++) {
        startIndex = endIndex;
        // Last slice always extends to the end of the range.
        endIndex = (i < actualPartitions - 1) ? startIndex + divisor : outGoingBatchCount;
        if (i < longTail) {
          endIndex++;
        }
        final OperatorStats partitionStats = new OperatorStats(stats, true);
        subPartitioners.get(i).setup(context, incoming, popConfig, partitionStats, oContext,
          startIndex, endIndex);
      }

      partitioner = new PartitionerDecorator(subPartitioners, stats, context);
      // Replay terminations recorded while no partitioner existed yet.
      for (int index = 0; index < terminations.size(); index++) {
        partitioner.getOutgoingBatches(terminations.buffer[index]).terminate();
      }
      terminations.clear();
      success = true;
    } finally {
      if (!success) {
        for (Partitioner p : subPartitioners) {
          p.clear();
        }
      }
    }
  }

  /**
   * Generates, compiles and instantiates {@code actualPartitions} Partitioner
   * implementations. The generated eval computes
   * {@code abs(hashExpr % outGoingBatchCount)} as the destination bucket, and
   * the inner "OutgoingRecordBatch" class gets vector-copy code matching the
   * incoming selection-vector mode.
   *
   * @throws SchemaChangeException if the hash expression cannot be
   *         materialized against the incoming schema, or codegen/compilation fails
   */
  private List<Partitioner> createClassInstances(int actualPartitions) throws SchemaChangeException {
    // set up partitioning function
    final LogicalExpression expr = operator.getExpr();
    final ErrorCollector collector = new ErrorCollectorImpl();
    final ClassGenerator<Partitioner> cg ;

    cg = CodeGenerator.getRoot(Partitioner.TEMPLATE_DEFINITION, context.getOptions());
    cg.getCodeGenerator().plainJavaCapable(true);
    // Uncomment out this line to debug the generated code.
    // cg.getCodeGenerator().saveCodeForDebugging(true);
    ClassGenerator<Partitioner> cgInner = cg.getInnerGenerator("OutgoingRecordBatch");

    final LogicalExpression materializedExpr = ExpressionTreeMaterializer.materialize(expr, incoming, collector, context.getFunctionRegistry());
    if (collector.hasErrors()) {
      throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));
    }

    // generate code to copy from an incoming value vector to the destination partition's outgoing value vector
    JExpression bucket = JExpr.direct("bucket");

    // generate evaluate expression to determine the hash
    ClassGenerator.HoldingContainer exprHolder = cg.addExpr(materializedExpr);
    cg.getEvalBlock().decl(JType.parse(cg.getModel(), "int"), "bucket", exprHolder.getValue().mod(JExpr.lit(outGoingBatchCount)));
    cg.getEvalBlock()._return(cg.getModel().ref(Math.class).staticInvoke("abs").arg(bucket));

    CopyUtil.generateCopies(cgInner, incoming, incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE);

    try {
      // compile and setup generated code
      List<Partitioner> subPartitioners = context.getImplementationClass(cg, actualPartitions);
      return subPartitioners;
    } catch (ClassTransformationException | IOException e) {
      throw new SchemaChangeException("Failure while attempting to load generated class", e);
    }
  }

  /**
   * Find min and max record count seen across the outgoing batches and put them in stats.
   */
  private void updateAggregateStats() {
    for (Partitioner part : partitioner.getPartitioners() ) {
      for (PartitionOutgoingBatch o : part.getOutgoingBatches()) {
        long totalRecords = o.getTotalRecords();
        minReceiverRecordCount = Math.min(minReceiverRecordCount, totalRecords);
        maxReceiverRecordCount = Math.max(maxReceiverRecordCount, totalRecords);
      }
    }
    stats.setLongStat(Metric.MIN_RECORDS, minReceiverRecordCount);
    stats.setLongStat(Metric.MAX_RECORDS, maxReceiverRecordCount);
  }

  /**
   * Called when a downstream receiver fragment finishes. The compareAndSet
   * guarantees each receiver is processed at most once; its outgoing batch is
   * terminated (or queued in {@code terminations} if the partitioner does not
   * exist yet), and when no receivers remain this sender marks itself done.
   */
  @Override
  public void receivingFragmentFinished(FragmentHandle handle) {
    final int id = handle.getMinorFragmentId();
    if (remainingReceivers.compareAndSet(id, 0, 1)) {
      if (partitioner == null) {
        terminations.add(id);
      } else {
        partitioner.getOutgoingBatches(id).terminate();
      }

      int remaining = remaingReceiverCount.decrementAndGet();
      if (remaining == 0) {
        done = true;
      }
    }
  }

  /**
   * Publishes final min/max stats, clears the partitioner, and (if this exec
   * owns the incoming batch) closes it too.
   */
  @Override
  public void close() throws Exception {
    logger.debug("Partition sender stopping.");
    super.close();

    if (partitioner != null) {
      updateAggregateStats();
      partitioner.clear();
    }

    if (closeIncoming) {
      ((CloseableRecordBatch) incoming).close();
    }
  }

  /**
   * Sends a schema-only (record-less) batch to every destination, e.g. for
   * fast-schema delivery or to terminate receivers that never saw data.
   *
   * @param isLast true when this empty batch also signals end-of-stream
   */
  private void sendEmptyBatch(boolean isLast) {
    BatchSchema schema = incoming.getSchema();
    if (schema == null) {
      // If the incoming batch has no schema (possible when there are no input records),
      // create an empty schema to avoid NPE.
      schema = BatchSchema.newBuilder().build();
    }

    FragmentHandle handle = context.getHandle();
    for (MinorFragmentEndpoint destination : popConfig.getDestinations()) {
      AccountingDataTunnel tunnel = context.getDataTunnel(destination.getEndpoint());
      FragmentWritableBatch writableBatch = FragmentWritableBatch.getEmptyBatchWithSchema(
          isLast,
          handle.getQueryId(),
          handle.getMajorFragmentId(),
          handle.getMinorFragmentId(),
          operator.getOppositeMajorFragmentId(),
          destination.getId(),
          schema);
      // Time spent blocked on the tunnel is accounted as wait time.
      stats.startWait();
      try {
        tunnel.sendRecordBatch(writableBatch);
      } finally {
        stats.stopWait();
      }
    }
    stats.addLongStat(Metric.BATCHES_SENT, 1);
  }

  @VisibleForTesting
  protected PartitionerDecorator getPartitioner() {
    return partitioner;
  }
}
package edu.gemini.spModel.util; import edu.gemini.pot.sp.*; import edu.gemini.spModel.gemini.altair.InstAltair; import edu.gemini.spModel.gemini.obscomp.SPSiteQuality; import edu.gemini.spModel.obslog.ObsExecLog; import edu.gemini.spModel.obsrecord.ObsExecRecord; import edu.gemini.spModel.target.obsComp.TargetObsComp; import java.util.ArrayList; import java.util.Iterator; import java.util.List; /** * Utility class with operations on the science program tree model. */ public class SPTreeUtil { private static final List<ISPSeqComponent> _EMPTY_LIST = new ArrayList<>(0); /** * Return true if ans contains des (if des is a descendant of ans). * * @param anc check if this node is an ancestor * @param des check if this node is a descendant of node1 */ public static boolean nodeContainsNode(ISPNode anc, ISPNode des) { return anc != null && contains(anc, des); } private static boolean contains(ISPNode anc, ISPNode des) { if (des == null) return false; final ISPNode parent = des.getParent(); return anc.equals(parent) || contains(anc, parent); } /** * Finds the node with the given SPNodeKey if it is nested inside * <code>parent</code>. * * @param parent root of the subtree to search * @param key node key of the node to find * * @return matching ISPNode with the given <code>key</code> if it * exists inside of <code>parent</code>; <code>null</code> otherwise */ public static ISPNode findByKey(ISPNode parent, SPNodeKey key) { if (key.equals(parent.getNodeKey())) return parent; if (parent instanceof ISPContainerNode) { for (final ISPNode child : ((ISPContainerNode) parent).getChildren()) { final ISPNode res = findByKey(child, key); if (res != null) return res; } } return null; } /** * Find and return the nearest observation container corresponding to the given SP tree node. 
* * @param node the science program tree node * @return a group or program node, or null if not found in the node's hierarchy */ public static ISPObservationContainer findObservationContainer(ISPNode node) { if (node == null) return null; if (node instanceof ISPObservationContainer) return (ISPObservationContainer) node; do { ISPNode parent = node.getParent(); if (parent instanceof ISPObservationContainer) { return (ISPObservationContainer)parent; } node = parent; } while(node != null); return null; } /** * Find and return the TargetEnv tree node corresponding to the given observation * (That is the node whose data object is a TargetEnv). */ public static ISPObsComponent findTargetEnvNode(ISPObservation o) { for (ISPObsComponent obsComp : o.getObsComponents()) { if (isTargetEnv(obsComp)) { return obsComp; } } return null; } /** * Find and return the Observing Conditions (SPSiteQuality) tree node corresponding to the given observation */ public static ISPObsComponent findObsCondNode(ISPObservation o) { for (ISPObsComponent obsComp : o.getObsComponents()) { if (obsComp.getType().equals(SPSiteQuality.SP_TYPE)) { return obsComp; } } return null; } /** * Return true if the given component is a TargetEnv component. */ public static boolean isTargetEnv(ISPObsComponent obsComp) { return TargetObsComp.SP_TYPE.equals(obsComp.getType()); } /** * Return true if the given component is an instrument component. */ public static boolean isInstrument(ISPObsComponent obsComp) { return SPComponentBroadType.INSTRUMENT.equals(obsComp.getType().broadType); } /** * Return true if the given component is an Altair component. */ public static boolean isAltair(ISPObsComponent obsComp) { return InstAltair.SP_TYPE.equals(obsComp.getType()); } /** * Find and return the instrument nodes corresponding to the given observation. * The return value will be a list containing the instrument (if found) and an * Altair component (if found). 
*/ public static List<ISPObsComponent> findInstruments(ISPObservation o) { List<ISPObsComponent> result = new ArrayList<>(); if (o != null) { Iterator<ISPObsComponent> iter = o.getObsComponents().iterator(); while (iter.hasNext()) { ISPObsComponent obsComp = iter.next(); if (isInstrument(obsComp)) { result.add(obsComp); } } // do a second loop to make sure the Altair component comes after the instrument iter = o.getObsComponents().iterator(); while (iter.hasNext()) { ISPObsComponent obsComp = iter.next(); if (isAltair(obsComp)) { result.add(obsComp); } } } return result; } /** * Find and return the instrument node corresponding to the given observation. */ public static ISPObsComponent findInstrument(ISPObservation o) { if (o != null) { for (ISPObsComponent obsComp : o.getObsComponents()) { if (isInstrument(obsComp)) { return obsComp; } } } return null; } /** * If there is an ISPObsComponent of the given type in the given observation, * return it, otherwise return null. */ public static ISPObsComponent findObsComponent(ISPObservation o, SPComponentType type) { if (o == null || type == null) return null; for (ISPObsComponent obsComp : o.getObsComponents()) { if (obsComp.getType().equals(type)) return obsComp; } return null; } /** * If there is an ISPObsComponent of the given broad type in the given observation, * return it, otherwise return null. */ public static ISPObsComponent findObsComponentByBroadType(ISPObservation o, SPComponentBroadType broadType) { if (o == null || broadType == null) return null; for (ISPObsComponent obsComp : o.getObsComponents()) { if (obsComp.getType().broadType.equals(broadType)) return obsComp; } return null; } /** * If there is an ISPObsComponent of the given narrow type in the given observation, * return it, otherwise return null. 
*/ public static ISPObsComponent findObsComponentByNarrowType(ISPObservation o, String narrowType) { if (o == null || narrowType == null) return null; for (ISPObsComponent obsComp : o.getObsComponents()) { if (obsComp.getType().narrowType.equals(narrowType)) return obsComp; } return null; } /** * If there is an ISPSeqComponent of the given type in the given observation, * return it, otherwise return null. */ public static ISPSeqComponent findSeqComponent(ISPObservation o, SPComponentType type) { if (o == null || type == null) return null; ISPSeqComponent sc = o.getSeqComponent(); if (sc != null && sc.getType().equals(type)) return sc; return findSeqComponent(sc, type); } /** * If there is an ISPSeqComponent of the given type under the given sequence node, * return it, otherwise return null. */ public static ISPSeqComponent findSeqComponent(ISPSeqComponent sc, SPComponentType type) { if (sc == null || type == null) return null; List<ISPSeqComponent> l = DBSequenceNodeService.findSeqComponentsByType(sc, type, true); if (l != null && l.size() != 0) { return l.get(0); } return null; } /** * Return a list containing all of the sequence nodes with the given * type in the given observation. * * @param o the observation node * @param type the node type * @return a list of ISPSeqComponent objects with the given type */ public static List<ISPSeqComponent> findSeqComponents(ISPObservation o, SPComponentType type) { return (o == null || type == null) ? _EMPTY_LIST : findSeqComponents(o.getSeqComponent(), type); } /** * Return a list of all of the sequence components with the given type, * starting the search at the given sequence component. 
*/ public static List<ISPSeqComponent> findSeqComponents(ISPSeqComponent sc, SPComponentType type) { if (sc == null || type == null) return _EMPTY_LIST; return DBSequenceNodeService.findSeqComponentsByType(sc, type, false); } /** * If there is an ISPSeqComponent with the given narrow type in the given sequence, * return it, otherwise return null. * * @param sc the starting sequence component * @param narrowType string that should match a a SPComponentType.narrowType value * @param noObserve if true, search only as far as the first observe node */ public static ISPSeqComponent findSeqComponentByNarrowType(ISPSeqComponent sc, String narrowType, boolean noObserve) { if (sc == null || narrowType == null) return null; List<ISPSeqComponent> l = DBSequenceNodeService.findSeqComponentsByNarrowType(sc, narrowType, false, noObserve); if (l != null && l.size() != 0) { return l.get(0); } return null; } /** * Returns the {@link edu.gemini.spModel.obsrecord.ObsExecRecord} for the given observation from the { * @link ObsLogDataObject} if found, otherwise null. */ public static ObsExecRecord getObsRecord(ISPObservation obs) { final ISPObsExecLog log = obs.getObsExecLog(); return (log == null) ? null : ((ObsExecLog) log.getDataObject()).getRecord(); } }
/**
 * Copyright (C) 2014 Stratio (http://stratio.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.stratio.ingestion.sink.cassandra;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Sink.Status;
import org.apache.flume.Transaction;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.apache.thrift.transport.TTransportException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;
import com.google.common.base.Charsets;

/**
 * Integration tests verifying how CassandraSink maps Flume event header
 * values onto each CQL column type: a value of the right shape should be
 * written (Status.READY), an incompatible value should make the sink back
 * off and leave the event in the channel (Status.BACKOFF).
 */
@RunWith(JUnit4.class)
public class CassandraDataTypesIT {

    private static final Logger log = LoggerFactory.getLogger(CassandraDataTypesIT.class);

    // Test schema: one column per CQL data type under test.
    private final static String KEYSPACE = "keyspaceTest";
    private final static String TABLE = "tableTest";
    private final static String PRIMARY_KEY = "id_field";
    private final static String TEXT_FIELD = "text_field";
    private final static String VARCHAR_FIELD = "varchar_field";
    private final static String VARINT_FIELD = "varint_field";
    private final static String ASCII_FIELD = "ascii_field";
    private final static String BOOLEAN_FIELD = "boolean_field";
    private final static String DECIMAL_FIELD = "decimal_field";
    private final static String DOUBLE_FIELD = "double_field";
    private final static String FLOAT_FIELD = "float_field";
    private final static String INET_FIELD = "inet_field";
    private final static String INT_FIELD = "int_field";
    private final static String LIST_FIELD = "list_field";
    private final static String MAP_FIELD = "map_field";
    private final static String SET_FIELD = "set_field";
    private final static String TIMESTAMP_FIELD = "timestamp_field";
    private final static String UUID_FIELD = "uuid_field";
    private final static String BIGINT_FIELD = "bigint_field";

    private MemoryChannel channel;
    private CassandraSink sink;
    // Shared header map; each test adds the field under test before sending.
    private Map<String, String> headers;

    /**
     * Creates the keyspace/table in a test Cassandra instance, then wires a
     * CassandraSink (batchSize 1, QUORUM consistency) to a MemoryChannel.
     */
    @Before
    public void setup() throws TTransportException, IOException, InterruptedException {
        final Context context = new Context();
        final InetSocketAddress contactPoint = CassandraTestHelper.getCassandraContactPoint();
        context.put("tables", KEYSPACE + "." + TABLE);
        context.put("hosts", contactPoint.getAddress().getHostAddress());
        context.put("batchSize", "1");
        context.put("consistency", "QUORUM");

        // Build the schema directly with the driver, outside the sink.
        Cluster cluster = Cluster.builder()
                .addContactPointsWithPorts(Collections.singletonList(contactPoint))
                .build();
        Session session = cluster.connect();
        session.execute(
                "CREATE KEYSPACE IF NOT EXISTS keyspaceTest WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 };");
        session.execute("CREATE TABLE if not exists keyspaceTest.tableTest ("
                + PRIMARY_KEY + " uuid, "
                + TEXT_FIELD + " text, "
                + VARCHAR_FIELD + " varchar, "
                + VARINT_FIELD + " varint, "
                + ASCII_FIELD + " ascii, "
                + BOOLEAN_FIELD + " boolean, "
                + DECIMAL_FIELD + " decimal, "
                + DOUBLE_FIELD + " double, "
                + FLOAT_FIELD + " float, "
                + INET_FIELD + " inet, "
                + INT_FIELD + " int, "
                + LIST_FIELD + " list<TEXT>, "
                + MAP_FIELD + " map<TEXT,INT>, "
                + SET_FIELD + " set<TEXT>, "
                + TIMESTAMP_FIELD + " timestamp, "
                + UUID_FIELD + " uuid, "
                + BIGINT_FIELD + " bigint, PRIMARY KEY (" + PRIMARY_KEY + "));");
        session.close();
        cluster.close();

        sink = new CassandraSink();
        sink.configure(context);

        Context channelContext = new Context();
        channelContext.put("capacity", "10000");
        channelContext.put("transactionCapacity", "200");
        channel = new MemoryChannel();
        channel.setName("junitChannel");
        Configurables.configure(channel, channelContext);

        sink.setChannel(channel);
        sink.start();

        headers = new HashMap<String, String>();
        headers.put(PRIMARY_KEY, UUID.randomUUID().toString());
    }

    @After
    public void tearDown() {
        sink.stop();
    }

    @Test
    public void textFieldAllowsText() {
        testFieldType(TEXT_FIELD, "text", Status.READY);
    }

    @Test
    public void intFieldAllowsIntegers() {
        testFieldType(INT_FIELD, "123", Status.READY);
    }

    @Test
    public void intFieldDoesNotAllowText() {
        testFieldType(INT_FIELD, "text", Status.BACKOFF);
    }

    @Test
    public void varcharFieldAllowsText() {
        testFieldType(VARCHAR_FIELD, "varchar", Status.READY);
    }

    @Test
    public void varintFieldAllowsIntegers() {
        testFieldType(VARINT_FIELD, "123", Status.READY);
    }

    @Test
    public void varintFieldDoesNotAllowText() {
        testFieldType(VARINT_FIELD, "text", Status.BACKOFF);
    }

    @Test
    public void asciiFieldAllowsText() {
        testFieldType(ASCII_FIELD, "abcd", Status.READY);
    }

    @Test
    public void booleanFieldAllowsAnything() {
        testFieldType(BOOLEAN_FIELD, "false", Status.READY);
    }

    @Test
    public void decimalFieldAllowsFloats() {
        testFieldType(DECIMAL_FIELD, "123.45", Status.READY);
    }

    @Test
    public void decimalFieldAllowsIntegers() {
        testFieldType(DECIMAL_FIELD, "123", Status.READY);
    }

    @Test
    public void decimalFieldDoesNotAllowText() {
        testFieldType(DECIMAL_FIELD, "text", Status.BACKOFF);
    }

    @Test
    public void doubleFieldAllowsIntegers() {
        testFieldType(DOUBLE_FIELD, "123", Status.READY);
    }

    @Test
    public void doubleFieldDoesNotAllowText() {
        testFieldType(DOUBLE_FIELD, "text", Status.BACKOFF);
    }

    @Test
    public void floatFieldAllowsFloats() {
        testFieldType(FLOAT_FIELD, "123.45", Status.READY);
    }

    @Test
    public void floatFieldAllowsIntegers() {
        testFieldType(FLOAT_FIELD, "123", Status.READY);
    }

    @Test
    public void floatFieldDoesNotAllowText() {
        testFieldType(FLOAT_FIELD, "text", Status.BACKOFF);
    }

    @Test
    public void inetFieldAllowsInet() {
        testFieldType(INET_FIELD, "123.10.123.10", Status.READY);
    }

    @Test
    public void inetFieldDoesNotAllowText() {
        testFieldType(INET_FIELD, "text", Status.BACKOFF);
    }

    @Test
    public void listFieldAllowsList() {
        testFieldType(LIST_FIELD, "[\'a\', \'b\', \'c\', \'d\', \'e\']", Status.READY);
    }

    @Test
    public void mapFieldAllowsMap() {
        testFieldType(MAP_FIELD, "{\'a\': 0, \'c\': 1}", Status.READY);
    }

    @Test
    public void setFieldAllowsList() {
        testFieldType(SET_FIELD, "{\'a\', \'b\', \'c\', \'d\', \'e\'}", Status.READY);
    }

    @Test
    public void timestampFieldAllowsDatesWithTheFormatDefined() {
        // Epoch millis and ISO-8601 (with and without millis) are accepted.
        testFieldType(TIMESTAMP_FIELD, "1231234", Status.READY);
        testFieldType(TIMESTAMP_FIELD, "2010-12-20T10:20:20", Status.READY);
        testFieldType(TIMESTAMP_FIELD, "2010-12-20T10:20:20.000", Status.READY);
    }

    @Test
    public void timestampFieldDoesNotAllowDatesWithOtherFormatThatTheDefined() {
        testFieldType(TIMESTAMP_FIELD, "1/2/3/4/5", Status.BACKOFF);
    }

    @Test
    public void UUIDFieldAllowsUUID() {
        testFieldType(UUID_FIELD, "550e8400-e29b-41d4-a716-446655440000", Status.READY);
    }

    @Test
    public void UUIDFieldDoesNotAllowInvalidUUID() {
        testFieldType(UUID_FIELD, "550e8400", Status.BACKOFF);
    }

    @Test
    public void bigintFieldAllowsIntegers() {
        testFieldType(BIGINT_FIELD, "12345", Status.READY);
    }

    @Test
    public void bigintFieldDoesNotAllowText() {
        testFieldType(BIGINT_FIELD, "text", Status.BACKOFF);
    }

    /**
     * Sends one event carrying {@code field=value} through the sink and checks:
     * on success the sink returns the expected status and consumes the event;
     * on failure the sink throws EventDeliveryException and the event remains
     * in the channel for redelivery.
     */
    private void testFieldType(final String field, final String value, final Status result) {
        headers.put(field, value);
        addEventToChannel(headers);
        boolean thrown = false;
        try {
            Status status = sink.process();
            Assert.assertEquals(result, status);
        } catch (EventDeliveryException ex) {
            thrown = true;
        }
        // Peek at the channel to see whether the event was consumed.
        final Transaction tx = channel.getTransaction();
        tx.begin();
        final Event nextEvent = channel.take();
        tx.commit();
        tx.close();
        if (result == Status.READY) {
            Assert.assertFalse(thrown);
            Assert.assertNull(nextEvent);
        } else {
            Assert.assertTrue(thrown);
            Assert.assertNotNull(nextEvent);
        }
    }

    // Puts a single event with the given headers into the channel inside its
    // own transaction.
    private void addEventToChannel(Map<String, String> headers) {
        Event event = EventBuilder.withBody("body", Charsets.UTF_8, headers);
        Transaction transaction = channel.getTransaction();
        transaction.begin();
        channel.put(event);
        transaction.commit();
        transaction.close();
    }
}
package com.mapswithme.maps.widget.placepage;

import android.support.annotation.DrawableRes;
import android.support.annotation.NonNull;
import android.support.annotation.StringRes;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;

import com.cocosw.bottomsheet.BottomSheet;
import com.mapswithme.maps.MwmApplication;
import com.mapswithme.maps.R;
import com.mapswithme.maps.routing.RoutingController;
import com.mapswithme.util.BottomSheetHelper;
import com.mapswithme.util.ThemeUtils;
import com.mapswithme.util.UiUtils;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Manages the row of action buttons at the bottom of the place page.  At most
 * {@code R.integer.pp_buttons_max} buttons are shown inline; the rest overflow
 * into a bottom sheet behind a MORE button.
 */
public final class PlacePageButtons
{
  // Lookup from a partner's numeric index to its PartnerItem.
  private static final Map<Integer, PartnerItem> PARTNERS_ITEMS = new HashMap<Integer, PartnerItem>()
  {{
    // ---------------------------------------------------------------------------------------------
    // Warning: the following code is autogenerated.
    // Do NOT change it manually.
    // %PartnersExtender.PartnerItemMap
    put(PartnerItem.PARTNER1.getIndex(), PartnerItem.PARTNER1);
    put(PartnerItem.PARTNER2.getIndex(), PartnerItem.PARTNER2);
    put(PartnerItem.PARTNER3.getIndex(), PartnerItem.PARTNER3);
    put(PartnerItem.PARTNER18.getIndex(), PartnerItem.PARTNER18);
    put(PartnerItem.PARTNER19.getIndex(), PartnerItem.PARTNER19);
    put(PartnerItem.PARTNER20.getIndex(), PartnerItem.PARTNER20);
    // /%PartnersExtender.PartnerItemMap
    // End of autogenerated code.
    // ---------------------------------------------------------------------------------------------
  }};

  // Maximum number of buttons shown inline (from resources).
  private final int mMaxButtons;

  private final PlacePageView mPlacePage;
  private final ViewGroup mFrame;
  private final ItemListener mItemListener;

  // Last set of buttons rendered; used to skip redundant re-layout.
  private List<PlacePageButton> mPrevItems;

  /** Contract for anything renderable as a place page button. */
  interface PlacePageButton
  {
    @StringRes
    int getTitle();

    ImageResources getIcon();

    @NonNull
    ButtonType getType();

    @DrawableRes
    int getBackgroundResource();

    /** Pair of drawable ids for the enabled/disabled visual states. */
    class ImageResources
    {
      @DrawableRes
      private final int mEnabledStateResId;
      @DrawableRes
      private final int mDisabledStateResId;

      public ImageResources(@DrawableRes int enabledStateResId, @DrawableRes int disabledStateResId)
      {
        mEnabledStateResId = enabledStateResId;
        mDisabledStateResId = disabledStateResId;
      }

      // Single-resource variant: same drawable for both states.
      public ImageResources(@DrawableRes int enabledStateResId)
      {
        this(enabledStateResId, enabledStateResId);
      }

      @DrawableRes
      public int getDisabledStateResId()
      {
        return mDisabledStateResId;
      }

      @DrawableRes
      public int getEnabledStateResId()
      {
        return mEnabledStateResId;
      }

      /**
       * Placeholder for buttons whose icon is resolved some other way
       * (e.g. from a theme attribute); calling either getter is an error
       * unless the subclass overrides it.
       */
      public static class Stub extends ImageResources
      {
        public Stub()
        {
          super(UiUtils.NO_ID);
        }

        @Override
        public int getDisabledStateResId()
        {
          throw new UnsupportedOperationException("Not supported here");
        }

        @Override
        public int getEnabledStateResId()
        {
          throw new UnsupportedOperationException("Not supported here");
        }
      }
    }
  }

  enum ButtonType
  {
    // ---------------------------------------------------------------------------------------------
    // Warning: the following code is autogenerated.
    // Do NOT change it manually.
    // %PartnersExtender.ButtonType
    PARTNER1,
    PARTNER2,
    PARTNER3,
    PARTNER18,
    PARTNER19,
    PARTNER20,
    // /%PartnersExtender.ButtonType
    // End of autogenerated code.
    // ---------------------------------------------------------------------------------------------
    BOOKING,
    BOOKING_SEARCH,
    OPENTABLE,
    BACK,
    BOOKMARK,
    ROUTE_FROM,
    ROUTE_TO,
    ROUTE_ADD,
    ROUTE_REMOVE,
    SHARE,
    MORE,
    CALL
  }

  /** Sponsored-partner buttons (autogenerated constants). */
  enum PartnerItem implements PlacePageButtons.PlacePageButton
  {
    // ---------------------------------------------------------------------------------------------
    // Warning: the following code is autogenerated.
    // Do NOT change it manually.
    // %PartnersExtender.PartnerItem
    PARTNER1(
        1,
        R.string.sponsored_partner1_action,
        new ImageResources(R.drawable.ic_24px_logo_partner1),
        R.drawable.button_partner1,
        ButtonType.PARTNER1),
    PARTNER2(
        2,
        R.string.sponsored_partner2_action,
        new ImageResources(R.drawable.ic_24px_logo_partner2),
        R.drawable.button_partner2,
        ButtonType.PARTNER2),
    PARTNER3(
        3,
        R.string.sponsored_partner3_action,
        new ImageResources(R.drawable.ic_24px_logo_partner3),
        R.drawable.button_partner3,
        ButtonType.PARTNER3),
    PARTNER18(
        18,
        R.string.sponsored_partner18_action,
        new ImageResources(R.drawable.ic_24px_logo_partner18),
        R.drawable.button_partner18,
        ButtonType.PARTNER18),
    PARTNER19(
        19,
        R.string.sponsored_partner19_action,
        new ImageResources(R.drawable.ic_24px_logo_partner19),
        R.drawable.button_partner19,
        ButtonType.PARTNER19),
    PARTNER20(
        20,
        R.string.sponsored_partner20_action,
        new ImageResources(R.drawable.ic_24px_logo_partner20),
        R.drawable.button_partner20,
        ButtonType.PARTNER20);
    // /%PartnersExtender.PartnerItem
    // End of autogenerated code.
    // ---------------------------------------------------------------------------------------------

    private final int mIndex;
    @StringRes
    private final int mTitleId;
    private final ImageResources mIconId;
    @DrawableRes
    private final int mBackgroundId;
    @NonNull
    private final ButtonType mButtonType;

    PartnerItem(int index, @StringRes int titleId, @NonNull ImageResources iconId,
                @DrawableRes int backgroundId, @NonNull ButtonType buttonType)
    {
      mIndex = index;
      mTitleId = titleId;
      mIconId = iconId;
      mBackgroundId = backgroundId;
      mButtonType = buttonType;
    }

    public int getIndex()
    {
      return mIndex;
    }

    @StringRes
    @Override
    public int getTitle()
    {
      return mTitleId;
    }

    @NonNull
    @Override
    public ImageResources getIcon()
    {
      return mIconId;
    }

    @NonNull
    @Override
    public ButtonType getType()
    {
      return mButtonType;
    }

    @DrawableRes
    @Override
    public int getBackgroundResource()
    {
      return mBackgroundId;
    }
  }

  /** Built-in (non-partner) buttons. */
  enum Item implements PlacePageButtons.PlacePageButton
  {
    BOOKING(
        R.string.book_button,
        new ImageResources(R.drawable.ic_booking),
        ButtonType.BOOKING)
    {
      @DrawableRes
      @Override
      public int getBackgroundResource()
      {
        return R.drawable.button_booking;
      }
    },

    BOOKING_SEARCH(
        R.string.booking_search,
        new ImageResources(R.drawable.ic_menu_search),
        ButtonType.BOOKING_SEARCH)
    {
      @DrawableRes
      @Override
      public int getBackgroundResource()
      {
        return R.drawable.button_booking;
      }
    },

    OPENTABLE(
        R.string.book_button,
        new ImageResources(R.drawable.ic_opentable),
        ButtonType.OPENTABLE)
    {
      @DrawableRes
      @Override
      public int getBackgroundResource()
      {
        return R.drawable.button_opentable;
      }
    },

    BACK(
        R.string.back,
        new ImageResources.Stub()
        {
          @Override
          public int getEnabledStateResId()
          {
            // Icon comes from the current theme, not a fixed drawable.
            return ThemeUtils.getResource(MwmApplication.get(), android.R.attr.homeAsUpIndicator);
          }
        },
        ButtonType.BACK),

    BOOKMARK(
        R.string.bookmark,
        new ImageResources(R.drawable.ic_bookmarks_off),
        ButtonType.BOOKMARK),

    ROUTE_FROM(
        R.string.p2p_from_here,
        new ImageResources(R.drawable.ic_route_from),
        ButtonType.ROUTE_FROM),

    ROUTE_TO(
        R.string.p2p_to_here,
        new ImageResources(R.drawable.ic_route_to),
        ButtonType.ROUTE_TO),

    ROUTE_ADD(
        R.string.placepage_add_stop,
        new ImageResources(R.drawable.ic_route_via),
        ButtonType.ROUTE_ADD),

    ROUTE_REMOVE(
        R.string.placepage_remove_stop,
        new ImageResources(R.drawable.ic_route_remove),
        ButtonType.ROUTE_REMOVE),

    SHARE(
        R.string.share,
        new ImageResources(R.drawable.ic_share),
        ButtonType.SHARE),

    // Must not be used outside
    MORE(
        R.string.placepage_more_button,
        new ImageResources(R.drawable.bs_ic_more),
        ButtonType.MORE),

    CALL(
        R.string.placepage_call_button,
        new ImageResources(R.drawable.ic_place_page_phone),
        ButtonType.CALL);

    @StringRes
    private final int mTitleId;
    @NonNull
    private final ImageResources mIconId;
    @NonNull
    private final ButtonType mButtonType;

    Item(@StringRes int titleId, @NonNull ImageResources iconId, @NonNull ButtonType buttonType)
    {
      mTitleId = titleId;
      mIconId = iconId;
      mButtonType = buttonType;
    }

    @StringRes
    @Override
    public int getTitle()
    {
      return mTitleId;
    }

    @NonNull
    @Override
    public ImageResources getIcon()
    {
      return mIconId;
    }

    @NonNull
    @Override
    public ButtonType getType()
    {
      return mButtonType;
    }

    @DrawableRes
    @Override
    public int getBackgroundResource()
    {
      // Only sponsored buttons (and a few overriding constants) carry a
      // custom background.
      throw new UnsupportedOperationException("Not supported!");
    }
  }

  /** Callbacks for preparing and handling clicks on the rendered buttons. */
  interface ItemListener
  {
    void onPrepareVisibleView(@NonNull PlacePageButton item, @NonNull View frame,
                              @NonNull ImageView icon, @NonNull TextView title);
    void onItemClick(PlacePageButton item);
  }

  PlacePageButtons(PlacePageView placePage, ViewGroup frame, ItemListener itemListener)
  {
    mPlacePage = placePage;
    mFrame = frame;
    mItemListener = itemListener;

    mMaxButtons = mPlacePage.getContext().getResources().getInteger(R.integer.pp_buttons_max);
  }

  /**
   * Resolves a partner button by its numeric index; fails fast on an
   * unknown index since the map is generated from the same source.
   */
  @NonNull
  static PlacePageButtons.PlacePageButton getPartnerItem(int partnerIndex)
  {
    PlacePageButtons.PlacePageButton item = PARTNERS_ITEMS.get(partnerIndex);
    if (item == null)
      throw new AssertionError("Wrong partner index: " + partnerIndex);
    return item;
  }

  /**
   * Produces the final ordered button list: inserts MORE when the list
   * overflows and reshuffles routing-related buttons so the important ones
   * stay visible inline.
   */
  private @NonNull List<PlacePageButtons.PlacePageButton> collectButtons(List<PlacePageButtons.PlacePageButton> items)
  {
    List<PlacePageButtons.PlacePageButton> res = new ArrayList<>(items);
    if (res.size() > mMaxButtons)
      res.add(mMaxButtons - 1, Item.MORE);

    // Swap ROUTE_FROM and ROUTE_TO if the latter one was pressed out to bottomsheet
    int from = res.indexOf(Item.ROUTE_FROM);
    if (from > -1)
    {
      int addStop = res.indexOf(Item.ROUTE_ADD);
      int to = res.indexOf(Item.ROUTE_TO);
      if ((to > from && to >= mMaxButtons) || (to > from && addStop >= mMaxButtons))
        Collections.swap(res, from, to);

      if (addStop >= mMaxButtons)
      {
        from = res.indexOf(Item.ROUTE_FROM);
        if (addStop > from)
          Collections.swap(res, from, addStop);
      }

      preserveRoutingButtons(res, Item.CALL);
      preserveRoutingButtons(res, Item.BOOKING);
      preserveRoutingButtons(res, Item.BOOKING_SEARCH);
      from = res.indexOf(Item.ROUTE_FROM);
      to = res.indexOf(Item.ROUTE_TO);
      if (from < mMaxButtons && from > to)
        Collections.swap(res, to, from);
    }
    return res;
  }

  // While routing is active, pushes itemToShift into the overflow area so the
  // routing buttons keep their inline slots.
  private void preserveRoutingButtons(@NonNull List<PlacePageButton> items, @NonNull Item itemToShift)
  {
    if (!RoutingController.get().isNavigating() && !RoutingController.get().isPlanning())
      return;

    int pos = items.indexOf(itemToShift);
    if (pos > -1)
    {
      items.remove(pos);
      items.add(mMaxButtons, itemToShift);
      int to = items.indexOf(Item.ROUTE_TO);
      if (items.indexOf(Item.ROUTE_ADD) > -1)
      {
        items.remove(Item.ROUTE_ADD);
        items.remove(Item.ROUTE_FROM);
        items.add(to + 1, Item.ROUTE_ADD);
        items.add(mMaxButtons, Item.ROUTE_FROM);
      }
      else
      {
        items.remove(Item.ROUTE_FROM);
        items.add(to + 1, Item.ROUTE_FROM);
      }
    }
  }

  // Shows the overflow buttons (index >= mMaxButtons) in a bottom sheet.
  private void showPopup(final List<PlacePageButton> buttons)
  {
    BottomSheetHelper.Builder bs = new BottomSheetHelper.Builder(mPlacePage.getActivity());
    for (int i = mMaxButtons; i < buttons.size(); i++)
    {
      PlacePageButton bsItem = buttons.get(i);
      int iconRes = bsItem.getIcon().getEnabledStateResId();
      bs.sheet(i, iconRes, bsItem.getTitle());
    }

    BottomSheet bottomSheet = bs.listener(new MenuItem.OnMenuItemClickListener()
    {
      @Override
      public boolean onMenuItemClick(MenuItem item)
      {
        mItemListener.onItemClick(buttons.get(item.getItemId()));
        return true;
      }
    }).build();

    BottomSheetHelper.tint(bottomSheet);
    bottomSheet.show();
  }

  // Inflates one inline button view and wires its click handling.
  private View createButton(@NonNull final List<PlacePageButton> items,
                            @NonNull final PlacePageButton current)
  {
    LayoutInflater inflater = LayoutInflater.from(mPlacePage.getContext());
    View parent = inflater.inflate(R.layout.place_page_button, mFrame, false);

    ImageView icon = (ImageView) parent.findViewById(R.id.icon);
    TextView title = (TextView) parent.findViewById(R.id.title);

    title.setText(current.getTitle());
    icon.setImageResource(current.getIcon().getEnabledStateResId());
    mItemListener.onPrepareVisibleView(current, parent, icon, title);
    parent.setOnClickListener(new ShowPopupClickListener(current, items));
    return parent;
  }

  /** Renders the given buttons; skips work if nothing changed since last time. */
  void setItems(List<PlacePageButton> items)
  {
    final List<PlacePageButton> buttons = collectButtons(items);
    if (buttons.equals(mPrevItems))
      return;

    mFrame.removeAllViews();
    int count = Math.min(buttons.size(), mMaxButtons);
    for (int i = 0; i < count; i++)
      mFrame.addView(createButton(buttons, buttons.get(i)));

    mPrevItems = buttons;
  }

  // Click handler: MORE opens the overflow sheet, anything else is forwarded
  // to the listener.
  private class ShowPopupClickListener implements View.OnClickListener
  {
    @NonNull
    private final PlacePageButton mCurrent;
    @NonNull
    private final List<PlacePageButton> mItems;

    public ShowPopupClickListener(@NonNull PlacePageButton current,
                                  @NonNull List<PlacePageButton> items)
    {
      mCurrent = current;
      mItems = items;
    }

    @Override
    public void onClick(View v)
    {
      if (mCurrent == Item.MORE)
        showPopup(mItems);
      else
        mItemListener.onItemClick(mCurrent);
    }
  }
}
/* * Copyright (C) 2014-2015 OMRON Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package omron.SimpleDemo; import android.annotation.SuppressLint; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.app.DialogFragment; import android.bluetooth.BluetoothDevice; import android.content.Context; import android.content.DialogInterface; import android.os.Bundle; import android.view.LayoutInflater; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.ListView; import android.widget.TextView; import android.widget.Toast; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import omron.HVC.BleDeviceSearch; import omron.HVC.HVC; import omron.HVC.HVC_BLE; import omron.HVC.HVC_PRM; import omron.HVC.HVC_RES; import omron.HVC.HVC_RES.DetectionResult; import omron.HVC.HVC_RES.FaceResult; import omron.HVC.HVCBleCallback; public class MainActivity extends Activity { public static final int EXECUTE_STOP = 0; public static final int EXECUTE_START = 1; public static final int EXECUTE_END = -1; private HVC_BLE hvcBle = null; private HVC_PRM hvcPrm = null; private HVC_RES hvcRes = null; private HVCDeviceThread hvcThread = null; private static int isExecute = 0; private static int nSelectDeviceNo = -1; private static List<BluetoothDevice> deviceList = null; private static DeviceDialogFragment newFragment = 
null; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); hvcBle = new HVC_BLE(); hvcPrm = new HVC_PRM(); hvcRes = new HVC_RES(); hvcBle.setCallBack(hvcCallback); hvcThread = new HVCDeviceThread(); hvcThread.start(); } @Override public void onDestroy() { isExecute = EXECUTE_END; while ( isExecute == EXECUTE_END ); if ( hvcBle != null ) { try { hvcBle.finalize(); } catch (Throwable e) { // TODO Auto-generated catch block e.printStackTrace(); } } hvcBle = null; super.onDestroy(); } private class HVCDeviceThread extends Thread { @Override public void run() { isExecute = EXECUTE_STOP; while (isExecute != EXECUTE_END) { BluetoothDevice device = SelectHVCDevice("OMRON_HVC.*|omron_hvc.*"); if ( (device == null) || (isExecute != EXECUTE_START) ) { continue; } hvcBle.connect(getApplicationContext(), device); wait(15); hvcPrm.cameraAngle = HVC_PRM.HVC_CAMERA_ANGLE.HVC_CAMERA_ANGLE_0; hvcPrm.face.MinSize = 100; hvcPrm.face.MaxSize = 400; hvcBle.setParam(hvcPrm); wait(15); while ( isExecute == EXECUTE_START ) { int nUseFunc = HVC.HVC_ACTIV_BODY_DETECTION | HVC.HVC_ACTIV_HAND_DETECTION | HVC.HVC_ACTIV_FACE_DETECTION | HVC.HVC_ACTIV_FACE_DIRECTION | HVC.HVC_ACTIV_AGE_ESTIMATION | HVC.HVC_ACTIV_GENDER_ESTIMATION | HVC.HVC_ACTIV_GAZE_ESTIMATION | HVC.HVC_ACTIV_BLINK_ESTIMATION | HVC.HVC_ACTIV_EXPRESSION_ESTIMATION; hvcBle.execute(nUseFunc, hvcRes); wait(30); } hvcBle.disconnect(); } isExecute = EXECUTE_STOP; } public void wait(int nWaitCount) { do { try { Thread.sleep(1000); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } if ( !hvcBle.IsBusy() ) { return; } nWaitCount--; } while ( nWaitCount > 0 ); } } private BluetoothDevice SelectHVCDevice(String regStr) { if ( nSelectDeviceNo < 0 ) { if ( newFragment != null ) { BleDeviceSearch bleSearch = new BleDeviceSearch(getApplicationContext()); // Show toast showToast("You can select a device"); while ( newFragment 
!= null ) { deviceList = bleSearch.getDevices(); try { Thread.sleep(1000); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } bleSearch.stopDeviceSearch(getApplicationContext()); } if ( nSelectDeviceNo > -1 ) { // Generate pattern to determine Pattern p = Pattern.compile(regStr); Matcher m = p.matcher(deviceList.get(nSelectDeviceNo).getName()); if ( m.find() ) { // Find HVC device return deviceList.get(nSelectDeviceNo); } nSelectDeviceNo = -1; } return null; } return deviceList.get(nSelectDeviceNo); } private final HVCBleCallback hvcCallback = new HVCBleCallback() { @Override public void onConnected() { // Show toast showToast("Selected device has connected"); } @Override public void onDisconnected() { // Show toast showToast("Selected device has disconnected"); runOnUiThread(new Runnable() { @Override public void run() { Button bt = (Button) findViewById(R.id.button2); bt.setText(R.string.buttonS); } }); isExecute = EXECUTE_STOP; } @Override public void onPostSetParam(int nRet, byte outStatus) { // Show toast String str = "Set parameters : " + String.format("ret = %d / status = 0x%02x", nRet, outStatus); showToast(str); } @Override public void onPostGetParam(int nRet, byte outStatus) { // Show toast String str = "Get parameters : " + String.format("ret = %d / status = 0x%02x", nRet, outStatus); showToast(str); } @Override public void onPostExecute(int nRet, byte outStatus) { if ( nRet != HVC.HVC_NORMAL || outStatus != 0 ) { String str = "Execute : " + String.format("ret = %d / status = 0x%02x", nRet, outStatus); showToast(str); } else { String str = "Body Detect = " + String.format("%d\n", hvcRes.body.size()); for (DetectionResult bodyResult : hvcRes.body) { int size = bodyResult.size; int posX = bodyResult.posX; int posY = bodyResult.posY; int conf = bodyResult.confidence; str += String.format(" [Body Detection] : size = %d, x = %d, y = %d, conf = %d\n", size, posX, posY, conf); } str += "Hand Detect = " + 
String.format("%d\n", hvcRes.hand.size()); for (DetectionResult handResult : hvcRes.hand) { int size = handResult.size; int posX = handResult.posX; int posY = handResult.posY; int conf = handResult.confidence; str += String.format(" [Hand Detection] : size = %d, x = %d, y = %d, conf = %d\n", size, posX, posY, conf); } str += "Face Detect = " + String.format("%d\n", hvcRes.face.size()); for (FaceResult faceResult : hvcRes.face) { if ( (hvcRes.executedFunc & HVC.HVC_ACTIV_FACE_DETECTION) != 0 ) { int size = faceResult.size; int posX = faceResult.posX; int posY = faceResult.posY; int conf = faceResult.confidence; str += String.format(" [Face Detection] : size = %d, x = %d, y = %d, conf = %d\n", size, posX, posY, conf); } if ( (hvcRes.executedFunc & HVC.HVC_ACTIV_FACE_DIRECTION) != 0 ) { str += String.format(" [Face Direction] : yaw = %d, pitch = %d, roll = %d, conf = %d\n", faceResult.dir.yaw, faceResult.dir.pitch, faceResult.dir.roll, faceResult.dir.confidence); } if ( (hvcRes.executedFunc & HVC.HVC_ACTIV_AGE_ESTIMATION) != 0 ) { str += String.format(" [Age Estimation] : age = %d, conf = %d\n", faceResult.age.age, faceResult.age.confidence); } if ( (hvcRes.executedFunc & HVC.HVC_ACTIV_GENDER_ESTIMATION) != 0 ) { str += String.format(" [Gender Estimation] : gender = %s, confidence = %d\n", faceResult.gen.gender == HVC.HVC_GEN_MALE ? "Male" : "Female", faceResult.gen.confidence); } if ( (hvcRes.executedFunc & HVC.HVC_ACTIV_GAZE_ESTIMATION) != 0 ) { str += String.format(" [Gaze Estimation] : LR = %d, UD = %d\n", faceResult.gaze.gazeLR, faceResult.gaze.gazeUD); } if ( (hvcRes.executedFunc & HVC.HVC_ACTIV_BLINK_ESTIMATION) != 0 ) { str += String.format(" [Blink Estimation] : ratioL = %d, ratioR = %d\n", faceResult.blink.ratioL, faceResult.blink.ratioR); } if ( (hvcRes.executedFunc & HVC.HVC_ACTIV_EXPRESSION_ESTIMATION) != 0 ) { str += String.format(" [Expression Estimation] : expression = %s, score = %d, degree = %d\n", faceResult.exp.expression == HVC.HVC_EX_NEUTRAL ? 
"Neutral" : faceResult.exp.expression == HVC.HVC_EX_HAPPINESS ? "Happiness" : faceResult.exp.expression == HVC.HVC_EX_SURPRISE ? "Surprise" : faceResult.exp.expression == HVC.HVC_EX_ANGER ? "Anger" : faceResult.exp.expression == HVC.HVC_EX_SADNESS ? "Sadness" : "" , faceResult.exp.score, faceResult.exp.degree); } } final String viewText = str; runOnUiThread(new Runnable() { @Override public void run() { TextView tvVer = (TextView) findViewById(R.id.textView1); tvVer.setText(viewText); } }); } } }; public void onClick1(View view) { switch (view.getId()){ case R.id.button1: if ( isExecute == EXECUTE_START ) { // Show toast Toast.makeText(this, "You are executing now", Toast.LENGTH_SHORT).show(); break; } nSelectDeviceNo = -1; newFragment = new DeviceDialogFragment(); newFragment.setCancelable(false); newFragment.show(getFragmentManager(), "Bluetooth Devices"); break; } } public void onClick2(View view) { switch (view.getId()){ case R.id.button2: if ( nSelectDeviceNo == -1 ) { // Show toast Toast.makeText(this, "You must select device", Toast.LENGTH_SHORT).show(); break; } if ( isExecute == EXECUTE_STOP ) { Button bt = (Button) findViewById(R.id.button2); bt.setText(R.string.buttonE); isExecute = EXECUTE_START; } else if ( isExecute == EXECUTE_START ) { Button bt = (Button) findViewById(R.id.button2); bt.setText(R.string.buttonS); isExecute = EXECUTE_STOP; } break; } } public void showToast(final String str) { // Show toast runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(getApplicationContext(), str, Toast.LENGTH_SHORT).show(); } }); } public class DeviceDialogFragment extends DialogFragment { String[] deviceNameList = null; ArrayAdapter<String> ListAdpString = null; @SuppressLint("InflateParams") @Override public Dialog onCreateDialog(Bundle savedInstanceState) { // Use the Builder class for convenient dialog construction AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); LayoutInflater inflater = 
(LayoutInflater)getActivity().getSystemService(Context.LAYOUT_INFLATER_SERVICE); View content = inflater.inflate(R.layout.devices, null); builder.setView(content); ListView listView = (ListView)content.findViewById(R.id.devices); // Set adapter ListAdpString = new ArrayAdapter<String>(getActivity(), android.R.layout.simple_list_item_single_choice); listView.setAdapter(ListAdpString); // Set the click event in the list view listView.setOnItemClickListener(new AdapterView.OnItemClickListener(){ /** * It is called when you click on an item */ @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { nSelectDeviceNo = position; newFragment = null; dismiss(); } }); DeviceDialogThread dlgThread = new DeviceDialogThread(); dlgThread.start(); builder.setMessage(getString(R.string.button1)) .setNegativeButton("Cancel", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { newFragment = null; } }); // Create the AlertDialog object and return it return builder.create(); } private class DeviceDialogThread extends Thread { @Override public void run() { do { runOnUiThread(new Runnable() { @Override public void run() { if ( ListAdpString != null ) { ListAdpString.clear(); if ( deviceList == null ) { deviceNameList = new String[] { "null" }; } else { synchronized (deviceList) { deviceNameList = new String[deviceList.size()]; int nIndex = 0; for (BluetoothDevice device : deviceList) { if (device.getName() == null ) { deviceNameList[nIndex] = "no name"; } else { deviceNameList[nIndex] = device.getName(); } nIndex++; } } } ListAdpString.addAll(deviceNameList); ListAdpString.notifyDataSetChanged(); } } }); try { Thread.sleep(1000); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } while(true); } } } @Override public void onBackPressed() { new AlertDialog.Builder(this) .setIcon(android.R.drawable.ic_dialog_alert) .setTitle(R.string.popup_title) 
.setMessage(R.string.popup_message) .setPositiveButton(R.string.popup_yes, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { try { finish(); } catch (Throwable e) { // TODO Auto-generated catch block e.printStackTrace(); } } }) .setNegativeButton(R.string.popup_no, null) .show(); } }
/**
 * Copyright (c) 2004-2005, Regents of the University of California
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 *
 * Neither the name of the University of California, Los Angeles nor the
 * names of its contributors may be used to endorse or promote products
 * derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package avrora.sim.mcu;

import avrora.arch.avr.AVRProperties;
import avrora.arch.legacy.LegacyInterpreter;
import avrora.core.Program;
import avrora.sim.*;
import avrora.sim.clock.ClockDomain;
import cck.util.Arithmetic;

import java.util.HashMap;

/**
 * The <code>ATMega16</code> class represents the ATMega16 microcontroller from Atmel. This
 * microcontroller has 16Kb code, 1KB SRAM, 512 Byte EEPROM, and a host of internal devices such as
 * ADC, SPI, and timers.
 *
 * @author Ben L. Titzer
 * @author Bastian Schlich
 * @author John F. Schommer
 *
 */
public class ATMega16 extends ATMegaFamily {

    // Size constants used to express the memory sizes below.
    public static final int _1kb = 1024;
    public static final int _512b = 512;

    // Memory / pin / interrupt geometry of the ATMega16 part.
    public static final int ATMEGA16_IOREG_SIZE = 64;
    public static final int ATMEGA16_SRAM_SIZE = _1kb;
    public static final int ATMEGA16_FLASH_SIZE = 16 * _1kb;
    public static final int ATMEGA16_EEPROM_SIZE = _512b;
    public static final int ATMEGA16_NUM_PINS = 41;
    public static final int ATMEGA16_NUM_INTS = 22;

    // Sleep-mode identifiers; index 0 (implicit MODE_ACTIVE) is the running state.
    public static final int MODE_IDLE       = 1;
    public static final int MODE_RESERVED1  = 2;
    public static final int MODE_ADCNRED    = 3;
    public static final int MODE_RESERVED2  = 4;
    public static final int MODE_POWERDOWN  = 5;
    public static final int MODE_STANDBY    = 6;
    public static final int MODE_POWERSAVE  = 7;
    public static final int MODE_EXTSTANDBY = 8;

    // Human-readable names for the sleep modes, indexed by the MODE_* constants.
    protected static final String[] idleModeNames = {
        "Active",
        "Idle",
        "RESERVED 1",
        "ADC Noise Reduction",
        "RESERVED 2",
        "Power Down",
        "Standby",
        "Power Save",
        "Extended Standby"
    };

    // Cycles needed to wake from each mode, parallel to idleModeNames.
    protected static final int[] wakeupTimes = {
        0, 0, 0, 0, 0, 1000, 6, 1000, 6
    };

    // MCUCR holds the sleep-enable bit and the sleep-mode selection bits.
    protected final ActiveRegister MCUCR_reg;

    // Transition-time matrix for the sleep-mode finite state machine.
    private static final int[][] transitionTimeMatrix  =
        FiniteStateMachine.buildBimodalTTM(idleModeNames.length, 0, wakeupTimes, new int[wakeupTimes.length]);

    // CS values 6 and 7 select external clock source and are not supported. Results in an ArrayOutOfBound exception
    public static final int[] ATmega16Periods0 = {0, 1, 8, 64, 256, 1024};
    public static final int[] ATmega16Periods2 = {0, 1, 8, 32, 64, 128, 256, 1024};

    /**
     * The <code>props</code> field stores a static reference to a properties
     * object shared by all of the instances of this microcontroller. This object
     * stores the IO register size, SRAM size, pin assignments, etc.
     */
    public static final AVRProperties props;

    static {
        // statically initialize the pin assignments for this microcontroller
        HashMap<String, Integer> pinAssignments = new HashMap<String, Integer>(150);
        RegisterLayout rl = new RegisterLayout(ATMEGA16_IOREG_SIZE, 8);
        HashMap<String, Integer> interruptAssignments = new HashMap<String, Integer>(30);

        // Physical pin -> signal-name mapping (DIP-40 package numbering).
        addPin(pinAssignments, 1, "XCK", "T0", "PB0");
        addPin(pinAssignments, 2, "T1", "PB1");
        addPin(pinAssignments, 3, "AIN0", "INT2", "PB2");
        addPin(pinAssignments, 4, "AIN1", "OC0", "PB3");
        addPin(pinAssignments, 5, "SS", "PB4");
        addPin(pinAssignments, 6, "MOSI", "PB5");
        addPin(pinAssignments, 7, "MISO", "PB6");
        addPin(pinAssignments, 8, "SCK", "PB7");
        addPin(pinAssignments, 9, "RESET");
        addPin(pinAssignments, 10, "VCC.1");
        addPin(pinAssignments, 11, "GND.1");
        addPin(pinAssignments, 12, "XTAL2");
        addPin(pinAssignments, 13, "XTAL1");
        addPin(pinAssignments, 14, "RXD", "PD0");
        addPin(pinAssignments, 15, "TXD", "PD1");
        addPin(pinAssignments, 16, "INT0", "PD2");
        addPin(pinAssignments, 17, "INT1", "PD3");
        addPin(pinAssignments, 18, "OC1B", "PD4");
        addPin(pinAssignments, 19, "OC1A", "PD5");
        addPin(pinAssignments, 20, "ICP1", "PD6");
        addPin(pinAssignments, 21, "OC2", "PD7");
        addPin(pinAssignments, 22, "TOSC2", "PC7");
        addPin(pinAssignments, 23, "TOSC1", "PC6");
        addPin(pinAssignments, 24, "TDI", "PC5");
        addPin(pinAssignments, 25, "TDO", "PC4");
        addPin(pinAssignments, 26, "TMS", "PC3");
        addPin(pinAssignments, 27, "TCK", "PC2");
        addPin(pinAssignments, 28, "SDA", "PC1");
        addPin(pinAssignments, 29, "SCL", "PC0");
        addPin(pinAssignments, 30, "AVCC");
        addPin(pinAssignments, 31, "GND.2");
        addPin(pinAssignments, 32, "AREF");
        addPin(pinAssignments, 33, "ADC7", "PA7");
        addPin(pinAssignments, 34, "ADC6", "PA6");
        addPin(pinAssignments, 35, "ADC5", "PA5");
        addPin(pinAssignments, 36, "ADC4", "PA4");
        addPin(pinAssignments, 37, "ADC3", "PA3");
        addPin(pinAssignments, 38, "ADC2", "PA2");
        addPin(pinAssignments, 39, "ADC1", "PA1");
        addPin(pinAssignments, 40, "ADC0", "PA0");

        // lower 64 IO registers
        rl.addIOReg("SREG", 0x3F);
        rl.addIOReg("SPH", 0x3E);
        rl.addIOReg("SPL", 0x3D);
        rl.addIOReg("OCR0", 0x3C);
        rl.addIOReg("GICR", 0x3B);
        rl.addIOReg("GIFR", 0x3A);
        rl.addIOReg("TIMSK", 0x39);
        rl.addIOReg("TIFR", 0x38);
        rl.addIOReg("SPMCR", 0x37);
        // TODO: this register is called different names on different models
        rl.addIOReg("SPMCSR", 0x37);
        rl.addIOReg("TWCR", 0x36);
        rl.addIOReg("MCUCR", 0x35);
        rl.addIOReg("MCUCSR", 0x34);
        rl.addIOReg("TCCR0", 0x33);
        rl.addIOReg("TCNT0", 0x32);
        rl.addIOReg("OSCCAL", 0x31);
        rl.addIOReg("SFIOR", 0x30);
        rl.addIOReg("TCCR1A", 0x2F, "COM1A[1:0],COM1B[1:0],FOC1A,FOC1B,WGM1[1:0]");
        rl.addIOReg("TCCR1B", 0x2E, ".,ICES1,.,WGM1[3:2],CS1[2:0]");
        rl.addIOReg("TCNT1H", 0x2D);
        rl.addIOReg("TCNT1L", 0x2C);
        rl.addIOReg("OCR1AH", 0x2B);
        rl.addIOReg("OCR1AL", 0x2A);
        rl.addIOReg("OCR1BH", 0x29);
        rl.addIOReg("OCR1BL", 0x28);
        rl.addIOReg("ICR1H", 0x27);
        rl.addIOReg("ICR1L", 0x26);
        rl.addIOReg("TCCR2", 0x25);
        rl.addIOReg("TCNT2", 0x24);
        rl.addIOReg("OCR2", 0x23);
        rl.addIOReg("ASSR", 0x22);
        rl.addIOReg("WDTCR", 0x21);
        rl.addIOReg("UBRRH", 0x20);
        // TODO: the UCSRC register is shared!
        rl.addIOReg("UCSRC", 0x20);
        rl.addIOReg("EEARH", 0x1F);
        rl.addIOReg("EEARL", 0x1E);
        rl.addIOReg("EEDR", 0x1D);
        rl.addIOReg("EECR", 0x1C);
        rl.addIOReg("PORTA", 0x1B);
        rl.addIOReg("DDRA", 0x1A);
        rl.addIOReg("PINA", 0x19);
        rl.addIOReg("PORTB", 0x18);
        rl.addIOReg("DDRB", 0x17);
        rl.addIOReg("PINB", 0x16);
        rl.addIOReg("PORTC", 0x15);
        rl.addIOReg("DDRC", 0x14);
        rl.addIOReg("PINC", 0x13);
        rl.addIOReg("PORTD", 0x12);
        rl.addIOReg("DDRD", 0x11);
        rl.addIOReg("PIND", 0x10);
        rl.addIOReg("SPDR", 0x0F);
        rl.addIOReg("SPSR", 0x0E);
        rl.addIOReg("SPCR", 0x0D);
        rl.addIOReg("UDR", 0x0C);
        rl.addIOReg("UCSRA", 0x0B);
        rl.addIOReg("UCSRB", 0x0A);
        rl.addIOReg("UBRRL", 0x09);
        rl.addIOReg("ACSR", 0x08);
        rl.addIOReg("ADMUX", 0x07);
        rl.addIOReg("ADCSRA", 0x06);
        rl.addIOReg("ADCH", 0x05);
        rl.addIOReg("ADCL", 0x04);
        rl.addIOReg("TWDR", 0x03);
        rl.addIOReg("TWAR", 0x02);
        rl.addIOReg("TWSR", 0x01);
        rl.addIOReg("TWBR", 0x00);

        // Interrupt-vector assignments (name -> vector number).
        addInterrupt(interruptAssignments, "RESET", 1);
        addInterrupt(interruptAssignments, "INT0", 2);
        addInterrupt(interruptAssignments, "INT1", 3);
        addInterrupt(interruptAssignments, "INT2", 19);
        addInterrupt(interruptAssignments, "TIMER2 COMP", 4);
        addInterrupt(interruptAssignments, "TIMER2 OVF", 5);
        addInterrupt(interruptAssignments, "TIMER1 CAPT", 6);
        addInterrupt(interruptAssignments, "TIMER1 COMPA", 7);
        addInterrupt(interruptAssignments, "TIMER1 COMPB", 8);
        addInterrupt(interruptAssignments, "TIMER1 OVF", 9);
        addInterrupt(interruptAssignments, "TIMER0 COMP", 20);
        addInterrupt(interruptAssignments, "TIMER0 OVF", 10);
        addInterrupt(interruptAssignments, "SPI, STC", 11);
        addInterrupt(interruptAssignments, "USART, RX", 12);
        addInterrupt(interruptAssignments, "USART, UDRE", 13);
        addInterrupt(interruptAssignments, "USART, TX", 14);
        addInterrupt(interruptAssignments, "ADC", 15);
        addInterrupt(interruptAssignments, "EE READY", 16);
        addInterrupt(interruptAssignments, "ANALOG COMP", 17);
        addInterrupt(interruptAssignments, "TWI", 18);
        addInterrupt(interruptAssignments, "SPM READY", 21);

        props = new AVRProperties(ATMEGA16_IOREG_SIZE, // number of io registers
                ATMEGA16_SRAM_SIZE, // size of sram in bytes
                ATMEGA16_FLASH_SIZE, // size of flash in bytes
                ATMEGA16_EEPROM_SIZE, // size of eeprom in bytes
                ATMEGA16_NUM_PINS, // number of pins
                ATMEGA16_NUM_INTS, // number of interrupts
                new ReprogrammableCodeSegment.Factory(ATMEGA16_FLASH_SIZE, 6),
                pinAssignments, // the assignment of names to physical pins
                rl, // the assignment of names to IO registers
                interruptAssignments);
    }

    public static class Factory implements MicrocontrollerFactory {

        /**
         * The <code>newMicrocontroller()</code> method is used to instantiate a microcontroller instance for the
         * particular program. It will construct an instance of the <code>Simulator</code> class that has all the
         * properties of this hardware device and has been initialized with the specified program.
         *
         * @param sim
         *@param p the program to load onto the microcontroller @return a <code>Microcontroller</code> instance that represents the specific hardware device with the
         * program loaded onto it
         */
        public Microcontroller newMicrocontroller(int id, Simulation sim, ClockDomain cd, Program p) {
            return new ATMega16(id, sim, cd, p);
        }
    }

    /**
     * Constructs an ATMega16 simulation: builds the sleep-mode state machine,
     * creates the legacy-interpreter simulator for the given program, and
     * installs pins and on-chip devices.
     *
     * @param id simulator node id
     * @param sim enclosing simulation
     * @param cd clock domain supplying the main clock
     * @param p program to load
     */
    public ATMega16(int id, Simulation sim, ClockDomain cd, Program p) {
        super(cd, props, new FiniteStateMachine(cd.getMainClock(), MODE_ACTIVE, idleModeNames, transitionTimeMatrix));
        simulator = sim.createSimulator(id, LegacyInterpreter.FACTORY, this, p);
        interpreter = (AtmelInterpreter)simulator.getInterpreter();
        MCUCR_reg = getIOReg("MCUCR");
        installPins();
        installDevices();
    }

    // Creates one Pin object per physical pin.
    protected void installPins() {
        for (int cntr = 0; cntr < properties.num_pins; cntr++)
            pins[cntr] = new Pin(cntr);
    }

    // Wires up interrupt registers, timers, ports and peripherals.
    protected void installDevices() {
        // set up the external interrupt mask and flag registers and interrupt range
        int[] mapping = new int[] { -1, -1, -1, -1, -1, 4, 2, 3 };
        FlagRegister fr = new FlagRegister(interpreter, mapping);
        MaskRegister mr = new MaskRegister(interpreter, mapping);
        installIOReg("GICR", mr);
        installIOReg("GIFR", fr);
        EIFR_reg = fr;

        // set up the timer mask and flag registers
        int[] mappingTIMSK = new int[] { 10, 20, 9, 8, 7, 6, 5, 4 };
        TIFR_reg = new FlagRegister(interpreter, mappingTIMSK);
        TIMSK_reg = new MaskRegister(interpreter, mappingTIMSK);
        installIOReg("TIFR", TIFR_reg);
        installIOReg("TIMSK", TIMSK_reg);

        addDevice(new Timer0());
        addDevice(new Timer1(2));
        addDevice(new Timer2());

        buildPort('A');
        buildPort('B');
        buildPort('C');
        buildPort('D');

        addDevice(new EEPROM(properties.eeprom_size, this));
        addDevice(new USART("", this));
        addDevice(new SPI(this));
        addDevice(new ADC(this, 8));
    }

    // permutation of sleep mode bits in the register (high order bits first)
    private static final int[] MCUCR_sm_perm = { 2, 4, 3 };

    /**
     * Reads MCUCR and returns the currently selected sleep mode.
     * If the sleep-enable bit (bit 5) is clear, MODE_IDLE is reported.
     *
     * @return one of the MODE_* constants
     */
    protected int getSleepMode() {
        byte value = MCUCR_reg.read();
        boolean sleepEnable = Arithmetic.getBit(value, 5);

        if ( sleepEnable )
            return Arithmetic.getBitField(value, MCUCR_sm_perm) + 1;
        else
            return MODE_IDLE;
    }

    /**
     * <code>Timer0</code> is different from ATMega128
     */
    protected class Timer0 extends Timer8Bit {

        protected Timer0() {
            // Uses the ATMega16-specific prescaler period table.
            super(ATMega16.this, 0, 1, 0, 1, 0, ATmega16Periods0);
        }
    }

    /**
     * <code>Timer2</code> is different from ATMega128
     */
    protected class Timer2 extends Timer8Bit {

        protected Timer2() {
            super(ATMega16.this, 2, 7, 6, 7, 6, ATmega16Periods2);
            installIOReg("ASSR", new ASSRRegister());
        }

        // See pg. 133 of the ATmega16A doc
        protected class ASSRRegister extends RWRegister {
            static final int AS2 = 3;
            static final int TCN2UB = 2;
            static final int OCR2UB = 1;
            static final int TCR2UB = 0;

            public void write(byte val) {
                // Only the low four bits are writable.
                super.write((byte) (0xf & val));
                decode(val);
            }

            protected void decode(byte val) {
                // TODO: if there is a change, remove ticker and requeue?
                // AS2 selects the asynchronous (external) clock for Timer2.
                timerClock = Arithmetic.getBit(val, AS2) ? externalClock : mainClock;
            }
        }

    }
}
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.master; import alluxio.AlluxioURI; import alluxio.Configuration; import alluxio.PropertyKey; import alluxio.RuntimeConstants; import alluxio.master.journal.JournalSystem; import alluxio.metrics.MetricsSystem; import alluxio.metrics.sink.MetricsServlet; import alluxio.metrics.sink.PrometheusMetricsServlet; import alluxio.network.thrift.BootstrapServerTransport; import alluxio.network.thrift.ThriftUtils; import alluxio.security.authentication.TransportProvider; import alluxio.underfs.UnderFileSystem; import alluxio.underfs.UnderFileSystemConfiguration; import alluxio.util.CommonUtils; import alluxio.util.JvmPauseMonitor; import alluxio.util.URIUtils; import alluxio.util.WaitForOptions; import alluxio.util.network.NetworkAddressUtils; import alluxio.util.network.NetworkAddressUtils.ServiceType; import alluxio.web.MasterWebServer; import alluxio.web.WebServer; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import org.apache.thrift.TMultiplexedProcessor; import org.apache.thrift.TProcessor; import org.apache.thrift.server.TServer; import org.apache.thrift.server.TThreadPoolServer; import org.apache.thrift.server.TThreadPoolServer.Args; import org.apache.thrift.transport.TServerSocket; import org.apache.thrift.transport.TTransportException; import org.apache.thrift.transport.TTransportFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.InputStream; import 
java.net.InetSocketAddress; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.locks.Lock; import javax.annotation.Nullable; import javax.annotation.concurrent.NotThreadSafe; /** * This class encapsulates the different master services that are configured to run. */ @NotThreadSafe public class AlluxioMasterProcess implements MasterProcess { private static final Logger LOG = LoggerFactory.getLogger(AlluxioMasterProcess.class); /** Maximum number of threads to serve the rpc server. */ private final int mMaxWorkerThreads; /** Minimum number of threads to serve the rpc server. */ private final int mMinWorkerThreads; /** The port for the RPC server. */ private final int mPort; /** * Lock for pausing modifications to master state. Holding the this lock allows a thread to * guarantee that no other threads will modify master state. */ private final Lock mPauseStateLock; /** The socket for thrift rpc server. */ private TServerSocket mRpcServerSocket; /** The transport provider to create thrift server transport. */ private final TransportProvider mTransportProvider; /** The bind address for the rpc server. */ private final InetSocketAddress mRpcBindAddress; /** The connect address for the rpc server. */ private final InetSocketAddress mRpcConnectAddress; private final MetricsServlet mMetricsServlet = new MetricsServlet(MetricsSystem.METRIC_REGISTRY); private final PrometheusMetricsServlet mPMetricsServlet = new PrometheusMetricsServlet( MetricsSystem.METRIC_REGISTRY); /** The master registry. */ private final MasterRegistry mRegistry; /** The web ui server. */ private WebServer mWebServer; /** The RPC server. */ private TServer mThriftServer; /** The start time for when the master started. */ private final long mStartTimeMs = System.currentTimeMillis(); /** The journal system for writing journal entries and restoring master state. 
*/
  protected final JournalSystem mJournalSystem;

  /** Monitors JVM pauses; null until {@link #startJvmMonitorProcess()} runs with the
   * monitor enabled by configuration. */
  private JvmPauseMonitor mJvmPauseMonitor;

  /** The manager of safe mode state. */
  protected final SafeModeManager mSafeModeManager;

  /** The manager for creating and restoring backups. */
  private final BackupManager mBackupManager;

  /**
   * Creates a new {@link AlluxioMasterProcess}.
   *
   * @param journalSystem the (already formatted) journal system shared by all masters
   * @throws RuntimeException if the journal is unformatted or any initialization step fails
   */
  AlluxioMasterProcess(JournalSystem journalSystem) {
    mJournalSystem = Preconditions.checkNotNull(journalSystem, "journalSystem");
    mMinWorkerThreads = Configuration.getInt(PropertyKey.MASTER_WORKER_THREADS_MIN);
    mMaxWorkerThreads = Configuration.getInt(PropertyKey.MASTER_WORKER_THREADS_MAX);
    int connectionTimeout = (int) Configuration.getMs(PropertyKey.MASTER_CONNECTION_TIMEOUT_MS);
    Preconditions.checkArgument(mMaxWorkerThreads >= mMinWorkerThreads,
        PropertyKey.MASTER_WORKER_THREADS_MAX + " can not be less than "
            + PropertyKey.MASTER_WORKER_THREADS_MIN);
    if (connectionTimeout > 0) {
      LOG.debug("{} connection timeout[{}] is {}", this,
          PropertyKey.MASTER_CONNECTION_TIMEOUT_MS, connectionTimeout);
    }
    try {
      // Extract the port from the generated socket.
      // When running tests, it is fine to use port '0' so the system will figure out what port to
      // use (any random free port).
      // In a production or any real deployment setup, port '0' should not be used as it will make
      // deployment more complicated.
      if (!Configuration.getBoolean(PropertyKey.TEST_MODE)) {
        Preconditions.checkState(Configuration.getInt(PropertyKey.MASTER_RPC_PORT) > 0,
            this + " rpc port is only allowed to be zero in test mode.");
        Preconditions.checkState(Configuration.getInt(PropertyKey.MASTER_WEB_PORT) > 0,
            this + " web port is only allowed to be zero in test mode.");
      }
      mTransportProvider = TransportProvider.Factory.create();
      mRpcServerSocket = ThriftUtils.createThriftServerSocket(
          NetworkAddressUtils.getBindAddress(ServiceType.MASTER_RPC));
      mPort = ThriftUtils.getThriftPort(mRpcServerSocket);
      // reset master rpc port so that derived addresses reflect the actually-bound port
      Configuration.set(PropertyKey.MASTER_RPC_PORT, Integer.toString(mPort));
      mRpcBindAddress = NetworkAddressUtils.getBindAddress(ServiceType.MASTER_RPC);
      mRpcConnectAddress = NetworkAddressUtils.getConnectAddress(ServiceType.MASTER_RPC);
      if (!mJournalSystem.isFormatted()) {
        throw new RuntimeException(
            String.format("Journal %s has not been formatted!", mJournalSystem));
      }
      // Create masters.
      mRegistry = new MasterRegistry();
      mSafeModeManager = new DefaultSafeModeManager();
      mBackupManager = new BackupManager(mRegistry);
      MasterContext context = new MasterContext(mJournalSystem, mSafeModeManager, mBackupManager,
          mStartTimeMs, mPort);
      mPauseStateLock = context.pauseStateLock();
      MasterUtils.createMasters(mRegistry, context);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  @Override
  public <T extends Master> T getMaster(Class<T> clazz) {
    return mRegistry.get(clazz);
  }

  @Override
  public InetSocketAddress getRpcAddress() {
    return mRpcConnectAddress;
  }

  @Override
  public long getStartTimeMs() {
    return mStartTimeMs;
  }

  @Override
  public long getUptimeMs() {
    return System.currentTimeMillis() - mStartTimeMs;
  }

  @Override
  @Nullable
  public InetSocketAddress getWebAddress() {
    // Null until startServingWebServer() has created the web server.
    if (mWebServer != null) {
      return new InetSocketAddress(mWebServer.getBindHost(), mWebServer.getLocalPort());
    }
    return null;
  }

  @Override
  public boolean isInSafeMode() {
    return mSafeModeManager.isInSafeMode();
  }

  @Override
  public boolean isServing() {
    return mThriftServer != null && mThriftServer.isServing();
  }

  @Override
  public boolean waitForReady(int timeoutMs) {
    try {
      CommonUtils.waitFor(this + " to start",
          () -> mThriftServer != null && mThriftServer.isServing() && mWebServer != null
              && mWebServer.getServer().isRunning(),
          WaitForOptions.defaults().setTimeoutMs(timeoutMs));
      return true;
    } catch (InterruptedException e) {
      // Restore the interrupt status for callers higher up the stack.
      Thread.currentThread().interrupt();
      return false;
    } catch (TimeoutException e) {
      return false;
    }
  }

  @Override
  public void start() throws Exception {
    mJournalSystem.start();
    mJournalSystem.gainPrimacy();
    startMasters(true);
    startServing();
  }

  @Override
  public void stop() throws Exception {
    if (isServing()) {
      stopServing();
      stopMasters();
      mJournalSystem.stop();
    }
  }

  /**
   * Starts all masters, including block master, FileSystem master, and additional masters.
   *
   * @param isLeader if the Master is leader
   */
  protected void startMasters(boolean isLeader) {
    try {
      if (isLeader) {
        if (Configuration.isSet(PropertyKey.MASTER_JOURNAL_INIT_FROM_BACKUP)) {
          AlluxioURI backup =
              new AlluxioURI(Configuration.get(PropertyKey.MASTER_JOURNAL_INIT_FROM_BACKUP));
          // Only restore into an empty journal; never clobber existing state.
          if (mJournalSystem.isEmpty()) {
            initFromBackup(backup);
          } else {
            LOG.info("The journal system is not freshly formatted, skipping restoring backup from "
                + backup);
          }
        }
        mSafeModeManager.notifyPrimaryMasterStarted();
      }
      mRegistry.start(isLeader);
      LOG.info("All masters started");
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Restores master metadata from a backup file.
   *
   * @param backup URI of the backup; either on the local filesystem or on the root UFS
   * @throws IOException if the backup cannot be opened or applied
   */
  private void initFromBackup(AlluxioURI backup) throws IOException {
    UnderFileSystem ufs;
    if (URIUtils.isLocalFilesystem(backup.toString())) {
      ufs = UnderFileSystem.Factory.create("/", UnderFileSystemConfiguration.defaults());
    } else {
      ufs = UnderFileSystem.Factory.createForRoot();
    }
    try (UnderFileSystem closeUfs = ufs; InputStream ufsIn = ufs.open(backup.getPath())) {
      LOG.info("Initializing metadata from backup {}", backup);
      mBackupManager.initFromBackup(ufsIn);
    }
  }

  /**
   * Stops all masters, including block master, fileSystem master and additional masters.
   */
  protected void stopMasters() {
    try {
      mRegistry.stop();
    } catch (IOException e) {
      // Guava's Throwables.propagate is deprecated; for a checked IOException it did exactly
      // this wrap, and every other catch block in this class already uses new RuntimeException(e).
      throw new RuntimeException(e);
    }
  }

  private void startServing() {
    startServing("", "");
  }

  /**
   * Starts serving, letting {@link MetricsSystem} start sink and starting the web ui server and RPC
   * Server.
   *
   * @param startMessage empty string or the message that the master gains the leadership
   * @param stopMessage empty string or the message that the master loses the leadership
   */
  protected void startServing(String startMessage, String stopMessage) {
    MetricsSystem.startSinks();
    startServingWebServer();
    startJvmMonitorProcess();
    LOG.info("Alluxio master version {} started{}. "
            + "bindHost={}, connectHost={}, rpcPort={}, webPort={}",
        RuntimeConstants.VERSION, startMessage,
        NetworkAddressUtils.getBindAddress(ServiceType.MASTER_RPC),
        NetworkAddressUtils.getConnectAddress(ServiceType.MASTER_RPC),
        NetworkAddressUtils.getPort(ServiceType.MASTER_RPC),
        NetworkAddressUtils.getPort(ServiceType.MASTER_WEB));
    // Blocks until the RPC server is stopped, so the "ended" log fires on shutdown.
    startServingRPCServer();
    LOG.info("Alluxio master ended{}", stopMessage);
  }

  /**
   * Starts serving web ui server, resetting master web port, adding the metrics servlet to the web
   * server and starting web ui.
   */
  protected void startServingWebServer() {
    mWebServer = new MasterWebServer(ServiceType.MASTER_WEB.getServiceName(),
        NetworkAddressUtils.getBindAddress(ServiceType.MASTER_WEB), this);
    // reset master web port
    Configuration.set(PropertyKey.MASTER_WEB_PORT, Integer.toString(mWebServer.getLocalPort()));
    // Add the metrics servlet to the web server.
    mWebServer.addHandler(mMetricsServlet.getHandler());
    // Add the prometheus metrics servlet to the web server.
    mWebServer.addHandler(mPMetricsServlet.getHandler());
    // start web ui
    mWebServer.start();
  }

  /**
   * Starts jvm monitor process, to monitor jvm.
   */
  protected void startJvmMonitorProcess() {
    if (Configuration.getBoolean(PropertyKey.MASTER_JVM_MONITOR_ENABLED)) {
      mJvmPauseMonitor = new JvmPauseMonitor();
      mJvmPauseMonitor.start();
    }
  }

  /**
   * Registers each named Thrift service with the multiplexed processor.
   *
   * @param processor the multiplexed processor to register on
   * @param services map from service name to its Thrift processor
   */
  private void registerServices(TMultiplexedProcessor processor, Map<String, TProcessor> services) {
    for (Map.Entry<String, TProcessor> service : services.entrySet()) {
      processor.registerProcessor(service.getKey(), service.getValue());
    }
  }

  /**
   * Starts the Thrift RPC server. The AlluxioMaster registers the Services of registered
   * {@link Master}s and meta services to a multiplexed processor, then creates the master thrift
   * service with the multiplexed processor.
   */
  protected void startServingRPCServer() {
    // set up multiplexed thrift processors
    TMultiplexedProcessor processor = new TMultiplexedProcessor();
    // register master services
    for (Master master : mRegistry.getServers()) {
      registerServices(processor, master.getServices());
    }
    // Return a TTransportFactory based on the authentication type
    TTransportFactory transportFactory;
    try {
      String serverName = NetworkAddressUtils.getConnectHost(ServiceType.MASTER_RPC);
      transportFactory = new BootstrapServerTransport.Factory(
          mTransportProvider.getServerTransportFactory(serverName));
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    try {
      // The socket may have been closed by a previous stopServing(); recreate it if needed.
      if (mRpcServerSocket == null) {
        mRpcServerSocket = ThriftUtils.createThriftServerSocket(mRpcBindAddress);
      }
    } catch (TTransportException e) {
      throw new RuntimeException(e);
    }
    // create master thrift service with the multiplexed processor.
    Args args = new TThreadPoolServer.Args(mRpcServerSocket)
        .maxWorkerThreads(mMaxWorkerThreads)
        .minWorkerThreads(mMinWorkerThreads)
        .processor(processor)
        .transportFactory(transportFactory)
        .protocolFactory(ThriftUtils.createThriftProtocolFactory())
        .stopTimeoutVal((int) TimeUnit.MILLISECONDS
            .toSeconds(Configuration.getMs(PropertyKey.MASTER_THRIFT_SHUTDOWN_TIMEOUT)));
    args.stopTimeoutUnit = TimeUnit.SECONDS;
    mThriftServer = new TThreadPoolServer(args);

    // start thrift rpc server; serve() blocks until the server is stopped
    mSafeModeManager.notifyRpcServerStarted();
    mThriftServer.serve();
  }

  /**
   * Stops serving, trying stop RPC server and web ui server and letting {@link MetricsSystem} stop
   * all the sinks.
   */
  protected void stopServing() throws Exception {
    if (mThriftServer != null) {
      mThriftServer.stop();
      mThriftServer = null;
    }
    if (mRpcServerSocket != null) {
      mRpcServerSocket.close();
      mRpcServerSocket = null;
    }
    if (mJvmPauseMonitor != null) {
      mJvmPauseMonitor.stop();
    }
    if (mWebServer != null) {
      mWebServer.stop();
      mWebServer = null;
    }
    MetricsSystem.stopSinks();
  }

  @Override
  public String toString() {
    return "Alluxio master @" + mRpcConnectAddress;
  }
}
/* * Copyright 2017, Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.trace.spi.v1; import static com.google.cloud.trace.spi.v1.PagedResponseWrappers.ListTracesPagedResponse; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.PropertiesProvider; import com.google.api.gax.grpc.CallContext; import com.google.api.gax.grpc.ChannelProvider; import com.google.api.gax.grpc.ClientSettings; import com.google.api.gax.grpc.ExecutorProvider; import com.google.api.gax.grpc.InstantiatingChannelProvider; import com.google.api.gax.grpc.InstantiatingExecutorProvider; import com.google.api.gax.grpc.PageContext; import com.google.api.gax.grpc.PagedCallSettings; import com.google.api.gax.grpc.PagedListDescriptor; import com.google.api.gax.grpc.PagedListResponseFactory; import com.google.api.gax.grpc.SimpleCallSettings; import com.google.api.gax.grpc.UnaryCallSettings; import com.google.api.gax.grpc.UnaryCallable; import com.google.api.gax.retrying.RetrySettings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.devtools.cloudtrace.v1.GetTraceRequest; import com.google.devtools.cloudtrace.v1.ListTracesRequest; import 
com.google.devtools.cloudtrace.v1.ListTracesResponse;
import com.google.devtools.cloudtrace.v1.PatchTracesRequest;
import com.google.devtools.cloudtrace.v1.Trace;
import com.google.protobuf.Empty;
import io.grpc.Status;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;

// AUTO-GENERATED DOCUMENTATION AND CLASS
/**
 * Settings class to configure an instance of {@link TraceServiceClient}.
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li>The default service address (cloudtrace.googleapis.com) and default port (443) are used.
 *   <li>Credentials are acquired automatically through Application Default Credentials.
 *   <li>Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object. For
 * example, to set the total timeout of patchTraces to 30 seconds:
 *
 * <pre>
 * <code>
 * TraceServiceSettings.Builder traceServiceSettingsBuilder =
 *     TraceServiceSettings.defaultBuilder();
 * traceServiceSettingsBuilder.patchTracesSettings().getRetrySettingsBuilder()
 *     .setTotalTimeout(Duration.ofSeconds(30));
 * TraceServiceSettings traceServiceSettings = traceServiceSettingsBuilder.build();
 * </code>
 * </pre>
 */
@Generated("by GAPIC v0.0.5")
@BetaApi
public class TraceServiceSettings extends ClientSettings {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/trace.append")
          .add("https://www.googleapis.com/auth/trace.readonly")
          .build();

  private static final String DEFAULT_GAPIC_NAME = "gapic";
  private static final String DEFAULT_GAPIC_VERSION = "";

  private static final String PROPERTIES_FILE = "/com/google/cloud/trace/project.properties";
  private static final String META_VERSION_KEY = "artifact.version";

  // Lazily resolved and cached by getGapicVersion() on first use.
  private static String gapicVersion;

  private static final io.grpc.MethodDescriptor<PatchTracesRequest, Empty> METHOD_PATCH_TRACES =
      io.grpc.MethodDescriptor.create(
          io.grpc.MethodDescriptor.MethodType.UNARY,
          "google.devtools.cloudtrace.v1.TraceService/PatchTraces",
          io.grpc.protobuf.ProtoUtils.marshaller(PatchTracesRequest.getDefaultInstance()),
          io.grpc.protobuf.ProtoUtils.marshaller(Empty.getDefaultInstance()));
  private static final io.grpc.MethodDescriptor<GetTraceRequest, Trace> METHOD_GET_TRACE =
      io.grpc.MethodDescriptor.create(
          io.grpc.MethodDescriptor.MethodType.UNARY,
          "google.devtools.cloudtrace.v1.TraceService/GetTrace",
          io.grpc.protobuf.ProtoUtils.marshaller(GetTraceRequest.getDefaultInstance()),
          io.grpc.protobuf.ProtoUtils.marshaller(Trace.getDefaultInstance()));
  private static final io.grpc.MethodDescriptor<ListTracesRequest, ListTracesResponse>
      METHOD_LIST_TRACES =
          io.grpc.MethodDescriptor.create(
              io.grpc.MethodDescriptor.MethodType.UNARY,
              "google.devtools.cloudtrace.v1.TraceService/ListTraces",
              io.grpc.protobuf.ProtoUtils.marshaller(ListTracesRequest.getDefaultInstance()),
              io.grpc.protobuf.ProtoUtils.marshaller(ListTracesResponse.getDefaultInstance()));

  private final SimpleCallSettings<PatchTracesRequest, Empty> patchTracesSettings;
  private final SimpleCallSettings<GetTraceRequest, Trace> getTraceSettings;
  private final PagedCallSettings<ListTracesRequest, ListTracesResponse, ListTracesPagedResponse>
      listTracesSettings;

  /** Returns the object with the settings used for calls to patchTraces. */
  public SimpleCallSettings<PatchTracesRequest, Empty> patchTracesSettings() {
    return patchTracesSettings;
  }

  /** Returns the object with the settings used for calls to getTrace. */
  public SimpleCallSettings<GetTraceRequest, Trace> getTraceSettings() {
    return getTraceSettings;
  }

  /** Returns the object with the settings used for calls to listTraces. */
  public PagedCallSettings<ListTracesRequest, ListTracesResponse, ListTracesPagedResponse>
      listTracesSettings() {
    return listTracesSettings;
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "cloudtrace.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder().setScopesToApply(DEFAULT_SERVICE_SCOPES);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingChannelProvider.Builder defaultChannelProviderBuilder() {
    return InstantiatingChannelProvider.newBuilder()
        .setEndpoint(getDefaultEndpoint())
        .setGeneratorHeader(DEFAULT_GAPIC_NAME, getGapicVersion())
        .setCredentialsProvider(defaultCredentialsProviderBuilder().build());
  }

  private static String getGapicVersion() {
    if (gapicVersion == null) {
      gapicVersion =
          PropertiesProvider.loadProperty(
              TraceServiceSettings.class, PROPERTIES_FILE, META_VERSION_KEY);
      gapicVersion = gapicVersion == null ? DEFAULT_GAPIC_VERSION : gapicVersion;
    }
    return gapicVersion;
  }

  /** Returns a builder for this class with recommended defaults. */
  public static Builder defaultBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return new Builder();
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  private TraceServiceSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder.getExecutorProvider(), settingsBuilder.getChannelProvider());

    patchTracesSettings = settingsBuilder.patchTracesSettings().build();
    getTraceSettings = settingsBuilder.getTraceSettings().build();
    listTracesSettings = settingsBuilder.listTracesSettings().build();
  }

  private static final PagedListDescriptor<ListTracesRequest, ListTracesResponse, Trace>
      LIST_TRACES_PAGE_STR_DESC =
          new PagedListDescriptor<ListTracesRequest, ListTracesResponse, Trace>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListTracesRequest injectToken(ListTracesRequest payload, String token) {
              return ListTracesRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListTracesRequest injectPageSize(ListTracesRequest payload, int pageSize) {
              throw new UnsupportedOperationException(
                  "page size is not supported by this API method");
            }

            @Override
            public Integer extractPageSize(ListTracesRequest payload) {
              throw new UnsupportedOperationException(
                  "page size is not supported by this API method");
            }

            @Override
            public String extractNextToken(ListTracesResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Trace> extractResources(ListTracesResponse payload) {
              return payload.getTracesList();
            }
          };

  private static final PagedListResponseFactory<
          ListTracesRequest, ListTracesResponse, ListTracesPagedResponse>
      LIST_TRACES_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListTracesRequest, ListTracesResponse, ListTracesPagedResponse>() {
            @Override
            public ApiFuture<ListTracesPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListTracesRequest, ListTracesResponse> callable,
                ListTracesRequest request,
                CallContext context,
                ApiFuture<ListTracesResponse> futureResponse) {
              PageContext<ListTracesRequest, ListTracesResponse, Trace> pageContext =
                  PageContext.create(callable, LIST_TRACES_PAGE_STR_DESC, request, context);
              return ListTracesPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Builder for TraceServiceSettings. */
  public static class Builder extends ClientSettings.Builder {
    private final ImmutableList<UnaryCallSettings.Builder> unaryMethodSettingsBuilders;

    private final SimpleCallSettings.Builder<PatchTracesRequest, Empty> patchTracesSettings;
    private final SimpleCallSettings.Builder<GetTraceRequest, Trace> getTraceSettings;
    private final PagedCallSettings.Builder<
            ListTracesRequest, ListTracesResponse, ListTracesPagedResponse>
        listTracesSettings;

    private static final ImmutableMap<String, ImmutableSet<Status.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<Status.Code>> definitions = ImmutableMap.builder();
      definitions.put(
          "idempotent",
          Sets.immutableEnumSet(
              Lists.<Status.Code>newArrayList(
                  Status.Code.DEADLINE_EXCEEDED, Status.Code.UNAVAILABLE)));
      definitions.put(
          "non_idempotent",
          Sets.immutableEnumSet(Lists.<Status.Code>newArrayList(Status.Code.UNAVAILABLE)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    private static final ImmutableMap<String, RetrySettings.Builder> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings.Builder> definitions = ImmutableMap.builder();
      RetrySettings.Builder settingsBuilder = null;
      settingsBuilder =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.2)
              .setMaxRetryDelay(Duration.ofMillis(1000L))
              .setInitialRpcTimeout(Duration.ofMillis(20000L))
              .setRpcTimeoutMultiplier(1.5)
              .setMaxRpcTimeout(Duration.ofMillis(30000L))
              .setTotalTimeout(Duration.ofMillis(45000L));
      definitions.put("default", settingsBuilder);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    private Builder() {
      super(defaultChannelProviderBuilder().build());

      patchTracesSettings = SimpleCallSettings.newBuilder(METHOD_PATCH_TRACES);

      getTraceSettings = SimpleCallSettings.newBuilder(METHOD_GET_TRACE);

      listTracesSettings =
          PagedCallSettings.newBuilder(METHOD_LIST_TRACES, LIST_TRACES_PAGE_STR_FACT);

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder>of(
              patchTracesSettings, getTraceSettings, listTracesSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder();

      builder
          .patchTracesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));

      builder
          .getTraceSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));

      builder
          .listTracesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));

      return builder;
    }

    private Builder(TraceServiceSettings settings) {
      super(settings);

      patchTracesSettings = settings.patchTracesSettings.toBuilder();
      getTraceSettings = settings.getTraceSettings.toBuilder();
      listTracesSettings = settings.listTracesSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder>of(
              patchTracesSettings, getTraceSettings, listTracesSettings);
    }

    @Override
    public Builder setExecutorProvider(ExecutorProvider executorProvider) {
      super.setExecutorProvider(executorProvider);
      return this;
    }

    @Override
    public Builder setChannelProvider(ChannelProvider channelProvider) {
      super.setChannelProvider(channelProvider);
      return this;
    }

    /**
     * Applies the given settings to all of the unary API methods in this service. Only values that
     * are non-null will be applied, so this method is not capable of un-setting any values.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(UnaryCallSettings.Builder unaryCallSettings)
        throws Exception {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, unaryCallSettings);
      return this;
    }

    /** Returns the builder for the settings used for calls to patchTraces. */
    public SimpleCallSettings.Builder<PatchTracesRequest, Empty> patchTracesSettings() {
      return patchTracesSettings;
    }

    /** Returns the builder for the settings used for calls to getTrace. */
    public SimpleCallSettings.Builder<GetTraceRequest, Trace> getTraceSettings() {
      return getTraceSettings;
    }

    /** Returns the builder for the settings used for calls to listTraces. */
    public PagedCallSettings.Builder<ListTracesRequest, ListTracesResponse, ListTracesPagedResponse>
        listTracesSettings() {
      return listTracesSettings;
    }

    @Override
    public TraceServiceSettings build() throws IOException {
      return new TraceServiceSettings(this);
    }
  }
}
/* * Copyright 2021 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.remote.work.artifact; import com.thoughtworks.go.config.ArtifactStore; import com.thoughtworks.go.config.ArtifactStores; import com.thoughtworks.go.config.PluggableArtifactConfig; import com.thoughtworks.go.domain.*; import com.thoughtworks.go.plugin.access.artifact.ArtifactExtension; import com.thoughtworks.go.plugin.access.artifact.model.PublishArtifactResponse; import com.thoughtworks.go.plugin.infra.PluginRequestProcessorRegistry; import com.thoughtworks.go.util.TestFileUtil; import com.thoughtworks.go.util.command.EnvironmentVariableContext; import com.thoughtworks.go.work.GoPublisher; import org.apache.commons.io.FileUtils; import org.junit.jupiter.api.Assumptions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import org.mockito.InOrder; import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.*; import static com.thoughtworks.go.domain.packagerepository.ConfigurationPropertyMother.create; import static com.thoughtworks.go.remote.work.artifact.ArtifactRequestProcessor.Request.CONSOLE_LOG; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.*; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.any; 
import static org.mockito.Mockito.*; public class ArtifactsPublisherTest { private static final boolean IS_WINDOWS = System.getProperty("os.name").startsWith("Windows"); @TempDir File workingFolder; private ArtifactsPublisher artifactsPublisher; private ArtifactExtension artifactExtension; private StubGoPublisher publisher; private PluginRequestProcessorRegistry registry; private final EnvironmentVariableContext env = new EnvironmentVariableContext("foo", "bar"); @BeforeEach public void setUp() throws IOException { artifactExtension = mock(ArtifactExtension.class); registry = mock(PluginRequestProcessorRegistry.class); publisher = new StubGoPublisher(); artifactsPublisher = new ArtifactsPublisher(publisher, artifactExtension, new ArtifactStores(), registry, workingFolder); File file = new File(workingFolder, "cruise-output/log.xml"); file.getParentFile().mkdirs(); file.createNewFile(); } @Test public void shouldMergeTestReportFilesAndUploadResult() throws Exception { List<ArtifactPlan> artifactPlans = new ArrayList<>(); new DefaultJobPlan(new Resources(), artifactPlans, -1, null, null, new EnvironmentVariables(), new EnvironmentVariables(), null, null); artifactPlans.add(new ArtifactPlan(ArtifactPlanType.unit, "test1", "test")); artifactPlans.add(new ArtifactPlan(ArtifactPlanType.unit, "test2", "test")); final File firstTestFolder = prepareTestFolder(workingFolder, "test1"); final File secondTestFolder = prepareTestFolder(workingFolder, "test2"); artifactsPublisher.publishArtifacts(artifactPlans, env); publisher.assertPublished(firstTestFolder.getAbsolutePath(), "test"); publisher.assertPublished(secondTestFolder.getAbsolutePath(), "test"); publisher.assertPublished("result", "testoutput"); publisher.assertPublished("result" + File.separator + "index.html", "testoutput"); } @Test public void shouldReportErrorWithTestArtifactSrcWhenUploadFails() throws Exception { List<ArtifactPlan> artifactPlans = new ArrayList<>(); new DefaultJobPlan(new Resources(), 
artifactPlans, -1, null, null, new EnvironmentVariables(), new EnvironmentVariables(), null, null); artifactPlans.add(new ArtifactPlan(ArtifactPlanType.unit, "test1", "test")); artifactPlans.add(new ArtifactPlan(ArtifactPlanType.unit, "test2", "test")); prepareTestFolder(workingFolder, "test1"); prepareTestFolder(workingFolder, "test2"); publisher.setShouldFail(true); try { artifactsPublisher.publishArtifacts(artifactPlans, env); } catch (Exception e) { assertThat(e.getMessage(), containsString("Failed to upload [test1, test2]")); } } @Test public void shouldUploadFilesCorrectly() throws Exception { List<ArtifactPlan> artifactPlans = new ArrayList<>(); final File src1 = TestFileUtil.createTestFolder(workingFolder, "src1"); TestFileUtil.createTestFile(src1, "test.txt"); artifactPlans.add(new ArtifactPlan(ArtifactPlanType.file, src1.getName(), "dest")); final File src2 = TestFileUtil.createTestFolder(workingFolder, "src2"); TestFileUtil.createTestFile(src1, "test.txt"); artifactPlans.add(new ArtifactPlan(ArtifactPlanType.file, src2.getName(), "test")); StubGoPublisher publisher = new StubGoPublisher(); new ArtifactsPublisher(publisher, artifactExtension, new ArtifactStores(), registry, workingFolder).publishArtifacts(artifactPlans, env); Map<File, String> expectedFiles = new HashMap<File, String>() { { put(src1, "dest"); put(src2, "test"); } }; assertThat(publisher.publishedFiles(), is(expectedFiles)); } @Test public void shouldUploadFilesWhichMatchedWildCard() throws Exception { List<ArtifactPlan> artifactPlans = new ArrayList<>(); final File src1 = TestFileUtil.createTestFolder(workingFolder, "src1"); final File testFile1 = TestFileUtil.createTestFile(src1, "test1.txt"); final File testFile2 = TestFileUtil.createTestFile(src1, "test2.txt"); final File testFile3 = TestFileUtil.createTestFile(src1, "readme.pdf"); artifactPlans.add(new ArtifactPlan(ArtifactPlanType.file, src1.getName() + "/*", "dest")); artifactsPublisher.publishArtifacts(artifactPlans, env); 
Map<File, String> expectedFiles = new HashMap<File, String>() { { put(testFile1, "dest"); put(testFile2, "dest"); put(testFile3, "dest"); } }; assertThat(publisher.publishedFiles(), is(expectedFiles)); } @Test public void shouldPublishPluggableArtifactsAndUploadMetadataFileToServer() throws IOException { final ArtifactStore s3ArtifactStore = new ArtifactStore("s3", "cd.go.s3", create("access_key", false, "some-key")); final ArtifactStore dockerArtifactStore = new ArtifactStore("docker", "cd.go.docker", create("registry-url", false, "docker.io")); final ArtifactStores artifactStores = new ArtifactStores(s3ArtifactStore, dockerArtifactStore); final ArtifactPlan s3ArtifactPlan = new ArtifactPlan(new PluggableArtifactConfig("installers", "s3", create("Baz", true, "Car"))); final ArtifactPlan dockerArtifactPlan = new ArtifactPlan(new PluggableArtifactConfig("test-reports", "docker", create("junit", false, "junit.xml"))); when(artifactExtension.publishArtifact(eq("cd.go.s3"), eq(s3ArtifactPlan), eq(s3ArtifactStore), anyString(), eq(env))) .thenReturn(new PublishArtifactResponse(Collections.singletonMap("src", "s3://dist"))); when(artifactExtension.publishArtifact(eq("cd.go.docker"), eq(dockerArtifactPlan), eq(dockerArtifactStore), anyString(), eq(env))) .thenReturn(new PublishArtifactResponse(Collections.singletonMap("image", "alpine"))); new ArtifactsPublisher(publisher, artifactExtension, artifactStores, registry, workingFolder) .publishArtifacts(Arrays.asList(s3ArtifactPlan, dockerArtifactPlan), env); assertThat(uploadedPluggableMetadataFiles(publisher.publishedFiles()), containsInAnyOrder("cd.go.s3.json", "cd.go.docker.json")); } @Test public void shouldNotUploadMetadataFileWhenPublishPluggableArtifactIsUnsuccessful() { final ArtifactStore artifactStore = new ArtifactStore("s3", "cd.go.s3", create("Foo", false, "Bar")); final ArtifactStores artifactStores = new ArtifactStores(artifactStore); final ArtifactPlan artifactPlan = new ArtifactPlan(new 
PluggableArtifactConfig("installers", "s3", create("Baz", true, "Car")));
        // Plugin invocation fails outright: nothing should be uploaded and the
        // failure must be reported against the [installers] plan.
        when(artifactExtension.publishArtifact(eq("cd.go.s3"), eq(artifactPlan), eq(artifactStore), anyString(), eq(env))).thenThrow(new RuntimeException("something"));

        try {
            new ArtifactsPublisher(publisher, artifactExtension, artifactStores, registry, workingFolder)
                    .publishArtifacts(Arrays.asList(artifactPlan), env);
            fail("Should throw error for pluggable artifact [installers].");
        } catch (Exception e) {
            assertThat(publisher.publishedFiles().size(), is(0));
            assertThat(e.getMessage(), containsString("[go] Uploading finished. Failed to upload [installers]."));
        }
    }

    /**
     * A read-only working folder must make creation of the pluggable-artifact
     * metadata folder fail with a RuntimeException naming that folder.
     */
    @Test
    public void shouldErrorOutWhenFailedToCreateFolderToWritePluggableArtifactMetadata() {
        // File#setWritable(false) is not reliable on Windows, so skip there.
        Assumptions.assumeFalse(IS_WINDOWS, "Do not run on windows.");
        final ArtifactStore artifactStore = new ArtifactStore("s3", "cd.go.s3", create("Foo", false, "Bar"));
        final ArtifactStores artifactStores = new ArtifactStores(artifactStore);
        final ArtifactPlan artifactPlan = new ArtifactPlan(new PluggableArtifactConfig("installers", "s3", create("Baz", true, "Car")));
        when(artifactExtension.publishArtifact(eq("cd.go.s3"), eq(artifactPlan), eq(artifactStore), anyString(), eq(env)))
                .thenReturn(new PublishArtifactResponse(Collections.singletonMap("Foo", "Bar")));

        workingFolder.setWritable(false);

        assertThatThrownBy(() -> new ArtifactsPublisher(publisher, artifactExtension, artifactStores, registry, workingFolder)
                .publishArtifacts(Arrays.asList(artifactPlan), env))
                .isInstanceOf(RuntimeException.class)
                .hasMessageContaining("[go] Could not create pluggable artifact metadata folder");
    }

    /**
     * A failure in one plugin must not prevent the other plugin's metadata from
     * being uploaded; the overall publish still fails for the broken plan.
     */
    @Test
    public void shouldContinueWithOtherPluginWhenPublishArtifactCallFailsForOnePlugin() throws IOException {
        final ArtifactStore s3ArtifactStore = new ArtifactStore("s3", "cd.go.s3", create("access_key", false, "some-key"));
        final ArtifactStore dockerArtifactStore = new ArtifactStore("docker", "cd.go.docker", create("registry-url", false, "docker.io"));
        final ArtifactStores artifactStores = new ArtifactStores(s3ArtifactStore, dockerArtifactStore);
        final ArtifactPlan s3ArtifactPlan = new ArtifactPlan(new PluggableArtifactConfig("installers", "s3", create("Baz", true, "Car")));
        final ArtifactPlan dockerArtifactPlan = new ArtifactPlan(new PluggableArtifactConfig("test-reports", "docker", create("junit", false, "junit.xml")));
        // s3 plugin blows up; docker plugin succeeds.
        when(artifactExtension.publishArtifact(eq("cd.go.s3"), eq(s3ArtifactPlan), eq(s3ArtifactStore), anyString(), eq(env)))
                .thenThrow(new RuntimeException("Interaction with plugin `cd.go.s3` failed."));
        when(artifactExtension.publishArtifact(eq("cd.go.docker"), eq(dockerArtifactPlan), eq(dockerArtifactStore), anyString(), eq(env)))
                .thenReturn(new PublishArtifactResponse(Collections.singletonMap("tag", "10.12.0")));

        try {
            new ArtifactsPublisher(publisher, artifactExtension, artifactStores, registry, workingFolder)
                    .publishArtifacts(Arrays.asList(s3ArtifactPlan, dockerArtifactPlan), env);
            fail("Should throw error for pluggable artifact [installers].");
        } catch (Exception e) {
            // Only the docker plugin's metadata file made it out.
            assertThat(uploadedPluggableMetadataFiles(publisher.publishedFiles()), containsInAnyOrder("cd.go.docker.json"));
            assertThat(publisher.getMessage(), containsString("[go] Interaction with plugin `cd.go.s3` failed"));
            assertThat(e.getMessage(), containsString("[go] Uploading finished. Failed to upload [installers]."));
        }
    }

    // Extracts the names of files that were uploaded to the
    // "pluggable-artifact-metadata" destination from the (file -> destination)
    // map recorded by the stub publisher.
    private Set<String> uploadedPluggableMetadataFiles(Map<File, String> actual) {
        final HashSet<String> filesUploaded = new HashSet<>();
        for (Map.Entry<File, String> entry : actual.entrySet()) {
            if (entry.getValue().equals("pluggable-artifact-metadata")) {
                filesUploaded.add(entry.getKey().getName());
            }
        }
        return filesUploaded;
    }

    /**
     * The generated pluggable-artifact metadata must be uploaded before any of
     * the regular build/test artifacts, regardless of plan order.
     */
    @Test
    public void shouldAddPluggableArtifactMetadataFileArtifactPlanAtTop() throws Exception {
        TestFileUtil.createTestFile(workingFolder, "installer.zip");
        TestFileUtil.createTestFile(workingFolder, "testreports.xml");
        final ArtifactStore artifactStore = new ArtifactStore("s3", "cd.go.s3", create("Foo", false, "Bar"));
        final ArtifactStores artifactStores = new ArtifactStores(artifactStore);
        final ArtifactPlan artifactPlan = new ArtifactPlan(new PluggableArtifactConfig("installers", "s3", create("Baz", true, "Car")));
        // The pluggable plan is deliberately listed last.
        List<ArtifactPlan> artifactPlans = Arrays.asList(
                new ArtifactPlan(ArtifactPlanType.file, "installer.zip", "dist"),
                new ArtifactPlan(ArtifactPlanType.unit, "testreports.xml", "testreports"),
                artifactPlan
        );
        when(artifactExtension.publishArtifact(eq("cd.go.s3"), eq(artifactPlan), eq(artifactStore), anyString(), eq(env)))
                .thenReturn(new PublishArtifactResponse(Collections.singletonMap("Foo", "Bar")));
        final GoPublisher publisher = mock(GoPublisher.class);

        new ArtifactsPublisher(publisher, artifactExtension, artifactStores, registry, workingFolder)
                .publishArtifacts(artifactPlans, env);

        // Metadata upload must come first.
        InOrder inOrder = inOrder(publisher);
        inOrder.verify(publisher).upload(any(), eq("pluggable-artifact-metadata"));
        inOrder.verify(publisher).upload(any(), eq("dist"));
        inOrder.verify(publisher).upload(any(), eq("testreports"));
    }

    /**
     * The temporary pluggable-artifact-metadata directory must be removed from
     * the working folder once publishing completes.
     */
    @Test
    public void shouldDeletePluggableArtifactMetadataDirectory() throws Exception {
        TestFileUtil.createTestFile(workingFolder, "installer.zip");
        TestFileUtil.createTestFile(workingFolder, "testreports.xml");
        final ArtifactStore artifactStore = new ArtifactStore("s3", "cd.go.s3", create("Foo", false, "Bar"));
        final ArtifactStores artifactStores = new ArtifactStores(artifactStore);
        final ArtifactPlan artifactPlan = new ArtifactPlan(new PluggableArtifactConfig("installers", "s3", create("Baz", true, "Car")));
        List<ArtifactPlan> artifactPlans = Arrays.asList(
                new ArtifactPlan(ArtifactPlanType.file, "installer.zip", "dist"),
                new ArtifactPlan(ArtifactPlanType.unit, "testreports.xml", "testreports"),
                artifactPlan
        );
        when(artifactExtension.publishArtifact(eq("cd.go.s3"), eq(artifactPlan), eq(artifactStore), anyString(), eq(env)))
                .thenReturn(new PublishArtifactResponse(Collections.singletonMap("Foo", "Bar")));
        final GoPublisher publisher = mock(GoPublisher.class);

        assertThat(Arrays.asList(workingFolder.list()), containsInAnyOrder("testreports.xml", "installer.zip", "cruise-output"));

        new ArtifactsPublisher(publisher, artifactExtension, artifactStores, registry, workingFolder)
                .publishArtifacts(artifactPlans, env);

        // Working-folder contents are unchanged: the metadata dir was cleaned up.
        assertThat(Arrays.asList(workingFolder.list()), containsInAnyOrder("testreports.xml", "installer.zip", "cruise-output"));
    }

    /**
     * The console-log request processor must be registered before the plugin is
     * asked to publish and deregistered afterwards.
     */
    @Test
    public void shouldRegisterAndDeRegisterArtifactRequestProcessBeforeAndAfterPublishingPluggableArtifact() {
        final ArtifactStore s3ArtifactStore = new ArtifactStore("s3", "cd.go.s3", create("access_key", false, "some-key"));
        final ArtifactStores artifactStores = new ArtifactStores(s3ArtifactStore);
        final ArtifactPlan s3ArtifactPlan = new ArtifactPlan(new PluggableArtifactConfig("installers", "s3", create("Baz", true, "Car")));
        when(artifactExtension.publishArtifact(eq("cd.go.s3"), eq(s3ArtifactPlan), eq(s3ArtifactStore), anyString(), eq(env)))
                .thenReturn(new PublishArtifactResponse(Collections.singletonMap("src", "s3://dist")));

        new ArtifactsPublisher(publisher, artifactExtension, artifactStores, registry, workingFolder)
                .publishArtifacts(Arrays.asList(s3ArtifactPlan), env);

        // register -> publish -> deregister, strictly in that order.
        InOrder inOrder = inOrder(registry, artifactExtension);
        inOrder.verify(registry, times(1)).registerProcessorFor(eq(CONSOLE_LOG.requestName()), any(ArtifactRequestProcessor.class));
        inOrder.verify(artifactExtension, times(1))
                .publishArtifact("cd.go.s3", s3ArtifactPlan, s3ArtifactStore, workingFolder.getAbsolutePath(), env);
        inOrder.verify(registry, times(1)).removeProcessorFor(CONSOLE_LOG.requestName());
    }

    // Creates <workingFolder>/<folderName>/testFile.xml containing a minimal
    // JUnit-style XML report, and returns the created folder.
    private File prepareTestFolder(File workingFolder, String folderName) throws Exception {
        File testFolder = TestFileUtil.createTestFolder(workingFolder, folderName);
        File testFile = TestFileUtil.createTestFile(testFolder, "testFile.xml");
        String content = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
                "<testsuite errors=\"0\" failures=\"0\" tests=\"7\" time=\"0.429\" >\n" +
                "<testcase/>\n" +
                "</testsuite>\n";
        FileUtils.writeStringToFile(testFile, content, StandardCharsets.UTF_8);
        return testFolder;
    }
}
package com.solderbyte.notifyte;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;

import android.annotation.SuppressLint;
import android.app.Service;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCallback;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattDescriptor;
import android.bluetooth.BluetoothGattService;
import android.bluetooth.BluetoothManager;
import android.bluetooth.BluetoothProfile;
import android.bluetooth.BluetoothServerSocket;
import android.bluetooth.BluetoothSocket;
import android.bluetooth.le.BluetoothLeScanner;
import android.bluetooth.le.ScanCallback;
import android.bluetooth.le.ScanResult;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Binder;
import android.os.Build;
import android.os.IBinder;
import android.util.Log;

/**
 * Bound Service that owns the Bluetooth stack for Notifyte: it scans for
 * devices (LE scan or classic discovery depending on API level), maintains a
 * GATT connection to the Notifyte desktop peer, reassembles notification
 * payloads that arrive in MTU-sized chunks, and relays state changes to the
 * rest of the app via {@code Intents.INTENT_BLUETOOTH} broadcasts.
 */
public class BluetoothLeService extends Service {
    private static final String LOG_TAG = "Notifyte:BluetoothLe";

    // states — public mutable flags read by other components of the app.
    public static boolean isEnabled = false;
    public static boolean isConnected = false;
    public static boolean isScanning = false;
    public static boolean isRemaining = false;      // a write larger than BYTE_MAX is pending continuation
    public static boolean isReceiving = false;      // an inbound multi-chunk message is in progress
    public static boolean isReconnecting = false;
    public static boolean canReconnect = false;
    public static volatile boolean isThreadRunning = false;

    // bluetooth
    private static BluetoothAdapter bluetoothAdapter;
    private static BluetoothManager bluetoothManager;
    private static BluetoothGatt bluetoothGatt;
    private static BluetoothDevice bluetoothDevice;
    private static BluetoothSocket bluetoothSocket;
    private static BluetoothServerSocket bluetoothServerSocket;
    private static BluetoothGattCharacteristic bluetoothCharacteristic;
    private static BluetoothGattDescriptor bluetoothDescriptor;
    public static InputStream inputStream;
    public static OutputStream outputStream;

    // bluetooth states
    private String bluetoothAddress;
    private static Set<BluetoothDevice> pairedDevices;
    private static Set<BluetoothDevice> scannedDevices;

    // threads
    private static BluetoothThread bluetoothThread;
    private static ConnectBluetoothThread connectThread;
    private static EnableBluetoothThread enableThread;
    private static ReconnectBluetoothThread reconnectThread;

    // globals
    private Context context;
    private final static long SCAN_PERIOD = 7000;   // scan duration in ms
    private final static int BYTE_MAX = 512;        // max bytes per characteristic write / requested MTU
    private final static int BYTE_MTU = 20;         // default BLE payload size; a full chunk implies more data follows
    private byte[] BYTE_BUFFER = null;              // remainder of an outbound write split at BYTE_MAX
    // NOTE(review): raw ArrayList holding byte[] chunks of the inbound message — consider ArrayList<byte[]>.
    ArrayList BYTE_RX_BUFFER = new ArrayList();
    private static int RECONNECT_ATTEMPTS = 0;
    private static long RECCONECT_TIME = 0;         // NOTE(review): name is a typo for RECONNECT_TIME
    public final static String EXTRA_DATA = "EXTRA_DATA";
    public final static String ACTION_DATA_AVAILABLE = "ACTION_DATA_AVAILABLE";
    public final static String ACTION_GATT_CONNECTED = "ACTION_GATT_CONNECTED";
    public final static String ACTION_GATT_DISCONNECTED = "ACTION_GATT_DISCONNECTED";
    public final static String ACTION_GATT_SERVICES_DISCOVERED = "ACTION_GATT_SERVICES_DISCOVERED";
    private static final UUID NOTIFYTE_MOBILE_UUID = UUID.fromString("0000fffa-0000-1000-8000-00805f9b34fb");
    private static final UUID NOTIFYTE_DESKTOP_UUID = UUID.fromString("0000fff0-0000-1000-8000-00805f9b34fb");
    private static final UUID NOTIFYTE_CHAR_UUID = UUID.fromString("0000fff1-0000-1000-8000-00805f9b34fb");
    private static final UUID NOTIFYTE_DESC_UUID = UUID.fromString("00002902-0000-1000-8000-00805f9b34fb");

    // gatt states — integer-code-to-name lookup tables used only for logging.
    // (Double-brace initialization creates an anonymous subclass per map.)
    public static HashMap<Integer, String> gattStatus = new HashMap<Integer, String>() {{
        put(0, "GATT_SUCCESS");
        put(2, "GATT_READ_NOT_PERMITTED");
        put(3, "GATT_WRITE_NOT_PERMITTED");
        put(5, "GATT_INSUFFICIENT_AUTHENTICATION");
        put(6, "GATT_REQUEST_NOT_SUPPORTED");
        put(7, "GATT_INVALID_OFFSET");
        put(13, "GATT_INVALID_ATTRIBUTE_LENGTH");
        put(15, "GATT_INSUFFICIENT_ENCRYPTION");
        put(143, "GATT_CONNECTION_CONGESTED");
        put(257, "GATT_FAILURE");
    }};
    public static HashMap<Integer, String> gattState = new HashMap<Integer, String>() {{
        put(0, "STATE_DISCONNECTED");
        put(1, "STATE_CONNECTING");
        put(2, "STATE_CONNECTED");
        put(3, "STATE_DISCONNECTING");
    }};
    public static HashMap<Integer, String> gattServiceType = new HashMap<Integer, String>() {{
        put(0, "SERVICE_TYPE_PRIMARY");
        put(1, "SERVICE_TYPE_SECONDARY");
    }};
    public static HashMap<Integer, String> gattCharacteristicPermission = new HashMap<Integer, String>() {{
        put(1, "PERMISSION_READ");
        put(2, "PERMISSION_READ_ENCRYPTED");
        put(4, "PERMISSION_READ_ENCRYPTED_MITM");
        put(16, "PERMISSION_WRITE");
        put(32, "PERMISSION_WRITE_ENCRYPTED");
        put(64, "PERMISSION_WRITE_ENCRYPTED_MITM");
        put(128, "PERMISSION_WRITE_SIGNED");
        put(256, "PERMISSION_WRITE_SIGNED_MITM");
    }};
    public static HashMap<Integer, String> gattCharacteristicProperty = new HashMap<Integer, String>() {{
        put(1, "PROPERTY_BROADCAST");
        put(2, "PROPERTY_READ");
        put(4, "PROPERTY_WRITE_NO_RESPONSE");
        put(8, "PROPERTY_WRITE");
        put(16, "PROPERTY_NOTIFY");
        put(32, "PROPERTY_INDICATE");
        put(64, "PROPERTY_SIGNED_WRITE");
        put(128, "PROPERTY_EXTENDED_PROPS");
    }};
    public static HashMap<Integer, String> gattCharacteristicWriteType = new HashMap<Integer, String>() {{
        put(1, "WRITE_TYPE_NO_RESPONSE");
        put(2, "WRITE_TYPE_DEFAULT");
        put(4, "WRITE_TYPE_SIGNED");
    }};

    // GATT event callback: drives connection state broadcasts, service/characteristic
    // discovery for the Notifyte desktop peer, and inbound data delivery.
    private final BluetoothGattCallback mGattCallback = new BluetoothGattCallback() {
        @Override
        public void onConnectionStateChange(BluetoothGatt gatt, int status, int newState) {
            Log.d(LOG_TAG, "onConnectionStateChange: " + status + ":" + gattStatus.get(status) + ":" + gattState.get(newState));
            if(newState == BluetoothProfile.STATE_CONNECTED) {
                isConnected = true;
                BluetoothLeService.this.stopReconnectBle();
                Log.d(LOG_TAG, "Connected");
                BluetoothLeService.this.broadcastUpdate(ACTION_GATT_CONNECTED);
                if(context != null) {
                    Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
                    msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_CONNECTED);
                    context.sendBroadcast(msg);
                }
                // attempts to discover services after successful connection.
                bluetoothGatt.discoverServices();
            } else if(newState == BluetoothProfile.STATE_DISCONNECTED) {
                isConnected = false;
                Log.d(LOG_TAG, "Disconnected");
                BluetoothLeService.this.broadcastUpdate(ACTION_GATT_DISCONNECTED);
                if(context != null) {
                    Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
                    msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_DISCONNECTED);
                    context.sendBroadcast(msg);
                }
                if(!isReconnecting) {
                    BluetoothLeService.this.reconnectBle();
                }
            }
            // "connecting finished" broadcast, sent for every state change.
            // NOTE(review): unlike the branches above, this does not null-check
            // `context` — confirm the callback can never fire before initialize().
            Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
            msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_CONNECTING);
            msg.putExtra(Intents.INTENT_EXTRA_DATA, false);
            context.sendBroadcast(msg);
        }

        @Override
        public void onServicesDiscovered(BluetoothGatt gatt, int status) {
            Log.d(LOG_TAG, "onServicesDiscovered: " + gattStatus.get(status));
            if(status == BluetoothGatt.GATT_SUCCESS) {
                // loops through available GATT Services.
                for(BluetoothGattService gattService : gatt.getServices()) {
                    String uuid = gattService.getUuid().toString();
                    String type = gattServiceType.get(gattService.getType());
                    Log.d(LOG_TAG, "gattService: " + gattService);
                    Log.d(LOG_TAG, "gattService type: " + type);
                    Log.d(LOG_TAG, "gattService uuid: " + uuid);
                    // get characteristic when UUID matches the desktop service,
                    // subscribe to notifications and request a larger MTU.
                    if(uuid.equals(BluetoothLeService.NOTIFYTE_DESKTOP_UUID.toString())) {
                        Log.d(LOG_TAG, "Service Found: Notifyte Desktop App");
                        bluetoothCharacteristic = gattService.getCharacteristic(BluetoothLeService.NOTIFYTE_CHAR_UUID);
                        gatt.setCharacteristicNotification(bluetoothCharacteristic, true);
                        Log.d(LOG_TAG, "bluetoothCharacteristic: " + bluetoothCharacteristic);
                        bluetoothDescriptor = bluetoothCharacteristic.getDescriptor(BluetoothLeService.NOTIFYTE_DESC_UUID);
                        bluetoothDescriptor.setValue(BluetoothGattDescriptor.ENABLE_NOTIFICATION_VALUE);
                        gatt.writeDescriptor(bluetoothDescriptor);
                        if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                            boolean mtu = gatt.requestMtu(BluetoothLeService.BYTE_MAX);
                            Log.d(LOG_TAG, "requestMtu: " + mtu + " : " + BluetoothLeService.BYTE_MAX);
                        } else {
                            Log.d(LOG_TAG, "requestMtu: " + 20);
                        }
                        Log.d(LOG_TAG, "bluetoothDescriptor: " + bluetoothDescriptor);
                        Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
                        msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_CONNECTED_DESKTOP);
                        context.sendBroadcast(msg);
                    }
                    for(BluetoothGattCharacteristic gattCharacteristic : gattService.getCharacteristics()) {
                        String cUuid = gattCharacteristic.getUuid().toString();
                        int cInstanceId = gattCharacteristic.getInstanceId();
                        int cPermissions = gattCharacteristic.getPermissions();
                        int cProperties = gattCharacteristic.getProperties();
                        byte[] cValue = gattCharacteristic.getValue();
                        int cWriteType = gattCharacteristic.getWriteType();
                        // Log.d(LOG_TAG, "gattCharacteristic cUuid: " + cUuid);
                        // Log.d(LOG_TAG, "gattCharacteristic cInstanceId: " + cInstanceId);
                        // Log.d(LOG_TAG, "gattCharacteristic cPermissions: " + cPermissions + ":" + gattCharacteristicPermission.get(cPermissions));
                        // Log.d(LOG_TAG, "gattCharacteristic cProperties: " + cProperties + ":" + gattCharacteristicProperty.get(cProperties));
                        // Log.d(LOG_TAG, "gattCharacteristic cValue: " + cValue);
                        // Log.d(LOG_TAG, "gattCharacteristic cWriteType: " + cWriteType + ":" + gattCharacteristicWriteType.get(cWriteType));
                    }
                }
                BluetoothLeService.this.broadcastUpdate(ACTION_GATT_SERVICES_DISCOVERED);
            } else {
                Log.d(LOG_TAG, "onServicesDiscovered: none");
            }
        }

        @Override
        public void onCharacteristicRead(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic, int status) {
            Log.d(LOG_TAG, "onCharacteristicRead: " + characteristic + ":" + gattStatus.get(status));
            if(status == BluetoothGatt.GATT_SUCCESS) {
                BluetoothLeService.this.broadcastUpdate(ACTION_DATA_AVAILABLE, characteristic);
            }
        }

        @Override
        public void onCharacteristicWrite(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic, int status) {
            Log.d(LOG_TAG, "onCharacteristicWrite: " + characteristic + ":" + gattStatus.get(status));
            //BluetoothLeService.this.broadcastUpdate(ACTION_DATA_AVAILABLE, characteristic);
            // Continue a write that was split at BYTE_MAX in writeBle().
            if(isRemaining) {
                Log.d(LOG_TAG, "onCharacteristicWrite: isRemaining");
                isRemaining = false;
                BluetoothLeService.this.writeBle(BYTE_BUFFER);
            }
            // NOTE(review): gattStatus.get(status) returns null for unmapped codes,
            // which would NPE here; the branch body is also empty — confirm intent.
            if(gattStatus.get(status).equals("GATT_INVALID_ATTRIBUTE_LENGTH")) {
            }
        }

        @Override
        public void onCharacteristicChanged(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic) {
            Log.d(LOG_TAG, "onCharacteristicChanged: " + characteristic);
            BluetoothLeService.this.broadcastUpdate(ACTION_DATA_AVAILABLE, characteristic);
        }
    };

    // Broadcasts a bare action intent to the rest of the app.
    private void broadcastUpdate(final String action) {
        Log.d(LOG_TAG, "broadcastUpdate: " + action);
        final Intent intent = new Intent(action);
        this.sendBroadcast(intent);
    }

    // Broadcasts characteristic data; buffers MTU-sized chunks and, when a
    // NUL terminator arrives, reassembles the full message and broadcasts it
    // as a notification payload.
    private void broadcastUpdate(final String action, final BluetoothGattCharacteristic characteristic) {
        Log.d(LOG_TAG, "broadcastUpdate: " + characteristic);
        final Intent intent = new Intent(action);
        // for all other profiles, writes the data formatted in HEX.
        final byte[] data = characteristic.getValue();
        // NOTE(review): data is dereferenced (data.length, new String(data)) before
        // the `data != null` check below — confirm getValue() can never be null here.
        if(isReceiving) {
            Log.d(LOG_TAG, "adding to buffer");
            BYTE_RX_BUFFER.add(data);
        } else {
            Log.d(LOG_TAG, "creating new buffer");
            BYTE_RX_BUFFER = new ArrayList();
            BYTE_RX_BUFFER.add(data);
        }
        // A full MTU-sized chunk means more chunks are expected.
        if(data.length == BYTE_MTU) {
            Log.d(LOG_TAG, "isReceiving");
            isReceiving = true;
        } else {
            Log.d(LOG_TAG, "isNotReceiving");
            isReceiving = false;
        }
        if(data != null && data.length > 0) {
            String str = new String(data);
            intent.putExtra(EXTRA_DATA, new String(data));
            // NUL byte marks end-of-message: concatenate all buffered chunks.
            if(str.contains("\0")) {
                isReceiving = false;
                String message = new String();
                for(int i = 0; i < BYTE_RX_BUFFER.size(); i++) {
                    byte[] b = (byte[]) BYTE_RX_BUFFER.get(i);
                    String temp = new String(b);
                    message += temp;
                }
                BYTE_RX_BUFFER = new ArrayList();
                Log.d(LOG_TAG, "End of message: " + message);
                Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
                msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_NOTIFICATION);
                msg.putExtra(Intents.INTENT_EXTRA_DATA, message);
                context.sendBroadcast(msg);
            }
        }
        this.sendBroadcast(intent);
    }

    // Binder handed to bound clients; exposes this service instance directly.
    public class LocalBinder extends Binder {
        BluetoothLeService getService() {
            Log.d(LOG_TAG, "getService");
            return BluetoothLeService.this;
        }
    }

    @Override
    public IBinder onBind(Intent intent) {
        Log.d(LOG_TAG, "onBind");
        return iBinder;
    }

    @Override
    public boolean onUnbind(Intent intent) {
        Log.d(LOG_TAG, "onUnbind");
        // Last client unbound: stop the service and release the GATT connection.
        this.stopSelf();
        this.close();
        return super.onUnbind(intent);
    }

    private final IBinder iBinder = new LocalBinder();

    // Requests an asynchronous read; result arrives in onCharacteristicRead.
    public void readCharacteristic(BluetoothGattCharacteristic characteristic) {
        if(bluetoothAdapter == null || bluetoothGatt == null) {
            Log.w(LOG_TAG, "bluetoothAdapter is null or bluetoothGatt is null");
            return;
        }
        Log.d(LOG_TAG, "readCharacteristic");
        bluetoothGatt.readCharacteristic(characteristic);
    }

    // Enables/disables local notification dispatch for the characteristic.
    public void setCharacteristicNotification(BluetoothGattCharacteristic characteristic, boolean enabled) {
        if(bluetoothAdapter == null || bluetoothGatt == null) {
            Log.w(LOG_TAG, "bluetoothAdapter is null or bluetoothGatt is null");
            return;
        }
        Log.d(LOG_TAG, "setCharacteristicNotification: " + enabled);
        bluetoothGatt.setCharacteristicNotification(characteristic, enabled);
    }

    // Requests an asynchronous write; result arrives in onCharacteristicWrite.
    public void writeCharacteristic(BluetoothGattCharacteristic characteristic) {
        if(bluetoothAdapter == null || bluetoothGatt == null) {
            Log.w(LOG_TAG, "bluetoothAdapter is null or bluetoothGatt is null");
            return;
        }
        Log.d(LOG_TAG, "writeCharacteristic");
        bluetoothGatt.writeCharacteristic(characteristic);
    }

    // Returns discovered services, or null when no GATT connection exists.
    public List<BluetoothGattService> getSupportedGattServices() {
        if(bluetoothGatt == null) {
            Log.w(LOG_TAG, "bluetoothGatt is null");
            return null;
        }
        return bluetoothGatt.getServices();
    }

    /**
     * Acquires the BluetoothManager/Adapter, resets the scanned-device set and
     * broadcasts the current enabled/disabled state.
     *
     * @return false when the manager or adapter cannot be obtained.
     */
    public boolean initialize() {
        Log.d(LOG_TAG, "initialize");
        context = BluetoothLeService.this.getApplicationContext();
        scannedDevices = new LinkedHashSet<BluetoothDevice>();
        if(bluetoothManager == null) {
            bluetoothManager = (BluetoothManager) BluetoothLeService.this.getSystemService(Context.BLUETOOTH_SERVICE);
            if(bluetoothManager == null) {
                Log.e(LOG_TAG, "initialize failed. bluetoothManager is null");
                return false;
            }
        }
        bluetoothAdapter = bluetoothManager.getAdapter();
        if(bluetoothAdapter == null) {
            Log.e(LOG_TAG, "initialize failed. bluetoothAdapter is null");
            return false;
        }
        if(bluetoothAdapter.isEnabled()) {
            isEnabled = true;
            Log.d(LOG_TAG, "bluetoothAdapter is enabled");
            Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
            msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_ENABLED);
            context.sendBroadcast(msg);
        } else {
            isEnabled = false;
            Log.d(LOG_TAG, "bluetoothAdapter is not enabled");
            Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
            msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_DISABLED);
            context.sendBroadcast(msg);
        }
        return true;
    }

    /**
     * Connects to the GATT server at the given address. Re-uses the existing
     * BluetoothGatt (via forceConnectBle) when reconnecting to the same address.
     *
     * @return true when a connection attempt was started.
     */
    public boolean connectBle(final String address) {
        Log.d(LOG_TAG, "connectBle: " + address);
        if(bluetoothAdapter == null || address == null) {
            Log.d(LOG_TAG, "bluetoothAdapter is null or address is null");
            return false;
        }
        if(bluetoothAddress != null && address.equals(bluetoothAddress) && bluetoothGatt != null) {
            if(bluetoothGatt.connect()) {
                this.forceConnectBle();
                return true;
            } else {
                Log.d(LOG_TAG, "could not force reconnect");
                return false;
            }
        }
        final BluetoothDevice device = bluetoothAdapter.getRemoteDevice(address);
        if(device == null) {
            Log.d(LOG_TAG, "BluetoothDevice is null");
            return false;
        }
        // auto connectBle to the device
        Log.d(LOG_TAG, "connecting...");
        bluetoothGatt = device.connectGatt(this, false, mGattCallback);
        bluetoothAddress = address;
        Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
        msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_CONNECTING);
        msg.putExtra(Intents.INTENT_EXTRA_DATA, true);
        context.sendBroadcast(msg);
        return true;
    }

    // Disconnects the current GATT connection (no-op when none exists).
    public void disconnectBle() {
        Log.d(LOG_TAG, "disconnectBle");
        if(bluetoothGatt == null) {
            Log.d(LOG_TAG, "bluetoothAdapter is null or bluetoothGatt is null");
            return;
        }
        bluetoothGatt.disconnect();
    }

    // Tears down the current connection and opens a fresh GATT connection to
    // the last known address.
    public void forceConnectBle() {
        Log.d(LOG_TAG, "forceConnectBle");
        if(bluetoothAddress != null) {
            this.disconnectBle();
            Log.d(LOG_TAG, "force connecting...");
            final BluetoothDevice device = bluetoothAdapter.getRemoteDevice(bluetoothAddress);
            bluetoothGatt = device.connectGatt(this, false, mGattCallback);
            Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
            msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_CONNECTING);
            msg.putExtra(Intents.INTENT_EXTRA_DATA, true);
            context.sendBroadcast(msg);
        } else {
            Log.d(LOG_TAG, "bluetoothAddress is null");
        }
    }

    /**
     * Writes bytes to the Notifyte characteristic. Payloads longer than
     * BYTE_MAX are split; the remainder is stored in BYTE_BUFFER and sent from
     * onCharacteristicWrite once the first part is acknowledged.
     */
    public void writeBle(byte[] bytes) {
        Log.d(LOG_TAG, "writeBle: " + bytes.length);
        if(isEnabled) {
            if(isConnected) {
                if(bluetoothCharacteristic != null) {
                    if(bytes.length > BYTE_MAX) {
                        byte[] bytesToSend = Arrays.copyOfRange(bytes, 0, BYTE_MAX);
                        BYTE_BUFFER = Arrays.copyOfRange(bytes, BYTE_MAX, bytes.length);
                        isRemaining = true;
                        bytes = bytesToSend;
                    }
                    if(bluetoothCharacteristic.setValue(bytes)) {
                        this.writeCharacteristic(bluetoothCharacteristic);
                        this.setCharacteristicNotification(bluetoothCharacteristic, true);
                    } else {
                        Log.d(LOG_TAG, "bluetoothCharacteristic could not be set");
                    }
                } else {
                    Log.d(LOG_TAG, "bluetoothCharacteristic is null");
                }
            } else {
                Log.d(LOG_TAG, "Bluetooth not connected");
            }
        } else {
            Log.d(LOG_TAG, "Bluetooth not enabled");
        }
    }

    // Toggles whether automatic reconnection is attempted after a disconnect.
    public void setReconnect(boolean value) {
        canReconnect = value;
    }

    // Starts the background reconnect loop (when allowed via setReconnect).
    public void reconnectBle() {
        Log.d(LOG_TAG, "reconnectBle");
        if(canReconnect) {
            isReconnecting = true;
            reconnectThread = new ReconnectBluetoothThread();
            reconnectThread.start();
        }
    }

    // Stops the reconnect loop and resets the attempt counter.
    public void stopReconnectBle() {
        Log.d(LOG_TAG, "stopReconnectBle");
        RECONNECT_ATTEMPTS = 0;
        isReconnecting = false;
        if(reconnectThread != null) {
            reconnectThread.close();
            reconnectThread = null;
        }
    }

    // Releases the GATT client and any RFCOMM connect thread.
    public void close() {
        Log.d(LOG_TAG, "close");
        if(bluetoothGatt != null) {
            bluetoothGatt.close();
            bluetoothGatt = null;
        }
        if(connectThread != null) {
            connectThread.close();
            connectThread = null;
        }
    }

    // Asynchronously turns the adapter on (see EnableBluetoothThread).
    public void enableBluetooth() {
        Log.d(LOG_TAG, "enableBluetooth");
        if(!bluetoothAdapter.isEnabled()) {
            enableThread = new EnableBluetoothThread();
            enableThread.start();
        } else {
            Log.d(LOG_TAG, "BluetoothAdapter is enabled");
        }
    }

    // Turns the adapter off immediately.
    public void disableBluetooth() {
        Log.d(LOG_TAG, "disableBluetooth");
        bluetoothAdapter.disable();
        isEnabled = false;
    }

    // Opens a classic RFCOMM connection to the selected device (see
    // ConnectBluetoothThread).
    public void connectRfcomm() {
        Log.d(LOG_TAG, "connectRfcomm");
        if(!bluetoothAdapter.isEnabled()) {
            Log.d(LOG_TAG, "bluetoothAdapter is not enabled");
            return;
        }
        if(bluetoothDevice != null) {
            connectThread = new ConnectBluetoothThread();
            connectThread.start();
        } else {
            Log.d(LOG_TAG, "bluetoothDevice is null");
        }
    }

    // Closes the RFCOMM connection when one is active.
    public void disconnectRfcomm() {
        Log.d(LOG_TAG, "disconnectRfcomm");
        if(bluetoothDevice != null && isConnected) {
            connectThread.close();
        } else {
            Log.d(LOG_TAG, "bluetoothDevice is null or not connected");
        }
    }

    // Writes raw bytes over the RFCOMM link.
    public void writeRfComm(byte[] bytes) {
        Log.d(LOG_TAG, "writeRfComm");
        if(bluetoothThread != null) {
            bluetoothThread.write(bytes);
        } else {
            Log.d(LOG_TAG, "bluetoothThread is null");
        }
    }

    // Records the target MAC address and resolves it to a BluetoothDevice.
    public void setAddress(String address) {
        Log.d(LOG_TAG, "setAddress: " + address);
        bluetoothAddress = address;
        setBluetoothDevice(bluetoothAddress);
    }

    // Resolves an address against the paired then the scanned device sets and
    // stores the match in bluetoothDevice.
    public static void setBluetoothDevice(String address) {
        Log.d(LOG_TAG, "setBluetoothDevice: " + address);
        // loop through devices
        if(pairedDevices != null) {
            Log.d(LOG_TAG, "setting from paired devices");
            for(BluetoothDevice device : pairedDevices) {
                if(device.getAddress().equals(address)) {
                    Log.d(LOG_TAG, "Set paired device: " + device.getName() + ":" + device.getAddress());
                    bluetoothDevice = device;
                    return;
                }
            }
        }
        if(scannedDevices.size() > 0) {
            Log.d(LOG_TAG, "setting from scanned devices");
            for(BluetoothDevice device : scannedDevices) {
                if(device.getAddress().equals(address)) {
                    Log.d(LOG_TAG, "Set scanned device: " + device.getName() + ":" + device.getAddress());
                    bluetoothDevice = device;
                    return;
                }
            }
        }
        if(pairedDevices == null && scannedDevices.size() <= 0) {
            Log.d(LOG_TAG, "pairedDevices and scannedDevices are null or empty");
        }
    }

    // Builds parallel name/address lists from paired + scanned devices and
    // broadcasts them for the device-picker UI.
    public void setEntries() {
        Log.d(LOG_TAG, "setEntries");
        if(isEnabled) {
            List<CharSequence> entries = new ArrayList<CharSequence>();
            List<CharSequence> values = new ArrayList<CharSequence>();
            pairedDevices = bluetoothAdapter.getBondedDevices();
            // loop through paired devices
            if(pairedDevices.size() > 0) {
                for(BluetoothDevice device : pairedDevices) {
                    String deviceName = device.getName();
                    String deviceAddr = device.getAddress();
                    Log.d(LOG_TAG, "Paired Device: " + deviceName + ":" + deviceAddr);
                    if(deviceName != null && !deviceName.isEmpty() && deviceAddr != null && !deviceAddr.isEmpty()) {
                        entries.add(deviceName);
                        values.add(deviceAddr);
                    }
                }
            } else {
                Log.d(LOG_TAG, "No pairedDevices");
            }
            // loop through scanned devices
            if(scannedDevices.size() > 0) {
                for(BluetoothDevice device : scannedDevices) {
                    // make sure we don't add duplicates (matched by name)
                    if(!entries.contains(device.getName())) {
                        String deviceName = device.getName();
                        String deviceAddr = device.getAddress();
                        Log.d(LOG_TAG, "Scanned Device: " + deviceName + ":" + deviceAddr);
                        if(deviceName != null && !deviceName.isEmpty() && deviceAddr != null && !deviceAddr.isEmpty()) {
                            entries.add(deviceName);
                            values.add(deviceAddr);
                        }
                    }
                }
            } else {
                Log.d(LOG_TAG, "No scannedDevices");
            }
            CharSequence[] pairedEntries = entries.toArray(new CharSequence[entries.size()]);
            CharSequence[] pairedEntryValues = values.toArray(new CharSequence[values.size()]);
            Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
            msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_DEVICES_LIST);
            msg.putExtra(Intents.INTENT_BLUETOOTH_ENTRIES, pairedEntries);
            msg.putExtra(Intents.INTENT_BLUETOOTH_VALUES, pairedEntryValues);
            context.sendBroadcast(msg);
        } else {
            Log.d(LOG_TAG, "Bluetooth is not enabled");
        }
    }

    /**
     * Starts a time-boxed (SCAN_PERIOD ms) device scan, choosing the API by
     * SDK level, then broadcasts SCAN_STOPPED and refreshes the device list.
     */
    @SuppressLint("NewApi")
    public void scan() {
        Log.d(LOG_TAG, "scan");
        if(isEnabled) {
            if(context != null) {
                if(!isScanning) {
                    if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
                        // pre-Lollipop LE scan API
                        Log.d(LOG_TAG, "scanning with startLeScan: " + SCAN_PERIOD + "ms");
                        new android.os.Handler().postDelayed(new Runnable() {
                            @Override
                            public void run() {
                                isScanning = false;
                                bluetoothAdapter.stopLeScan(mLeScanCallback);
                                Log.d(LOG_TAG, "scanned stopped");
                                Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
                                msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_SCAN_STOPPED);
                                context.sendBroadcast(msg);
                                BluetoothLeService.this.setEntries();
                            }
                        }, SCAN_PERIOD);
                        isScanning = true;
                        bluetoothAdapter.startLeScan(mLeScanCallback);
                    } else if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                        // Lollipop+ BluetoothLeScanner API
                        Log.d(LOG_TAG, "scanning with startScan: " + SCAN_PERIOD + "ms");
                        new android.os.Handler().postDelayed(new Runnable() {
                            @Override
                            public void run() {
                                isScanning = false;
                                BluetoothLeScanner mBluetoothLeScanner = bluetoothAdapter.getBluetoothLeScanner();
                                mBluetoothLeScanner.stopScan(mScanCallback);
                                Log.d(LOG_TAG, "scanned stopped");
                                Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
                                msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_SCAN_STOPPED);
                                context.sendBroadcast(msg);
                                BluetoothLeService.this.setEntries();
                            }
                        }, SCAN_PERIOD);
                        isScanning = true;
                        BluetoothLeScanner mBluetoothLeScanner = bluetoothAdapter.getBluetoothLeScanner();
                        mBluetoothLeScanner.startScan(mScanCallback);
                    } else {
                        // NOTE(review): unreachable — the two branches above
                        // (< LOLLIPOP / >= LOLLIPOP) cover every SDK level.
                        Log.d(LOG_TAG, "scanning with startDiscovery: " + SCAN_PERIOD + "ms");
                        // Bluetooth Classic
                        new android.os.Handler().postDelayed(new Runnable() {
                            public void run() {
                                isScanning = false;
                                bluetoothAdapter.cancelDiscovery();
                                Log.d(LOG_TAG, "scanned stopped");
                                Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
                                msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_SCAN_STOPPED);
                                context.sendBroadcast(msg);
                                BluetoothLeService.this.setEntries();
                                BluetoothLeService.this.unregisterReceiver(scanReceiver);
                            }
                        }, SCAN_PERIOD);
                        IntentFilter filter = new IntentFilter(BluetoothDevice.ACTION_FOUND);
                        this.registerReceiver(scanReceiver, filter);
                        isScanning = true;
                        bluetoothAdapter.startDiscovery();
                    }
                } else {
                    Log.d(LOG_TAG, "currently scanning");
                }
            } else {
                Log.d(LOG_TAG, "Handler is null");
            }
        } else {
            Log.d(LOG_TAG, "Bluetooth is not enabled");
        }
    }

    // Pre-Lollipop scan results: record new devices and broadcast them.
    private BluetoothAdapter.LeScanCallback mLeScanCallback = new BluetoothAdapter.LeScanCallback() {
        @Override
        public void onLeScan(BluetoothDevice device, int rssi, byte[] scanRecord) {
            Log.d(LOG_TAG, "onLeScan: " + rssi);
            // Set.add returns false for duplicates, so each device is broadcast once.
            if(scannedDevices.add(device)) {
                Log.d(LOG_TAG, device.getName() + " : " + device.getAddress() + " : " + device.getType() + " : " + device.getBondState());
                Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
                msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_DEVICE);
                msg.putExtra(Intents.INTENT_EXTRA_DATA, device.getName() + "," + device.getAddress());
                context.sendBroadcast(msg);
            }
        }
    };

    // Lollipop+ scan results: same dedupe-and-broadcast behaviour as above.
    @SuppressLint("NewApi")
    private ScanCallback mScanCallback = new ScanCallback() {
        @Override
        public void onScanResult(int callbackType, ScanResult result) {
            Log.d(LOG_TAG, "onScanResult: " + callbackType);
            BluetoothDevice device = result.getDevice();
            if(scannedDevices.add(device)) {
                Log.d(LOG_TAG, device.getName() + " : " + device.getAddress() + " : " + device.getType() + " : " + device.getBondState());
                Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
                msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_DEVICE);
                msg.putExtra(Intents.INTENT_EXTRA_DATA, device.getName() + "," + device.getAddress());
                context.sendBroadcast(msg);
            }
        }
    };

    // Classic-discovery results (ACTION_FOUND): dedupe and broadcast.
    private final BroadcastReceiver scanReceiver = new BroadcastReceiver() {
        public void onReceive(Context context, Intent intent) {
            Log.d(LOG_TAG, "scanReceiver");
            String action = intent.getAction();
            if(BluetoothDevice.ACTION_FOUND.equals(action)) {
                BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
                if(scannedDevices.add(device)) {
                    Log.d(LOG_TAG, device.getName() + " : " + device.getAddress() + " : " + device.getType() + " : " + device.getBondState());
                    Intent msg = new Intent(Intents.INTENT_BLUETOOTH);
                    msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_DEVICE);
                    msg.putExtra(Intents.INTENT_EXTRA_DATA, device.getName() + "," + device.getAddress());
                    context.sendBroadcast(msg);
                }
            }
        }
    };

    // Worker thread that turns the adapter on (definition continues beyond
    // this chunk).
    private class EnableBluetoothThread extends Thread {
        public void run() {
            boolean bluetoothEnabled = 
true; long timeStart = Calendar.getInstance().getTimeInMillis(); Log.d(LOG_TAG, "EnableBluetoothThread: " + timeStart); bluetoothAdapter.enable(); while(!bluetoothAdapter.isEnabled()) { try { long timeDiff = Calendar.getInstance().getTimeInMillis() - timeStart; if(timeDiff >= 5000) { bluetoothEnabled = false; break; } Thread.sleep(100L); } catch (InterruptedException ie) { // unexpected interruption while enabling bluetooth Thread.currentThread().interrupt(); // restore interrupted flag return; } } if(bluetoothEnabled) { isEnabled = true; Intent msg = new Intent(Intents.INTENT_BLUETOOTH); msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_ENABLED); context.sendBroadcast(msg); Log.d(LOG_TAG, "Enabled"); } else { isEnabled = false; Intent msg = new Intent(Intents.INTENT_BLUETOOTH); msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_ENABLED_FAILED); context.sendBroadcast(msg); Log.d(LOG_TAG, "Timed out"); } } } private class ConnectBluetoothThread extends Thread { public ConnectBluetoothThread() { Log.d(LOG_TAG, "ConnectBluetoothThread"); // get a BluetoothSocket to connectBle with the given BluetoothDevice try { Log.d(LOG_TAG, "try ConnectBluetoothThread: " + bluetoothDevice.getName() + " with UUID: " + NOTIFYTE_MOBILE_UUID.toString()); bluetoothSocket = bluetoothDevice.createRfcommSocketToServiceRecord(NOTIFYTE_MOBILE_UUID); } catch(Exception e) { Log.e(LOG_TAG, "Error: bluetoothDevice.createRfcommSocketToServiceRecord()", e); } } public void run() { Log.d(LOG_TAG, "Running ConnectBluetoothThread"); // Cancel discovery because it will slow down the connection bluetoothAdapter.cancelDiscovery(); try { // connectBle the device through the socket. 
This will block until it succeeds or throws an exception bluetoothSocket.connect(); isConnected = true; if(context != null) { Intent msg = new Intent(Intents.INTENT_BLUETOOTH); msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_RFCOMM_CONNECTED); context.sendBroadcast(msg); } } catch(IOException connectException) { Log.e(LOG_TAG, "Error: bluetoothSocket.connectBle()", connectException); try { bluetoothSocket.close(); if(context != null) { Intent msg = new Intent(Intents.INTENT_BLUETOOTH); msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_RFCOMM_CONNECTED_FAILED); context.sendBroadcast(msg); } } catch(IOException closeException) { Log.e(LOG_TAG, "Error: BluetoothSocket.close()", closeException); } return; } Log.d(LOG_TAG, "ConnectBluetoothThread connected"); // Manage the connection bluetoothThread = new BluetoothThread(); bluetoothThread.start(); } public void close() { if(bluetoothThread != null) { isThreadRunning = false; bluetoothThread.close(); isConnected = false; Log.d(LOG_TAG, "ConnectBluetoothThread close"); if(context != null) { Intent msg = new Intent(Intents.INTENT_BLUETOOTH); msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_RFCOMM_DISCONNECTED); context.sendBroadcast(msg); } } } } public class BluetoothThread extends Thread { public BluetoothThread() { Log.d(LOG_TAG, "BluetoothThread"); // get the input and output streams try { inputStream = bluetoothSocket.getInputStream(); outputStream = bluetoothSocket.getOutputStream(); isThreadRunning = true; } catch(IOException e) { close(); Log.e(LOG_TAG, "Error: bluetoothSocket.getInputStream()/socket.getOutputStream()", e); } } public void run() { Log.d(LOG_TAG, "Running BluetoothThread"); ByteArrayOutputStream byteArray = new ByteArrayOutputStream(); int bufferSize = 1024; byte[] buffer = new byte[bufferSize]; // listen to the InputStream while(isThreadRunning) { try { int bytes = inputStream.read(buffer); byteArray.write(buffer, 0, bytes); Log.d(LOG_TAG, "Received: " 
+ byteArray); try { Intent msg = new Intent(Intents.INTENT_BLUETOOTH); msg.putExtra(Intents.INTENT_EXTRA_MSG, Intents.INTENT_BLUETOOTH_DATA); msg.putExtra(Intents.INTENT_BLUETOOTH_DATA, byteArray.toByteArray()); context.sendBroadcast(msg); } catch(Exception e) { Log.e(LOG_TAG, "Error: mHandler.obtainMessage()", e); } byteArray.reset(); } catch(IOException e) { if(isThreadRunning) { Log.e(LOG_TAG, "Error: inputStream.read()", e); close(); bluetoothThread.close(); if(connectThread != null) { connectThread.close(); connectThread = null; } } } } } public void write(byte[] bytes) { try { ByteArrayOutputStream byteArray = new ByteArrayOutputStream(); byteArray.write(bytes, 0, bytes.length); Log.d(LOG_TAG, "Sending: " + byteArray); outputStream.write(bytes); outputStream.flush(); } catch(IOException e) { Log.e(LOG_TAG, "Error: outputStream.write()", e); } } public void close() { try { inputStream.close(); } catch(IOException e) { Log.e(LOG_TAG, "Error: inputStream.close()", e); } try { outputStream.close(); } catch(IOException e) { Log.e(LOG_TAG, "Error: outputStream.close()", e); } try { bluetoothSocket.close(); } catch(IOException e) { Log.e(LOG_TAG, "Error: bluetoothSocket.close()", e); } } } @SuppressWarnings("unused") private class ServerThread extends Thread { public ServerThread() { Log.d(LOG_TAG, "ServerThread"); try { Log.d(LOG_TAG, "try ServerThread with UUID: " + NOTIFYTE_MOBILE_UUID); bluetoothServerSocket = bluetoothAdapter.listenUsingRfcommWithServiceRecord("SessionManagerSecure", NOTIFYTE_MOBILE_UUID); } catch(IOException e) { Log.e(LOG_TAG, "Error bluetoothAdapter.listenUsingRfcommWithServiceRecord()"); e.printStackTrace(); } } public void run() { Log.d(LOG_TAG, "Running ServerThread"); try { bluetoothServerSocket.accept(); Log.d(LOG_TAG, "bluetoothServerSocket.accept() success"); } catch(IOException e) { Log.e(LOG_TAG, "Error bluetoothServerSocket.accept()"); e.printStackTrace(); } } public void close() { try { bluetoothServerSocket.close(); } 
catch(IOException e) { Log.e(LOG_TAG, "Error: mmSocket.close()", e); } } } private class ReconnectBluetoothThread extends Thread { public void run() { long timeStart = Calendar.getInstance().getTimeInMillis(); Log.d(LOG_TAG, "Reconnecting Bluetooth: " + timeStart); while(isReconnecting) { long lastAttempt = Calendar.getInstance().getTimeInMillis() - RECCONECT_TIME; if(allowedReconnect(lastAttempt / 1000)) { Log.d(LOG_TAG, "Attempting to reconnect: " + RECONNECT_ATTEMPTS); RECCONECT_TIME = Calendar.getInstance().getTimeInMillis(); BluetoothLeService.this.disconnectBle(); BluetoothLeService.this.connectBle(bluetoothAddress); } } } public void close() { isReconnecting = false; } } public boolean allowedReconnect(long elapsed) { boolean allowedReconnect = false; if(elapsed > 30 && RECONNECT_ATTEMPTS < 4) { // every 30 secs (2 min) RECONNECT_ATTEMPTS += 1; allowedReconnect = true; } else if(elapsed > 60 && RECONNECT_ATTEMPTS < 9) { // every 1 min (5 mins) RECONNECT_ATTEMPTS += 1; allowedReconnect = true; } else if(elapsed > 300 && RECONNECT_ATTEMPTS < 21) { // every 5 min (1 hr) RECONNECT_ATTEMPTS += 1; allowedReconnect = true; } if(RECONNECT_ATTEMPTS > 21) { RECONNECT_ATTEMPTS = 0; Log.d(LOG_TAG, "reconnects attempts exceeded. giving up"); BluetoothLeService.this.disconnectBle(); BluetoothLeService.this.stopReconnectBle(); allowedReconnect = false; } return allowedReconnect; } }
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.indexer;

import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Optional;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import com.google.common.io.Closeables;
import com.metamx.common.ISE;
import com.metamx.common.logger.Logger;
import io.druid.data.input.InputRow;
import io.druid.data.input.Rows;
import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.hyperloglog.HyperLogLogCollector;
import io.druid.segment.indexing.granularity.UniformGranularitySpec;
import io.druid.timeline.partition.HashBasedNumberedShardSpec;
import io.druid.timeline.partition.NoneShardSpec;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.joda.time.DateTime;
import org.joda.time.DateTimeComparator;
import org.joda.time.Interval;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Determines appropriate ShardSpecs for a job by determining approximate cardinality of data set using HyperLogLog
 */
public class DetermineHashedPartitionsJob implements Jobby
{
  private static final Logger log = new Logger(DetermineHashedPartitionsJob.class);
  private final HadoopDruidIndexerConfig config;

  public DetermineHashedPartitionsJob(
      HadoopDruidIndexerConfig config
  )
  {
    this.config = config;
  }

  /**
   * Runs a Hadoop job that folds each row's (timestamp, dimensions) group key
   * into a per-interval HyperLogLog, then reads back the per-interval row
   * counts and derives hashed shard specs sized by targetPartitionSize.
   *
   * @return true if the Hadoop job succeeded and shard specs were set on the
   *         config; false if the job itself failed
   */
  public boolean run()
  {
    try {
      /*
       * Group by (timestamp, dimensions) so we can correctly count dimension values as they would appear
       * in the final segment.
       */
      long startTime = System.currentTimeMillis();
      final Job groupByJob = Job.getInstance(
          new Configuration(),
          String.format("%s-determine_partitions_hashed-%s", config.getDataSource(), config.getIntervals())
      );

      JobHelper.injectSystemProperties(groupByJob);
      config.addJobProperties(groupByJob);
      groupByJob.setMapperClass(DetermineCardinalityMapper.class);
      groupByJob.setMapOutputKeyClass(LongWritable.class);
      groupByJob.setMapOutputValueClass(BytesWritable.class);
      groupByJob.setReducerClass(DetermineCardinalityReducer.class);
      groupByJob.setOutputKeyClass(NullWritable.class);
      groupByJob.setOutputValueClass(NullWritable.class);
      groupByJob.setOutputFormatClass(SequenceFileOutputFormat.class);
      groupByJob.setPartitionerClass(DetermineHashedPartitionsPartitioner.class);
      // one reducer per segment interval when intervals are known up front;
      // a single reducer otherwise (intervals get determined by the job itself)
      if (!config.getSegmentGranularIntervals().isPresent()) {
        groupByJob.setNumReduceTasks(1);
      } else {
        groupByJob.setNumReduceTasks(config.getSegmentGranularIntervals().get().size());
      }
      JobHelper.setupClasspath(
          JobHelper.distributedClassPath(config.getWorkingPath()),
          JobHelper.distributedClassPath(config.makeIntermediatePath()),
          groupByJob
      );

      config.addInputPaths(groupByJob);
      config.intoConfiguration(groupByJob);
      FileOutputFormat.setOutputPath(groupByJob, config.makeGroupedDataDir());

      groupByJob.submit();
      log.info("Job %s submitted, status available at: %s", groupByJob.getJobName(), groupByJob.getTrackingURL());

      if (!groupByJob.waitForCompletion(true)) {
        log.error("Job failed: %s", groupByJob.getJobID());
        return false;
      }

      /*
       * Load partitions and intervals determined by the previous job.
       */
      log.info("Job completed, loading up partitions for intervals[%s].", config.getSegmentGranularIntervals());
      FileSystem fileSystem = null;
      if (!config.getSegmentGranularIntervals().isPresent()) {
        final Path intervalInfoPath = config.makeIntervalInfoPath();
        fileSystem = intervalInfoPath.getFileSystem(groupByJob.getConfiguration());
        if (!Utils.exists(groupByJob, fileSystem, intervalInfoPath)) {
          throw new ISE("Path[%s] didn't exist!?", intervalInfoPath);
        }
        List<Interval> intervals = config.JSON_MAPPER.readValue(
            Utils.openInputStream(groupByJob, intervalInfoPath),
            new TypeReference<List<Interval>>()
            {
            }
        );
        config.setGranularitySpec(
            new UniformGranularitySpec(
                config.getGranularitySpec().getSegmentGranularity(),
                config.getGranularitySpec().getQueryGranularity(),
                intervals
            )
        );
        // FIX: pass the intervals as a format argument; the original
        // concatenated them onto the pattern, leaving a literal "%s" in the log.
        log.info("Determined Intervals for Job [%s]", config.getSegmentGranularIntervals());
      }
      Map<DateTime, List<HadoopyShardSpec>> shardSpecs = Maps.newTreeMap(DateTimeComparator.getInstance());
      int shardCount = 0;
      for (Interval segmentGranularity : config.getSegmentGranularIntervals().get()) {
        DateTime bucket = segmentGranularity.getStart();

        final Path partitionInfoPath = config.makeSegmentPartitionInfoPath(segmentGranularity);
        if (fileSystem == null) {
          fileSystem = partitionInfoPath.getFileSystem(groupByJob.getConfiguration());
        }
        if (Utils.exists(groupByJob, fileSystem, partitionInfoPath)) {
          // reducer wrote an approximate row count (HLL cardinality) per interval
          final Long numRows = config.JSON_MAPPER.readValue(
              Utils.openInputStream(groupByJob, partitionInfoPath),
              new TypeReference<Long>()
              {
              }
          );

          log.info("Found approximately [%,d] rows in data.", numRows);

          final int numberOfShards = (int) Math.ceil((double) numRows / config.getTargetPartitionSize());

          log.info("Creating [%,d] shards", numberOfShards);

          List<HadoopyShardSpec> actualSpecs = Lists.newArrayListWithExpectedSize(numberOfShards);
          if (numberOfShards == 1) {
            actualSpecs.add(new HadoopyShardSpec(NoneShardSpec.instance(), shardCount++));
          } else {
            for (int i = 0; i < numberOfShards; ++i) {
              actualSpecs.add(
                  new HadoopyShardSpec(
                      new HashBasedNumberedShardSpec(
                          i,
                          numberOfShards,
                          null,
                          HadoopDruidIndexerConfig.JSON_MAPPER
                      ),
                      shardCount++
                  )
              );
              log.info("DateTime[%s], partition[%d], spec[%s]", bucket, i, actualSpecs.get(i));
            }
          }
          shardSpecs.put(bucket, actualSpecs);
        } else {
          log.info("Path[%s] didn't exist!?", partitionInfoPath);
        }
      }
      config.setShardSpecs(shardSpecs);
      log.info(
          "DetermineHashedPartitionsJob took %d millis",
          (System.currentTimeMillis() - startTime)
      );

      return true;
    }
    catch (Exception e) {
      throw Throwables.propagate(e);
    }
  }

  /**
   * Mapper: hashes each row's group key into the HyperLogLog collector for the
   * row's segment interval, emitting one (interval start millis, HLL bytes)
   * pair per interval at the end of the task.
   */
  public static class DetermineCardinalityMapper extends HadoopDruidIndexerMapper<LongWritable, BytesWritable>
  {
    private static HashFunction hashFunction = Hashing.murmur3_128();
    private QueryGranularity rollupGranularity = null;
    private Map<Interval, HyperLogLogCollector> hyperLogLogs;
    private HadoopDruidIndexerConfig config;
    // true when segment intervals are NOT configured and must be derived from the data
    private boolean determineIntervals;

    @Override
    protected void setup(Context context)
        throws IOException, InterruptedException
    {
      super.setup(context);
      rollupGranularity = getConfig().getGranularitySpec().getQueryGranularity();
      config = HadoopDruidIndexerConfig.fromConfiguration(context.getConfiguration());
      Optional<Set<Interval>> intervals = config.getSegmentGranularIntervals();
      if (intervals.isPresent()) {
        determineIntervals = false;
        final ImmutableMap.Builder<Interval, HyperLogLogCollector> builder = ImmutableMap.builder();
        for (final Interval bucketInterval : intervals.get()) {
          builder.put(bucketInterval, HyperLogLogCollector.makeLatestCollector());
        }
        hyperLogLogs = builder.build();
      } else {
        determineIntervals = true;
        hyperLogLogs = Maps.newHashMap();
      }
    }

    @Override
    protected void innerMap(
        InputRow inputRow,
        Object value,
        Context context,
        boolean reportParseExceptions
    ) throws IOException, InterruptedException
    {
      final List<Object> groupKey = Rows.toGroupKey(
          rollupGranularity.truncate(inputRow.getTimestampFromEpoch()),
          inputRow
      );
      Interval interval;
      if (determineIntervals) {
        // derive the bucket from the row timestamp, creating collectors lazily
        interval = config.getGranularitySpec()
                         .getSegmentGranularity()
                         .bucket(new DateTime(inputRow.getTimestampFromEpoch()));

        if (!hyperLogLogs.containsKey(interval)) {
          hyperLogLogs.put(interval, HyperLogLogCollector.makeLatestCollector());
        }
      } else {
        final Optional<Interval> maybeInterval = config.getGranularitySpec()
                                                       .bucketInterval(new DateTime(inputRow.getTimestampFromEpoch()));

        if (!maybeInterval.isPresent()) {
          throw new ISE("WTF?! No bucket found for timestamp: %s", inputRow.getTimestampFromEpoch());
        }
        interval = maybeInterval.get();
      }
      hyperLogLogs.get(interval)
                  .add(
                      hashFunction.hashBytes(HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsBytes(groupKey))
                                  .asBytes()
                  );
    }

    @Override
    public void run(Context context) throws IOException, InterruptedException
    {
      setup(context);

      while (context.nextKeyValue()) {
        map(context.getCurrentKey(), context.getCurrentValue(), context);
      }

      // emit the accumulated HLL per interval once all rows are mapped
      for (Map.Entry<Interval, HyperLogLogCollector> entry : hyperLogLogs.entrySet()) {
        context.write(
            new LongWritable(entry.getKey().getStartMillis()),
            new BytesWritable(entry.getValue().toByteArray())
        );
      }
      cleanup(context);
    }
  }

  /**
   * Reducer: folds all mapper HLLs for an interval, writes the estimated row
   * count for that interval, and (when intervals were determined from data)
   * writes the discovered interval list at the end of the task.
   */
  public static class DetermineCardinalityReducer
      extends Reducer<LongWritable, BytesWritable, NullWritable, NullWritable>
  {
    private final List<Interval> intervals = Lists.newArrayList();
    protected HadoopDruidIndexerConfig config = null;

    @Override
    protected void setup(Context context)
        throws IOException, InterruptedException
    {
      config = HadoopDruidIndexerConfig.fromConfiguration(context.getConfiguration());
    }

    @Override
    protected void reduce(
        LongWritable key,
        Iterable<BytesWritable> values,
        Context context
    ) throws IOException, InterruptedException
    {
      HyperLogLogCollector aggregate = HyperLogLogCollector.makeLatestCollector();
      for (BytesWritable value : values) {
        aggregate.fold(ByteBuffer.wrap(value.getBytes(), 0, value.getLength()));
      }
      Interval interval = config.getGranularitySpec().getSegmentGranularity().bucket(new DateTime(key.get()));
      intervals.add(interval);
      final Path outPath = config.makeSegmentPartitionInfoPath(interval);
      final OutputStream out = Utils.makePathAndOutputStream(
          context, outPath, config.isOverwriteFiles()
      );

      try {
        HadoopDruidIndexerConfig.JSON_MAPPER.writerWithType(
            new TypeReference<Long>()
            {
            }
        ).writeValue(
            // FIX: plain cast instead of the deprecated Double boxing
            // (new Double(x).longValue()); truncation semantics are identical.
            out,
            (long) aggregate.estimateCardinality()
        );
      }
      finally {
        Closeables.close(out, false);
      }
    }

    @Override
    public void run(Context context)
        throws IOException, InterruptedException
    {
      super.run(context);
      if (!config.getSegmentGranularIntervals().isPresent()) {
        final Path outPath = config.makeIntervalInfoPath();
        final OutputStream out = Utils.makePathAndOutputStream(
            context, outPath, config.isOverwriteFiles()
        );

        try {
          HadoopDruidIndexerConfig.JSON_MAPPER.writerWithType(
              new TypeReference<List<Interval>>()
              {
              }
          ).writeValue(
              out,
              intervals
          );
        }
        finally {
          Closeables.close(out, false);
        }
      }
    }
  }

  /**
   * Routes each (interval start millis) key to the reducer assigned to that
   * interval; everything goes to reducer 0 in local mode or when intervals are
   * being determined from the data.
   */
  public static class DetermineHashedPartitionsPartitioner
      extends Partitioner<LongWritable, BytesWritable> implements Configurable
  {
    private Configuration config;
    private boolean determineIntervals;
    private Map<LongWritable, Integer> reducerLookup;

    @Override
    public int getPartition(LongWritable interval, BytesWritable text, int numPartitions)
    {
      // FIX: constant-first equals — config.get() may return null when the
      // property is unset, which would have thrown NPE here.
      if ("local".equals(config.get("mapred.job.tracker")) || determineIntervals) {
        return 0;
      } else {
        return reducerLookup.get(interval);
      }
    }

    @Override
    public Configuration getConf()
    {
      return config;
    }

    @Override
    public void setConf(Configuration config)
    {
      this.config = config;
      HadoopDruidIndexerConfig hadoopConfig = HadoopDruidIndexerConfig.fromConfiguration(config);
      if (hadoopConfig.getSegmentGranularIntervals().isPresent()) {
        determineIntervals = false;
        int reducerNumber = 0;
        ImmutableMap.Builder<LongWritable, Integer> builder = ImmutableMap.builder();
        for (Interval interval : hadoopConfig.getSegmentGranularIntervals().get()) {
          builder.put(new LongWritable(interval.getStartMillis()), reducerNumber++);
        }
        reducerLookup = builder.build();
      } else {
        determineIntervals = true;
      }
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kafka.image;

import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.Uuid;
import org.apache.kafka.common.metadata.PartitionChangeRecord;
import org.apache.kafka.common.metadata.PartitionRecord;
import org.apache.kafka.common.metadata.RemoveTopicRecord;
import org.apache.kafka.common.metadata.TopicRecord;
import org.apache.kafka.metadata.PartitionRegistration;
import org.apache.kafka.metadata.RecordTestUtils;
import org.apache.kafka.metadata.Replicas;
import org.apache.kafka.server.common.ApiMessageAndVersion;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

import static org.apache.kafka.common.metadata.MetadataRecordType.PARTITION_CHANGE_RECORD;
import static org.apache.kafka.common.metadata.MetadataRecordType.PARTITION_RECORD;
import static org.apache.kafka.common.metadata.MetadataRecordType.REMOVE_TOPIC_RECORD;
import static org.apache.kafka.common.metadata.MetadataRecordType.TOPIC_RECORD;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

/**
 * Tests for {@link TopicsImage}/{@link TopicsDelta}: record replay, local
 * replica change computation, snapshot round trips, and the read-only
 * name/id map views.
 */
@Timeout(value = 40)
public class TopicsImageTest {
    static final TopicsImage IMAGE1;

    static final List<ApiMessageAndVersion> DELTA1_RECORDS;

    static final TopicsDelta DELTA1;

    static final TopicsImage IMAGE2;

    static final List<TopicImage> TOPIC_IMAGES1;

    /** Builds a TopicImage whose partitions are numbered 0..n in argument order. */
    private static TopicImage newTopicImage(String name, Uuid id, PartitionRegistration... partitions) {
        Map<Integer, PartitionRegistration> partitionMap = new HashMap<>();
        int i = 0;
        for (PartitionRegistration partition : partitions) {
            partitionMap.put(i++, partition);
        }
        return new TopicImage(name, id, partitionMap);
    }

    private static Map<Uuid, TopicImage> newTopicsByIdMap(Collection<TopicImage> topics) {
        Map<Uuid, TopicImage> map = new HashMap<>();
        for (TopicImage topic : topics) {
            map.put(topic.id(), topic);
        }
        return map;
    }

    private static Map<String, TopicImage> newTopicsByNameMap(Collection<TopicImage> topics) {
        Map<String, TopicImage> map = new HashMap<>();
        for (TopicImage topic : topics) {
            map.put(topic.name(), topic);
        }
        return map;
    }

    private static final Uuid FOO_UUID = Uuid.fromString("ThIaNwRnSM2Nt9Mx1v0RvA");

    private static final Uuid BAR_UUID = Uuid.fromString("f62ptyETTjet8SL5ZeREiw");

    private static final Uuid BAZ_UUID = Uuid.fromString("tgHBnRglT5W_RlENnuG5vg");

    static {
        TOPIC_IMAGES1 = Arrays.asList(
            newTopicImage("foo", FOO_UUID,
                new PartitionRegistration(new int[] {2, 3, 4},
                    new int[] {2, 3}, Replicas.NONE, Replicas.NONE, 2, 1, 345),
                new PartitionRegistration(new int[] {3, 4, 5},
                    new int[] {3, 4, 5}, Replicas.NONE, Replicas.NONE, 3, 4, 684),
                new PartitionRegistration(new int[] {2, 4, 5},
                    new int[] {2, 4, 5}, Replicas.NONE, Replicas.NONE, 2, 10, 84)),
            newTopicImage("bar", BAR_UUID,
                new PartitionRegistration(new int[] {0, 1, 2, 3, 4},
                    new int[] {0, 1, 2, 3}, new int[] {1}, new int[] {3, 4}, 0, 1, 345)));

        IMAGE1 = new TopicsImage(newTopicsByIdMap(TOPIC_IMAGES1), newTopicsByNameMap(TOPIC_IMAGES1));

        // DELTA1 applied to IMAGE1: delete foo, change bar-0's leader, create baz with one partition.
        DELTA1_RECORDS = new ArrayList<>();
        DELTA1_RECORDS.add(new ApiMessageAndVersion(new RemoveTopicRecord().
            setTopicId(FOO_UUID),
            REMOVE_TOPIC_RECORD.highestSupportedVersion()));
        DELTA1_RECORDS.add(new ApiMessageAndVersion(new PartitionChangeRecord().
            setTopicId(BAR_UUID).
            setPartitionId(0).setLeader(1),
            PARTITION_CHANGE_RECORD.highestSupportedVersion()));
        DELTA1_RECORDS.add(new ApiMessageAndVersion(new TopicRecord().
            setName("baz").setTopicId(BAZ_UUID),
            TOPIC_RECORD.highestSupportedVersion()));
        DELTA1_RECORDS.add(new ApiMessageAndVersion(new PartitionRecord().
            setPartitionId(0).
            setTopicId(BAZ_UUID).
            setReplicas(Arrays.asList(1, 2, 3, 4)).
            setIsr(Arrays.asList(3, 4)).
            setRemovingReplicas(Collections.singletonList(2)).
            setAddingReplicas(Collections.singletonList(1)).
            setLeader(3).
            setLeaderEpoch(2).
            setPartitionEpoch(1),
            PARTITION_RECORD.highestSupportedVersion()));

        DELTA1 = new TopicsDelta(IMAGE1);
        RecordTestUtils.replayAll(DELTA1, DELTA1_RECORDS);

        List<TopicImage> topics2 = Arrays.asList(
            newTopicImage("bar", BAR_UUID,
                new PartitionRegistration(new int[] {0, 1, 2, 3, 4},
                    new int[] {0, 1, 2, 3}, new int[] {1}, new int[] {3, 4}, 1, 2, 346)),
            newTopicImage("baz", BAZ_UUID,
                new PartitionRegistration(new int[] {1, 2, 3, 4},
                    new int[] {3, 4}, new int[] {2}, new int[] {1}, 3, 2, 1)));
        IMAGE2 = new TopicsImage(newTopicsByIdMap(topics2), newTopicsByNameMap(topics2));
    }

    /** Builds a PartitionRecord whose ISR equals the replica set and whose leader is the first replica. */
    private ApiMessageAndVersion newPartitionRecord(Uuid topicId, int partitionId, List<Integer> replicas) {
        return new ApiMessageAndVersion(
            new PartitionRecord()
                .setPartitionId(partitionId)
                .setTopicId(topicId)
                .setReplicas(replicas)
                .setIsr(replicas)
                .setLeader(replicas.get(0))
                .setLeaderEpoch(1)
                .setPartitionEpoch(1),
            PARTITION_RECORD.highestSupportedVersion()
        );
    }

    private PartitionRegistration newPartition(int[] replicas) {
        return new PartitionRegistration(replicas, replicas, Replicas.NONE, Replicas.NONE, replicas[0], 1, 1);
    }

    @Test
    public void testBasicLocalChanges() {
        int localId = 3;
        /* Changes already include in DELTA1_RECORDS and IMAGE1:
         * foo - topic id deleted
         * bar-0 - stay as follower with different partition epoch
         * baz-0 - new topic to leader
         */
        List<ApiMessageAndVersion> topicRecords = new ArrayList<>(DELTA1_RECORDS);

        // Create a new foo topic with a different id
        Uuid newFooId = Uuid.fromString("b66ybsWIQoygs01vdjH07A");
        topicRecords.add(
            new ApiMessageAndVersion(
                new TopicRecord().setName("foo")
                    .setTopicId(newFooId),
                TOPIC_RECORD.highestSupportedVersion()
            )
        );
        topicRecords.add(newPartitionRecord(newFooId, 0, Arrays.asList(0, 1, 2)));
        topicRecords.add(newPartitionRecord(newFooId, 1, Arrays.asList(0, 1, localId)));

        // baz-1 - new partition to follower
        topicRecords.add(
            new ApiMessageAndVersion(
                new PartitionRecord()
                    .setPartitionId(1)
                    .setTopicId(BAZ_UUID)
                    .setReplicas(Arrays.asList(4, 2, localId))
                    .setIsr(Arrays.asList(4, 2, localId))
                    .setLeader(4)
                    .setLeaderEpoch(2)
                    .setPartitionEpoch(1),
                PARTITION_RECORD.highestSupportedVersion()
            )
        );

        TopicsDelta delta = new TopicsDelta(IMAGE1);
        RecordTestUtils.replayAll(delta, topicRecords);

        LocalReplicaChanges changes = delta.localChanges(localId);
        assertEquals(
            new HashSet<>(Arrays.asList(new TopicPartition("foo", 0), new TopicPartition("foo", 1))),
            changes.deletes()
        );
        assertEquals(
            new HashSet<>(Arrays.asList(new TopicPartition("baz", 0))),
            changes.leaders().keySet()
        );
        assertEquals(
            new HashSet<>(
                Arrays.asList(new TopicPartition("baz", 1), new TopicPartition("bar", 0), new TopicPartition("foo", 1))
            ),
            changes.followers().keySet()
        );
    }

    @Test
    public void testDeleteAfterChanges() {
        int localId = 3;
        Uuid zooId = Uuid.fromString("0hHJ3X5ZQ-CFfQ5xgpj90w");

        List<TopicImage> topics = new ArrayList<>();
        topics.add(
            newTopicImage(
                "zoo",
                zooId,
                newPartition(new int[] {localId, 1, 2})
            )
        );
        TopicsImage image = new TopicsImage(newTopicsByIdMap(topics), newTopicsByNameMap(topics));

        List<ApiMessageAndVersion> topicRecords = new ArrayList<>();
        // leader to follower
        topicRecords.add(
            new ApiMessageAndVersion(
                new PartitionChangeRecord().setTopicId(zooId).setPartitionId(0).setLeader(1),
                PARTITION_CHANGE_RECORD.highestSupportedVersion()
            )
        );
        // remove zoo topic
        topicRecords.add(
            new ApiMessageAndVersion(
                new RemoveTopicRecord().setTopicId(zooId),
                REMOVE_TOPIC_RECORD.highestSupportedVersion()
            )
        );

        TopicsDelta delta = new TopicsDelta(image);
        RecordTestUtils.replayAll(delta, topicRecords);

        // deletion wins: the earlier leadership change must not surface
        LocalReplicaChanges changes = delta.localChanges(localId);
        assertEquals(new HashSet<>(Arrays.asList(new TopicPartition("zoo", 0))), changes.deletes());
        assertEquals(Collections.emptyMap(), changes.leaders());
        assertEquals(Collections.emptyMap(), changes.followers());
    }

    @Test
    public void testLocalReassignmentChanges() {
        int localId = 3;
        Uuid zooId = Uuid.fromString("0hHJ3X5ZQ-CFfQ5xgpj90w");

        List<TopicImage> topics = new ArrayList<>();
        topics.add(
            newTopicImage(
                "zoo",
                zooId,
                newPartition(new int[] {0, 1, localId}),
                newPartition(new int[] {localId, 1, 2}),
                newPartition(new int[] {0, 1, localId}),
                newPartition(new int[] {localId, 1, 2}),
                newPartition(new int[] {0, 1, 2}),
                newPartition(new int[] {0, 1, 2})
            )
        );
        TopicsImage image = new TopicsImage(newTopicsByIdMap(topics), newTopicsByNameMap(topics));

        List<ApiMessageAndVersion> topicRecords = new ArrayList<>();
        // zoo-0 - follower to leader
        topicRecords.add(
            new ApiMessageAndVersion(
                new PartitionChangeRecord().setTopicId(zooId).setPartitionId(0).setLeader(localId),
                PARTITION_CHANGE_RECORD.highestSupportedVersion()
            )
        );
        // zoo-1 - leader to follower
        topicRecords.add(
            new ApiMessageAndVersion(
                new PartitionChangeRecord().setTopicId(zooId).setPartitionId(1).setLeader(1),
                PARTITION_CHANGE_RECORD.highestSupportedVersion()
            )
        );
        // zoo-2 - follower to removed
        topicRecords.add(
            new ApiMessageAndVersion(
                new PartitionChangeRecord()
                    .setTopicId(zooId)
                    .setPartitionId(2)
                    .setIsr(Arrays.asList(0, 1, 2))
                    .setReplicas(Arrays.asList(0, 1, 2)),
                PARTITION_CHANGE_RECORD.highestSupportedVersion()
            )
        );
        // zoo-3 - leader to removed
        topicRecords.add(
            new ApiMessageAndVersion(
                new PartitionChangeRecord()
                    .setTopicId(zooId)
                    .setPartitionId(3)
                    .setLeader(0)
                    .setIsr(Arrays.asList(0, 1, 2))
                    .setReplicas(Arrays.asList(0, 1, 2)),
                PARTITION_CHANGE_RECORD.highestSupportedVersion()
            )
        );
        // zoo-4 - not replica to leader
        topicRecords.add(
            new ApiMessageAndVersion(
                new PartitionChangeRecord()
                    .setTopicId(zooId)
                    .setPartitionId(4)
                    .setLeader(localId)
                    .setIsr(Arrays.asList(localId, 1, 2))
                    .setReplicas(Arrays.asList(localId, 1, 2)),
                PARTITION_CHANGE_RECORD.highestSupportedVersion()
            )
        );
        // zoo-5 - not replica to follower
        topicRecords.add(
            new ApiMessageAndVersion(
                new PartitionChangeRecord()
                    .setTopicId(zooId)
                    .setPartitionId(5)
                    .setIsr(Arrays.asList(0, 1, localId))
                    .setReplicas(Arrays.asList(0, 1, localId)),
                PARTITION_CHANGE_RECORD.highestSupportedVersion()
            )
        );

        TopicsDelta delta = new TopicsDelta(image);
        RecordTestUtils.replayAll(delta, topicRecords);

        LocalReplicaChanges changes = delta.localChanges(localId);
        assertEquals(
            new HashSet<>(Arrays.asList(new TopicPartition("zoo", 2), new TopicPartition("zoo", 3))),
            changes.deletes()
        );
        assertEquals(
            new HashSet<>(Arrays.asList(new TopicPartition("zoo", 0), new TopicPartition("zoo", 4))),
            changes.leaders().keySet()
        );
        assertEquals(
            new HashSet<>(Arrays.asList(new TopicPartition("zoo", 1), new TopicPartition("zoo", 5))),
            changes.followers().keySet()
        );
    }

    @Test
    public void testEmptyImageRoundTrip() throws Throwable {
        testToImageAndBack(TopicsImage.EMPTY);
    }

    @Test
    public void testImage1RoundTrip() throws Throwable {
        testToImageAndBack(IMAGE1);
    }

    @Test
    public void testApplyDelta1() throws Throwable {
        assertEquals(IMAGE2, DELTA1.apply());
    }

    @Test
    public void testImage2RoundTrip() throws Throwable {
        testToImageAndBack(IMAGE2);
    }

    /** Serializes the image to snapshot batches and replays them; the result must equal the input. */
    private void testToImageAndBack(TopicsImage image) throws Throwable {
        MockSnapshotConsumer writer = new MockSnapshotConsumer();
        image.write(writer);
        TopicsDelta delta = new TopicsDelta(TopicsImage.EMPTY);
        RecordTestUtils.replayAllBatches(delta, writer.batches());
        TopicsImage nextImage = delta.apply();
        assertEquals(image, nextImage);
    }

    @Test
    public void testTopicNameToIdView() {
        Map<String, Uuid> map = IMAGE1.topicNameToIdView();
        assertTrue(map.containsKey("foo"));
        assertEquals(FOO_UUID, map.get("foo"));
        assertTrue(map.containsKey("bar"));
        assertEquals(BAR_UUID, map.get("bar"));
        assertFalse(map.containsKey("baz"));
        // IMPROVED: assertNull instead of assertEquals(null, ...)
        assertNull(map.get("baz"));
        HashSet<Uuid> uuids = new HashSet<>();
        map.values().iterator().forEachRemaining(uuids::add);
        HashSet<Uuid> expectedUuids = new HashSet<>(Arrays.asList(
            Uuid.fromString("ThIaNwRnSM2Nt9Mx1v0RvA"),
            Uuid.fromString("f62ptyETTjet8SL5ZeREiw")));
        assertEquals(expectedUuids, uuids);
        // the view must be read-only
        assertThrows(UnsupportedOperationException.class, () -> map.remove("foo"));
        assertThrows(UnsupportedOperationException.class, () -> map.put("bar", FOO_UUID));
    }

    @Test
    public void testTopicIdToNameView() {
        Map<Uuid, String> map = IMAGE1.topicIdToNameView();
        assertTrue(map.containsKey(FOO_UUID));
        assertEquals("foo", map.get(FOO_UUID));
        assertTrue(map.containsKey(BAR_UUID));
        assertEquals("bar", map.get(BAR_UUID));
        assertFalse(map.containsKey(BAZ_UUID));
        // IMPROVED: assertNull instead of assertEquals(null, ...)
        assertNull(map.get(BAZ_UUID));
        HashSet<String> names = new HashSet<>();
        map.values().iterator().forEachRemaining(names::add);
        HashSet<String> expectedNames = new HashSet<>(Arrays.asList("foo", "bar"));
        assertEquals(expectedNames, names);
        // the view must be read-only
        assertThrows(UnsupportedOperationException.class, () -> map.remove(FOO_UUID));
        assertThrows(UnsupportedOperationException.class, () -> map.put(FOO_UUID, "bar"));
    }
}
/*
 * #%L
 * ACS AEM Commons Bundle
 * %%
 * Copyright (C) 2019 Adobe
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package com.adobe.acs.commons.filefetch.impl;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.HttpURLConnection;
import java.nio.charset.StandardCharsets;

import org.apache.sling.api.resource.LoginException;
import org.apache.sling.api.resource.ResourceResolverFactory;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.mockito.Mockito;

import com.adobe.acs.commons.filefetch.FileFetchConfiguration;
import com.day.cq.replication.ReplicationException;
import com.day.cq.replication.Replicator;

import io.wcm.testing.mock.aem.junit.AemContext;

/**
 * Tests {@link FileFetcherImpl} against a mocked HTTP connection. Three remote
 * URLs are wired to canned responses: a 200 with a body, a 304 (not modified),
 * and a 400 (failure); each test activates the fetcher with one of them.
 */
public class FileFetchImplTest {

    /** URL the mock answers with HTTP 200 and a "Hello World" body. */
    private static final String OK_URL = "https://www.danklco.com/me.png";

    /** URL the mock answers with HTTP 304 (not modified, no body). */
    private static final String NOT_MODIFIED_URL = "https://www.perficientdigital.com/logo.png";

    /** Any other URL (this one included) is answered with HTTP 400. */
    private static final String FAILING_URL = "https://www.adobe.com/logo.png";

    @Rule
    public final AemContext context = new AemContext();

    private FileFetcherImpl fileFetch;

    @Before
    public void init() throws LoginException {
        // Subclass under test with the network call replaced by a mock connection.
        fileFetch = new FileFetcherImpl() {
            protected HttpURLConnection openConnection() throws IOException {
                HttpURLConnection huc = Mockito.mock(HttpURLConnection.class);
                if (OK_URL.equals(config.remoteUrl())) {
                    Mockito.when(huc.getResponseCode()).thenReturn(200);
                    Mockito.when(huc.getInputStream())
                            .thenReturn(new ByteArrayInputStream("Hello World".getBytes(StandardCharsets.UTF_8)));
                } else if (NOT_MODIFIED_URL.equals(config.remoteUrl())) {
                    Mockito.when(huc.getResponseCode()).thenReturn(304);
                } else {
                    Mockito.when(huc.getResponseCode()).thenReturn(400);
                }
                Mockito.when(huc.getHeaderField("Last-Modified")).thenReturn("1970-01-01");
                return huc;
            }
        };

        ResourceResolverFactory factory = Mockito.mock(ResourceResolverFactory.class);
        Mockito.when(factory.getServiceResourceResolver(Mockito.any())).thenReturn(context.resourceResolver());
        fileFetch.setFactory(factory);
        fileFetch.setReplicator(Mockito.mock(Replicator.class));
    }

    /**
     * Builds a {@link FileFetchConfiguration} for the given remote URL.
     * All other configuration values are fixed and shared by every test.
     *
     * @param url the remote URL the fetcher should request
     * @return an annotation-backed configuration instance
     */
    private FileFetchConfiguration createConfig(final String url) {
        return new FileFetchConfiguration() {

            @Override
            public Class<? extends Annotation> annotationType() {
                return null;
            }

            @Override
            public String damPath() {
                return "/content/dam/an-asset.png";
            }

            @Override
            public String[] headers() {
                return new String[] { "Hi=123" };
            }

            @Override
            public String mimeType() {
                return "image/png";
            }

            @Override
            public String remoteUrl() {
                return url;
            }

            @Override
            public String scheduler_expression() {
                return "* * * * *";
            }

            @Override
            public int[] validResponseCodes() {
                return new int[] { 200 };
            }

            @Override
            public int timeout() {
                return 5000;
            }
        };
    }

    /** A 200 response must succeed, record Last-Modified, and allow a re-fetch. */
    @Test
    public void testFetch() throws IOException, ReplicationException {
        fileFetch.activate(createConfig(OK_URL));
        assertNull(fileFetch.getLastException());
        assertTrue(fileFetch.isLastJobSucceeded());
        assertEquals("1970-01-01", fileFetch.getLastModified());

        fileFetch.updateFile();
        assertNull(fileFetch.getLastException());
        assertTrue(fileFetch.isLastJobSucceeded());
    }

    /** A 304 response must succeed without updating the stored Last-Modified. */
    @Test
    public void testFetchNoUpdate() throws IOException, ReplicationException {
        fileFetch.activate(createConfig(NOT_MODIFIED_URL));
        assertNull(fileFetch.getLastException());
        assertTrue(fileFetch.isLastJobSucceeded());
        assertNull(fileFetch.getLastModified());
    }

    /** A 400 response (not in validResponseCodes) must record a failure. */
    @Test
    public void testBadUrl() throws IOException, ReplicationException {
        fileFetch.activate(createConfig(FAILING_URL));
        assertNotNull(fileFetch.getLastException());
        assertFalse(fileFetch.isLastJobSucceeded());
    }
}
/* Copyright (c) 2013 OpenPlans. All rights reserved.
 * This code is licensed under the BSD New License, available at the root
 * application directory.
 */
package org.geogit.cli.test.functional;

import static org.geogit.cli.test.functional.GlobalState.currentDirectory;
import static org.geogit.cli.test.functional.GlobalState.geogit;
import static org.geogit.cli.test.functional.GlobalState.geogitCLI;
import static org.geogit.cli.test.functional.GlobalState.homeDirectory;
import static org.geogit.cli.test.functional.GlobalState.stdOut;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Map;
import java.util.Random;

import org.apache.commons.io.FileUtils;
import org.geogit.api.GeoGIT;
import org.geogit.api.GlobalInjectorBuilder;
import org.geogit.api.NodeRef;
import org.geogit.api.ObjectId;
import org.geogit.api.Ref;
import org.geogit.api.RevFeatureType;
import org.geogit.api.plumbing.RefParse;
import org.geogit.api.plumbing.UpdateRef;
import org.geogit.api.plumbing.diff.AttributeDiff;
import org.geogit.api.plumbing.diff.FeatureDiff;
import org.geogit.api.plumbing.diff.GenericAttributeDiffImpl;
import org.geogit.api.plumbing.diff.Patch;
import org.geogit.api.plumbing.diff.PatchSerializer;
import org.geogit.api.porcelain.BranchCreateOp;
import org.geogit.api.porcelain.CheckoutOp;
import org.geogit.api.porcelain.CommitOp;
import org.geogit.api.porcelain.MergeOp;
import org.opengis.feature.Feature;
import org.opengis.feature.type.PropertyDescriptor;

import com.google.common.base.Charsets;
import com.google.common.base.Optional;
import com.google.common.base.Suppliers;
import com.google.common.collect.Maps;
import com.google.common.io.Files;
import com.google.inject.Injector;

import cucumber.annotation.en.Given;
import cucumber.annotation.en.Then;
import cucumber.annotation.en.When;

/**
 * Cucumber step definitions for the geogit CLI functional tests: setting up working
 * and home directories, running commands, and asserting on captured stdout.
 */
public class InitSteps extends AbstractGeogitFunctionalTest {

    private static final String LINE_SEPARATOR = System.getProperty("line.separator");

    /** Closes CLI and repository handles and removes the scratch directories. */
    @cucumber.annotation.After
    public void after() {
        if (GlobalState.geogitCLI != null) {
            GlobalState.geogitCLI.close();
        }
        if (GlobalState.geogit != null) {
            GlobalState.geogit.close();
        }
        deleteDirectories();
    }

    @Given("^I am in an empty directory$")
    public void I_am_in_an_empty_directory() throws Throwable {
        setUpDirectories();
        assertEquals(0, currentDirectory.list().length);
        setupGeogit();
    }

    @When("^I run the command \"([^\"]*)\"$")
    public void I_run_the_command_X(String commandSpec) throws Throwable {
        String[] args = commandSpec.split(" ");
        for (int i = 0; i < args.length; i++) {
            // Expand the ${currentdir} placeholder used by the feature files.
            args[i] = args[i].replace("${currentdir}", currentDirectory.getAbsolutePath());
        }
        runCommand(args);
    }

    @Then("^it should answer \"([^\"]*)\"$")
    public void it_should_answer_exactly(String expected) throws Throwable {
        // Normalize both sides: placeholder expansion, case, and path separators.
        expected = expected.replace("${currentdir}", currentDirectory.getAbsolutePath())
                .toLowerCase().replaceAll("\\\\", "/");
        String actual = stdOut.toString().replaceAll(LINE_SEPARATOR, "").replaceAll("\\\\", "/")
                .trim().toLowerCase();
        assertEquals(expected, actual);
    }

    @Then("^the response should contain \"([^\"]*)\"$")
    public void the_response_should_contain(String expected) throws Throwable {
        String actual = stdOut.toString().replaceAll(LINE_SEPARATOR, "").replaceAll("\\\\", "/");
        // BUGFIX: String.replaceAll returns a new string; the previous code discarded
        // the result, so expected values containing backslashes were never normalized.
        expected = expected.replaceAll("\\\\", "/");
        assertTrue(actual, actual.contains(expected));
    }

    @Then("^the response should not contain \"([^\"]*)\"$")
    public void the_response_should_not_contain(String expected) throws Throwable {
        String actual = stdOut.toString().replaceAll(LINE_SEPARATOR, "").replaceAll("\\\\", "/");
        // BUGFIX: assign the normalized value (result was previously discarded).
        expected = expected.replaceAll("\\\\", "/");
        assertFalse(actual, actual.contains(expected));
    }

    @Then("^the response should contain ([^\"]*) lines$")
    public void the_response_should_contain_x_lines(int lines) throws Throwable {
        String[] lineStrings = stdOut.toString().split(LINE_SEPARATOR);
        assertEquals(lines, lineStrings.length);
    }

    @Then("^the response should start with \"([^\"]*)\"$")
    public void the_response_should_start_with(String expected) throws Throwable {
        String actual = stdOut.toString().replaceAll(LINE_SEPARATOR, "");
        assertTrue(actual, actual.startsWith(expected));
    }

    @Then("^the repository directory shall exist$")
    public void the_repository_directory_shall_exist() throws Throwable {
        List<String> output = runAndParseCommand("rev-parse", "--resolve-geogit-dir");
        assertEquals(output.toString(), 1, output.size());
        String location = output.get(0);
        assertNotNull(location);
        if (location.startsWith("Error:")) {
            fail(location);
        }
        File repoDir = new File(location);
        assertTrue("Repository directory not found: " + repoDir.getAbsolutePath(),
                repoDir.exists());
    }

    @Given("^I have a remote ref called \"([^\"]*)\"$")
    public void i_have_a_remote_ref_called(String expected) throws Throwable {
        String ref = "refs/remotes/origin/" + expected;
        geogit.command(UpdateRef.class).setName(ref).setNewValue(ObjectId.NULL).call();
        Optional<Ref> refValue = geogit.command(RefParse.class).setName(ref).call();
        assertTrue(refValue.isPresent());
        assertEquals(refValue.get().getObjectId(), ObjectId.NULL);
    }

    @Given("^I have an unconfigured repository$")
    public void I_have_an_unconfigured_repository() throws Throwable {
        setUpDirectories();
        setupGeogit();
        List<String> output = runAndParseCommand("init");
        assertEquals(output.toString(), 1, output.size());
        assertNotNull(output.get(0));
        assertTrue(output.get(0), output.get(0).startsWith("Initialized"));
    }

    @Given("^I have a merge conflict state$")
    public void I_have_a_merge_conflict_state() throws Throwable {
        I_have_conflicting_branches();
        Ref branch = geogit.command(RefParse.class).setName("branch1").call().get();
        try {
            geogit.command(MergeOp.class).addCommit(Suppliers.ofInstance(branch.getObjectId()))
                    .call();
            fail();
        } catch (IllegalStateException e) {
            // expected: the merge must fail with a conflict, leaving the conflict state behind
        }
    }

    @Given("^I have conflicting branches$")
    public void I_have_conflicting_branches() throws Throwable {
        // Create the following revision graph
        // ............o
        // ............|
        // ............o - Points 1 added
        // .........../|\
        // branch2 - o | o - branch1 - Points 1 modifiedB and points 2 added
        // ............|
        // ............o - points 1 modified
        // ............|
        // ............o - master - HEAD - Lines 1 modified
        // branch1 and master are conflicting
        Feature points1ModifiedB = feature(pointsType, idP1, "StringProp1_3",
                Integer.valueOf(2000), "POINT(1 1)");
        Feature points1Modified = feature(pointsType, idP1, "StringProp1_2",
                Integer.valueOf(1000), "POINT(1 1)");
        insertAndAdd(points1);
        geogit.command(CommitOp.class).setMessage("Commit1").call();
        geogit.command(BranchCreateOp.class).setName("branch1").call();
        geogit.command(BranchCreateOp.class).setName("branch2").call();
        insertAndAdd(points1Modified);
        geogit.command(CommitOp.class).setMessage("Commit2").call();
        insertAndAdd(lines1);
        geogit.command(CommitOp.class).setMessage("Commit3").call();
        geogit.command(CheckoutOp.class).setSource("branch1").call();
        insertAndAdd(points1ModifiedB);
        insertAndAdd(points2);
        geogit.command(CommitOp.class).setMessage("Commit4").call();
        geogit.command(CheckoutOp.class).setSource("branch2").call();
        insertAndAdd(points3);
        geogit.command(CommitOp.class).call();
        geogit.command(CheckoutOp.class).setSource("master").call();
    }

    @Given("^there is a remote repository$")
    public void there_is_a_remote_repository() throws Throwable {
        I_am_in_an_empty_directory();
        // Temporarily swap in a fresh injector/repo, populate "remoterepo", then restore.
        GeoGIT oldGeogit = geogit;
        Injector oldInjector = geogitCLI.getGeogitInjector();
        geogitCLI.setGeogitInjector(GlobalInjectorBuilder.builder.build());
        List<String> output = runAndParseCommand("init", "remoterepo");
        assertEquals(output.toString(), 1, output.size());
        assertNotNull(output.get(0));
        assertTrue(output.get(0), output.get(0).startsWith("Initialized"));
        geogit = geogitCLI.getGeogit();
        runCommand("config", "--global", "user.name", "John Doe");
        runCommand("config", "--global", "user.email", "JohnDoe@example.com");
        insertAndAdd(points1);
        runCommand(("commit -m Commit1").split(" "));
        runCommand(("branch -c branch1").split(" "));
        insertAndAdd(points2);
        runCommand(("commit -m Commit2").split(" "));
        insertAndAdd(points3);
        runCommand(("commit -m Commit3").split(" "));
        runCommand(("checkout master").split(" "));
        insertAndAdd(lines1);
        runCommand(("commit -m Commit4").split(" "));
        insertAndAdd(lines2);
        runCommand(("commit -m Commit5").split(" "));
        geogit = oldGeogit;
        geogitCLI.setGeogit(oldGeogit);
        geogitCLI.setGeogitInjector(oldInjector);
    }

    @Given("^I have a repository$")
    public void I_have_a_repository() throws Throwable {
        I_have_an_unconfigured_repository();
        runCommand("config", "--global", "user.name", "John Doe");
        runCommand("config", "--global", "user.email", "JohnDoe@example.com");
    }

    @Given("^I have a repository with a remote$")
    public void I_have_a_repository_with_a_remote() throws Throwable {
        there_is_a_remote_repository();
        List<String> output = runAndParseCommand("init", "localrepo");
        assertEquals(output.toString(), 1, output.size());
        assertNotNull(output.get(0));
        assertTrue(output.get(0), output.get(0).startsWith("Initialized"));
        runCommand("config", "--global", "user.name", "John Doe");
        runCommand("config", "--global", "user.email", "JohnDoe@example.com");
        runCommand("remote", "add", "origin", currentDirectory + "/remoterepo");
    }

    /** Creates fresh, randomly named home and working directories under target/. */
    private void setUpDirectories() throws IOException {
        homeDirectory = new File("target", "fakeHomeDir" + new Random().nextInt());
        FileUtils.deleteDirectory(homeDirectory);
        assertFalse(homeDirectory.exists());
        assertTrue(homeDirectory.mkdirs());
        currentDirectory = new File("target", "testrepo" + new Random().nextInt());
        FileUtils.deleteDirectory(currentDirectory);
        assertFalse(currentDirectory.exists());
        assertTrue(currentDirectory.mkdirs());
    }

    /** Best-effort cleanup of the scratch directories created by setUpDirectories(). */
    private void deleteDirectories() {
        try {
            FileUtils.deleteDirectory(homeDirectory);
            assertFalse(homeDirectory.exists());
            FileUtils.deleteDirectory(currentDirectory);
            assertFalse(currentDirectory.exists());
        } catch (IOException ignored) {
            // Cleanup only; a failure to delete must not fail the scenario.
        }
    }

    @Given("^I have staged \"([^\"]*)\"$")
    public void I_have_staged(String feature) throws Throwable {
        if (feature.equals("points1")) {
            insertAndAdd(points1);
        } else if (feature.equals("points2")) {
            insertAndAdd(points2);
        } else if (feature.equals("points3")) {
            insertAndAdd(points3);
        } else if (feature.equals("points1_modified")) {
            insertAndAdd(points1_modified);
        } else if (feature.equals("lines1")) {
            insertAndAdd(lines1);
        } else if (feature.equals("lines2")) {
            insertAndAdd(lines2);
        } else if (feature.equals("lines3")) {
            insertAndAdd(lines3);
        } else {
            throw new Exception("Unknown Feature");
        }
    }

    @Given("^I have 6 unstaged features$")
    public void I_have_6_unstaged_features() throws Throwable {
        insertFeatures();
    }

    @Given("^I have unstaged \"([^\"]*)\"$")
    public void I_have_unstaged(String feature) throws Throwable {
        if (feature.equals("points1")) {
            insert(points1);
        } else if (feature.equals("points2")) {
            insert(points2);
        } else if (feature.equals("points3")) {
            insert(points3);
        } else if (feature.equals("points1_modified")) {
            insert(points1_modified);
        } else if (feature.equals("lines1")) {
            insert(lines1);
        } else if (feature.equals("lines2")) {
            insert(lines2);
        } else if (feature.equals("lines3")) {
            insert(lines3);
        } else {
            throw new Exception("Unknown Feature");
        }
    }

    @Given("^I stage 6 features$")
    public void I_stage_6_features() throws Throwable {
        insertAndAddFeatures();
    }

    @Given("^I have several commits$")
    public void I_have_several_commits() throws Throwable {
        insertAndAdd(points1);
        insertAndAdd(points2);
        runCommand(("commit -m Commit1").split(" "));
        insertAndAdd(points3);
        insertAndAdd(lines1);
        runCommand(("commit -m Commit2").split(" "));
        insertAndAdd(lines2);
        insertAndAdd(lines3);
        runCommand(("commit -m Commit3").split(" "));
        insertAndAdd(points1_modified);
        runCommand(("commit -m Commit4").split(" "));
    }

    @Given("^I have several branches")
    public void I_have_several_branches() throws Throwable {
        insertAndAdd(points1);
        runCommand(("commit -m Commit1").split(" "));
        runCommand(("branch -c branch1").split(" "));
        insertAndAdd(points2);
        runCommand(("commit -m Commit2").split(" "));
        insertAndAdd(points3);
        runCommand(("commit -m Commit3").split(" "));
        runCommand(("branch -c branch2").split(" "));
        insertAndAdd(lines1);
        runCommand(("commit -m Commit4").split(" "));
        runCommand(("checkout master").split(" "));
        insertAndAdd(lines2);
        runCommand(("commit -m Commit5").split(" "));
    }

    @Given("I modify and add a feature")
    public void I_modify_and_add_a_feature() throws Throwable {
        insertAndAdd(points1_modified);
    }

    @Given("I modify a feature")
    public void I_modify_a_feature() throws Throwable {
        insert(points1_modified);
    }

    @Given("^I modify a feature type$")
    public void I_modify_a_feature_type() throws Throwable {
        deleteAndReplaceFeatureType();
    }

    @Then("^if I change to the respository subdirectory \"([^\"]*)\"$")
    public void if_I_change_to_the_respository_subdirectory(String subdirSpec) throws Throwable {
        String[] subdirs = subdirSpec.split("/");
        File dir = currentDirectory;
        for (String subdir : subdirs) {
            dir = new File(dir, subdir);
        }
        assertTrue(dir.exists());
        currentDirectory = dir;
    }

    @Given("^I have a patch file$")
    public void I_have_a_patch_file() throws Throwable {
        // Build a one-feature modification patch ("sp" attribute -> "new") and
        // serialize it to <currentDirectory>/test.patch.
        Patch patch = new Patch();
        String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
        Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
        Optional<?> oldValue = Optional.fromNullable(points1.getProperty("sp").getValue());
        GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(oldValue, Optional.of("new"));
        map.put(pointsType.getDescriptor("sp"), diff);
        FeatureDiff featureDiff = new FeatureDiff(path, map, RevFeatureType.build(pointsType),
                RevFeatureType.build(pointsType));
        patch.addModifiedFeature(featureDiff);
        File file = new File(currentDirectory, "test.patch");
        BufferedWriter writer = Files.newWriter(file, Charsets.UTF_8);
        PatchSerializer.write(writer, patch);
        writer.flush();
        writer.close();
    }

    @Given("^I am inside a repository subdirectory \"([^\"]*)\"$")
    public void I_am_inside_a_repository_subdirectory(String subdirSpec) throws Throwable {
        String[] subdirs = subdirSpec.split("/");
        File dir = currentDirectory;
        for (String subdir : subdirs) {
            dir = new File(dir, subdir);
        }
        assertTrue(dir.mkdirs());
        currentDirectory = dir;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.directory.api.ldap.model.url;


import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.util.ArrayList;
import java.util.List;

import org.apache.directory.api.ldap.model.exception.LdapInvalidDnException;
import org.apache.directory.api.ldap.model.exception.LdapURLEncodingException;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.api.ldap.model.url.LdapUrl;
import org.apache.directory.api.ldap.model.url.LdapUrl.Extension;
import org.junit.Test;
import org.junit.runner.RunWith;

import com.mycila.junit.concurrent.Concurrency;
import com.mycila.junit.concurrent.ConcurrentJunitRunner;


/**
 * Test the class LdapUrl: parsing round trips (input URL -> toString()), invalid
 * inputs that must raise {@link LdapURLEncodingException}, and the mutators.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
@RunWith(ConcurrentJunitRunner.class)
@Concurrency()
public class LdapUrlTest
{
    /**
     * Test a null LdapUrl
     */
    @Test
    public void testLdapUrlNull()
    {
        assertEquals( "ldap:///", new LdapUrl().toString() );
    }


    /**
     * test an empty LdapUrl
     */
    @Test
    public void testDnEmpty() throws LdapURLEncodingException
    {
        assertEquals( "ldap:///", new LdapUrl( "" ).toString() );
    }


    /**
     * test a simple LdapUrl
     */
    @Test
    public void testDnSimple() throws LdapURLEncodingException
    {
        assertEquals( "ldap://directory.apache.org:80/", new LdapUrl( "ldap://directory.apache.org:80/" )
            .toString() );
    }


    /**
     * test a LdapUrl host 1
     */
    @Test
    public void testDnWithMinus() throws LdapURLEncodingException
    {
        assertEquals( "ldap://d-a.org:80/", new LdapUrl( "ldap://d-a.org:80/" ).toString() );
    }


    /**
     * test a LdapUrl with a bad port
     */
    @Test(expected = LdapURLEncodingException.class)
    public void testDnBadPort() throws LdapURLEncodingException
    {
        new LdapUrl( "ldap://directory.apache.org:/" );
    }


    /**
     * test a LdapUrl with a bad port 2
     */
    @Test(expected = LdapURLEncodingException.class)
    public void testDnBadPort2() throws LdapURLEncodingException
    {
        new LdapUrl( "ldap://directory.apache.org:-1/" );
    }


    /**
     * test a LdapUrl with a bad port 3
     */
    @Test(expected = LdapURLEncodingException.class)
    public void testDnBadPort3() throws LdapURLEncodingException
    {
        new LdapUrl( "ldap://directory.apache.org:abc/" );
    }


    /**
     * test a LdapUrl with a bad port 4 (above the maximum port number 65535)
     */
    @Test(expected = LdapURLEncodingException.class)
    public void testDnBadPort4() throws LdapURLEncodingException
    {
        new LdapUrl( "ldap://directory.apache.org:65536/" );
    }


    /**
     * test a LdapUrl with no host
     */
    @Test
    public void testDnBadHost1() throws LdapURLEncodingException
    {
        assertEquals( "ldap:///", new LdapUrl( "ldap:///" ).toString() );
    }


    /**
     * test a LdapUrl with a bad host 2
     */
    @Test
    public void testDnBadHost2() throws LdapURLEncodingException
    {
        assertEquals( "ldap://./", new LdapUrl( "ldap://./" ).toString() );
    }


    /**
     * test a LdapUrl with a bad host 3
     */
    @Test
    public void testDnBadHost3() throws LdapURLEncodingException
    {
        assertEquals( "ldap://a..b/", new LdapUrl( "ldap://a..b/" ).toString() );
    }


    /**
     * test a LdapUrl with a bad host 4
     */
    @Test
    public void testDnBadHost4() throws LdapURLEncodingException
    {
        assertEquals( "ldap://-/", new LdapUrl( "ldap://-/" ).toString() );
    }


    /**
     * test a LdapUrl with a bad host 5
     */
    @Test
    public void testDnBadHost5() throws LdapURLEncodingException
    {
        assertEquals( "ldap://a.b.c-/", new LdapUrl( "ldap://a.b.c-/" ).toString() );
    }


    /**
     * test a LdapUrl with a bad host 6
     */
    @Test
    public void testDnBadHost6() throws LdapURLEncodingException
    {
        assertEquals( "ldap://a.b.-c/", new LdapUrl( "ldap://a.b.-c/" ).toString() );
        // NOTE(review): this second construction is redundant — the same URL was
        // just parsed by the assertion above.
        new LdapUrl( "ldap://a.b.-c/" );
    }


    /**
     * test a LdapUrl with a bad host 7
     */
    @Test
    public void testDnBadHost7() throws LdapURLEncodingException
    {
        assertEquals( "ldap://a.-.c/", new LdapUrl( "ldap://a.-.c/" ).toString() );
    }


    /**
     * test a LdapUrl IP host
     */
    @Test
    public void testDnIPV4Host() throws LdapURLEncodingException
    {
        assertEquals( "ldap://1.2.3.4/", new LdapUrl( "ldap://1.2.3.4/" ).toString() );
    }


    /**
     * test a LdapUrl IP host and port
     */
    @Test
    public void testDnIPV4HostPort() throws LdapURLEncodingException
    {
        assertEquals( "ldap://1.2.3.4:80/", new LdapUrl( "ldap://1.2.3.4:80/" ).toString() );
    }


    /**
     * test a LdapUrl with a bad IP host 1 : we should not get an error, but the host will not be considered
     * as an IPV4 address
     */
    @Test
    public void testDnBadHostIP1() throws LdapURLEncodingException
    {
        assertEquals( "ldap://1.1.1/", new LdapUrl( "ldap://1.1.1/" ).toString() );
    }


    /**
     * test a LdapUrl with a bad IP host 2 : we should not get an error, but the host will not be considered
     * as an IPV4 address
     */
    @Test
    public void testDnBadHostIP2() throws LdapURLEncodingException
    {
        assertEquals( "ldap://1.1.1./", new LdapUrl( "ldap://1.1.1./" ).toString() );
    }


    /**
     * test a LdapUrl with a bad IP host 3 : we should not get an error, but the host will not be considered
     * as an IPV4 address
     */
    @Test
    public void testDnBadHostIP3() throws LdapURLEncodingException
    {
        assertEquals( "ldap://1.1.1.100000/", new LdapUrl( "ldap://1.1.1.100000/" ).toString() );
    }


    /**
     * test a LdapUrl with a bad IP host 4
     */
    @Test(expected = LdapURLEncodingException.class)
    public void testDnBadHostIP4() throws LdapURLEncodingException
    {
        new LdapUrl( "ldap://1.1.1.1.1/" );
    }


    /**
     * test a LdapUrl with a valid host which is not an IP
     */
    @Test
    public void testDnNotAnIP() throws LdapURLEncodingException
    {
        assertEquals( "ldap://1.1.1.100000.a/", new LdapUrl( "ldap://1.1.1.100000.a/" ).toString() );
    }


    /**
     * test a LdapUrl IPv6 host
     */
    @Test
    public void testDnIPv6Host() throws LdapURLEncodingException
    {
        assertEquals( "ldap://[::]/", new LdapUrl( "ldap://[::]/" ).toString() );
        assertEquals( "ldap://[1::2]/", new LdapUrl( "ldap://[1::2]/" ).toString() );
        assertEquals( "ldap://[abcd:EF01:0234:5678:abcd:EF01:0234:5678]/",
            new LdapUrl( "ldap://[abcd:EF01:0234:5678:abcd:EF01:0234:5678]/" ).toString() );
        assertEquals( "ldap://[::2]/", new LdapUrl( "ldap://[::2]/" ).toString() );
        assertEquals( "ldap://[1:2::3:4]/", new LdapUrl( "ldap://[1:2::3:4]/" ).toString() );
        assertEquals( "ldap://[1:2:3:4:5:6::]/", new LdapUrl( "ldap://[1:2:3:4:5:6::]/" ).toString() );
    }


    /**
     * test a bad LdapUrl IPv6 host
     * @throws LdapURLEncodingException
     */
    @Test( expected=LdapURLEncodingException.class )
    public void testDnIPv6BadHost() throws LdapURLEncodingException
    {
        new LdapUrl( "ldap://[:]/" );
    }


    /**
     * test a bad LdapUrl IPv6 host (two '::' groups are not allowed)
     * @throws LdapURLEncodingException
     */
    @Test( expected=LdapURLEncodingException.class )
    public void testDnIPv6BadHost2() throws LdapURLEncodingException
    {
        new LdapUrl( "ldap://[1::2::3]/" );
    }


    /**
     * test a LdapUrl with valid simpleDN
     */
    @Test
    public void testDnSimpleDN() throws LdapURLEncodingException
    {
        assertEquals( "ldap://directory.apache.org:389/dc=example,dc=org/",
            new LdapUrl( "ldap://directory.apache.org:389/dc=example,dc=org/" ).toString() );
    }


    /**
     * test a LdapUrl with valid simpleDN 2
     */
    @Test
    public void testDnSimpleDN2() throws LdapURLEncodingException
    {
        assertEquals( "ldap://directory.apache.org:389/dc=example",
            new LdapUrl( "ldap://directory.apache.org:389/dc=example" ).toString() );
    }


    /**
     * test a LdapUrl with a valid encoded Dn
     */
    @Test
    public void testDnSimpleDNEncoded() throws LdapURLEncodingException
    {
        assertEquals( "ldap://directory.apache.org:389/dc=example%202,dc=org",
            new LdapUrl( "ldap://directory.apache.org:389/dc=example%202,dc=org" ).toString() );
    }


    /**
     * test a LdapUrl with an invalid Dn
     */
    @Test(expected = LdapURLEncodingException.class)
    public void testDnInvalidDN() throws LdapURLEncodingException
    {
        new LdapUrl( "ldap://directory.apache.org:389/dc=example%202,dc : org" );
    }


    /**
     * test a LdapUrl with an invalid Dn 2
     */
    @Test(expected = LdapURLEncodingException.class)
    public void testDnInvalidDN2() throws LdapURLEncodingException
    {
        new LdapUrl( "ldap://directory.apache.org:389/dc=example%202,dc = org," );
    }


    /**
     * test a LdapUrl with valid unique attributes
     */
    @Test
    public void testDnUniqueAttribute() throws LdapURLEncodingException
    {
        assertEquals( "ldap://directory.apache.org:389/dc=example,dc=org?ou",
            new LdapUrl( "ldap://directory.apache.org:389/dc=example,dc=org?ou" ).toString() );
    }


    /**
     * test a LdapUrl with valid attributes
     */
    @Test
    public void testDnAttributes() throws LdapURLEncodingException
    {
        assertEquals( "ldap://directory.apache.org:389/dc=example,dc=org?ou,objectclass,dc",
            new LdapUrl( "ldap://directory.apache.org:389/dc=example,dc=org?ou,objectclass,dc" ).toString() );
    }


    /**
     * test a LdapUrl with valid duplicated attributes: duplicates are collapsed
     */
    @Test
    public void testDnDuplicatedAttributes() throws LdapURLEncodingException
    {
        assertEquals( "ldap://directory.apache.org:389/dc=example,dc=org?ou,dc",
            new LdapUrl( "ldap://directory.apache.org:389/dc=example,dc=org?ou,dc,ou" ).toString() );
    }


    /**
     * test a LdapUrl with invalid attributes
     */
    @Test(expected = LdapURLEncodingException.class)
    public void testLdapInvalideAttributes() throws LdapURLEncodingException
    {
        new LdapUrl( "ldap://directory.apache.org:389/dc=example,dc=org?ou=,dc" );
    }


    /**
     * test a LdapUrl with attributes but no Dn
     */
    @Test
    public void testLdapNoDNAttributes() throws LdapURLEncodingException
    {
        assertEquals( "ldap://directory.apache.org:389/?ou,dc",
            new LdapUrl( "ldap://directory.apache.org:389/?ou,dc" ).toString() );
    }


    /**
     * test 1 from RFC 2255 LdapUrl
     */
    @Test
    public void testLdapRFC2255_1() throws LdapURLEncodingException
    {
        assertEquals( "ldap:///o=University%20of%20Michigan,c=US",
            new LdapUrl( "ldap:///o=University%20of%20Michigan,c=US" ).toString() );
    }


    /**
     * test 2 from RFC 2255 LdapUrl
     */
    @Test
    public void testLdapRFC2255_2() throws LdapURLEncodingException
    {
        assertEquals( "ldap://ldap.itd.umich.edu/o=University%20of%20Michigan,c=US",
            new LdapUrl( "ldap://ldap.itd.umich.edu/o=University%20of%20Michigan,c=US" ).toString() );
    }


    /**
     * test 3 from RFC 2255 LdapUrl
     */
    @Test
    public void testLdapRFC2255_3() throws LdapURLEncodingException
    {
        assertEquals( "ldap://ldap.itd.umich.edu/o=University%20of%20Michigan,c=US?postalAddress",
            new LdapUrl( "ldap://ldap.itd.umich.edu/o=University%20of%20Michigan,c=US?postalAddress" ).toString() );
    }


    /**
     * test 4 from RFC 2255 LdapUrl
     */
    @Test
    public void testLdapRFC2255_4() throws LdapURLEncodingException
    {
        assertEquals( "ldap://host.com:6666/o=University%20of%20Michigan,c=US??sub?(cn=Babs%20Jensen)",
            new LdapUrl( "ldap://host.com:6666/o=University%20of%20Michigan,c=US??sub?(cn=Babs%20Jensen)" ).toString() );
    }


    /**
     * test 5 from RFC 2255 LdapUrl
     */
    @Test
    public void testLdapRFC2255_5() throws LdapURLEncodingException
    {
        assertEquals( "ldap://ldap.itd.umich.edu/c=GB?objectClass?one",
            new LdapUrl( "ldap://ldap.itd.umich.edu/c=GB?objectClass?one" ).toString() );
    }


    /**
     * test 6 from RFC 2255 LdapUrl (percent-escape hex digits are upper-cased)
     */
    @Test
    public void testLdapRFC2255_6() throws LdapURLEncodingException
    {
        assertEquals( "ldap://ldap.question.com/o=Question%3F,c=US?mail",
            new LdapUrl( "ldap://ldap.question.com/o=Question%3f,c=US?mail" ).toString() );
    }


    /**
     * test 7 from RFC 2255 LdapUrl (percent-escape hex digits are upper-cased)
     */
    @Test
    public void testLdapRFC2255_7() throws LdapURLEncodingException
    {
        assertEquals( "ldap://ldap.netscape.com/o=Babsco,c=US???(int=%5C00%5C00%5C00%5C04)",
            new LdapUrl( "ldap://ldap.netscape.com/o=Babsco,c=US???(int=%5c00%5c00%5c00%5c04)" ).toString() );
    }


    /**
     * test 8 from RFC 2255 LdapUrl
     */
    @Test
    public void testLdapRFC2255_8() throws LdapURLEncodingException
    {
        assertEquals( "ldap:///??sub??bindname=cn=Manager%2co=Foo",
            new LdapUrl( "ldap:///??sub??bindname=cn=Manager%2co=Foo" ).toString() );
    }


    /**
     * test 9 from RFC 2255 LdapUrl
     */
    @Test
    public void testLdapRFC2255_9() throws LdapURLEncodingException
    {
        assertEquals( "ldap:///??sub??!bindname=cn=Manager%2co=Foo",
            new LdapUrl( "ldap:///??sub??!bindname=cn=Manager%2co=Foo" ).toString() );
    }


    /**
     * test an empty ldaps:// LdapUrl
     */
    @Test
    public void testDnEmptyLdaps() throws LdapURLEncodingException
    {
        assertEquals( "ldaps:///", new LdapUrl( "ldaps:///" ).toString() );
    }


    /**
     * test an simple ldaps:// LdapUrl
     */
    @Test
    public void testDnSimpleLdaps() throws LdapURLEncodingException
    {
        assertEquals( "ldaps://directory.apache.org:80/", new LdapUrl( "ldaps://directory.apache.org:80/" )
            .toString() );
    }


    /**
     * test the setScheme() method: invalid and null values fall back to "ldap://"
     */
    @Test
    public void testDnSetScheme() throws LdapURLEncodingException
    {
        LdapUrl url = new LdapUrl();
        assertEquals( "ldap://", url.getScheme() );

        url.setScheme( "invalid" );
        assertEquals( "ldap://", url.getScheme() );

        url.setScheme( "ldap://" );
        assertEquals( "ldap://", url.getScheme() );

        url.setScheme( "ldaps://" );
        assertEquals( "ldaps://", url.getScheme() );

        url.setScheme( null );
        assertEquals( "ldap://", url.getScheme() );
    }


    /**
     * test the setHost() method
     */
    @Test
    public void testDnSetHost() throws LdapURLEncodingException
    {
        LdapUrl url = new LdapUrl();
        assertNull( url.getHost() );

        url.setHost( "ldap.apache.org" );
        assertEquals( "ldap.apache.org", url.getHost() );
        assertEquals( "ldap://ldap.apache.org/", url.toString() );

        url.setHost( null );
        assertNull( url.getHost() );
        assertEquals( "ldap:///", url.toString() );
    }


    /**
     * test the setPort() method: out-of-range values reset the port to -1 (unset)
     */
    @Test
    public void testDnSetPort() throws LdapURLEncodingException
    {
        LdapUrl url = new LdapUrl();
        assertEquals( -1, url.getPort() );

        url.setPort( 389 );
        assertEquals( 389, url.getPort() );
        assertEquals( "ldap://:389/", url.toString() );

        url.setPort( 0 );
        assertEquals( -1, url.getPort() );
        assertEquals( "ldap:///", url.toString() );

        url.setPort( 65536 );
        assertEquals( -1, url.getPort() );
        assertEquals( "ldap:///", url.toString() );
    }


    /**
     * test the setDn() method
     */
    @Test
    public void testDnSetDn() throws LdapURLEncodingException, LdapInvalidDnException
    {
        LdapUrl url = new LdapUrl();
        assertNull( url.getDn() );

        Dn dn = new Dn( "dc=example,dc=com" );
        url.setDn( dn );
        assertEquals( dn, url.getDn() );
        assertEquals( "ldap:///dc=example,dc=com", url.toString() );

        url.setDn( null );
        assertNull( url.getDn() );
        assertEquals( "ldap:///", url.toString() );
    }


    /**
     * test the setAttributes() method: null clears the list, entries appear in order
     */
    @Test
    public void testDnSetAttributes() throws LdapURLEncodingException, LdapInvalidDnException
    {
        LdapUrl url = new LdapUrl();
        assertNotNull( url.getAttributes() );
        assertTrue( url.getAttributes().isEmpty() );

        List<String> attributes = new ArrayList<String>();
        url.setDn( new Dn( "dc=example,dc=com" ) );

        url.setAttributes( null );
        assertNotNull( url.getAttributes() );
        assertTrue( url.getAttributes().isEmpty() );
        assertEquals( "ldap:///dc=example,dc=com", url.toString() );

        attributes.add( "cn" );
        url.setAttributes( attributes );
        assertNotNull( url.getAttributes() );
        assertEquals( 1, url.getAttributes().size() );
        assertEquals( "ldap:///dc=example,dc=com?cn", url.toString() );

        attributes.add( "userPassword;binary" );
        url.setAttributes( attributes );
        assertNotNull( url.getAttributes() );
        assertEquals( 2, url.getAttributes().size() );
        assertEquals( "ldap:///dc=example,dc=com?cn,userPassword;binary", url.toString() );
    }


    /**
     * test the setScope() method
     * NOTE(review): this method is truncated at the end of the visible chunk; it
     * continues past the last line shown here.
     */
    @Test
    public void testDnSetScope() throws LdapURLEncodingException, LdapInvalidDnException
    {
        LdapUrl url = new LdapUrl();
        assertEquals( SearchScope.OBJECT,
url.getScope() ); url.setDn( new Dn( "dc=example,dc=com" ) ); url.setScope( SearchScope.ONELEVEL ); assertEquals( SearchScope.ONELEVEL, url.getScope() ); assertEquals( "ldap:///dc=example,dc=com??one", url.toString() ); url.setScope( SearchScope.SUBTREE ); assertEquals( SearchScope.SUBTREE, url.getScope() ); assertEquals( "ldap:///dc=example,dc=com??sub", url.toString() ); url.setScope( -1 ); assertEquals( SearchScope.OBJECT, url.getScope() ); assertEquals( "ldap:///dc=example,dc=com", url.toString() ); } /** * test the setFilter() method */ @Test public void testDnSetFilter() throws LdapURLEncodingException, LdapInvalidDnException { LdapUrl url = new LdapUrl(); assertNull( url.getFilter() ); url.setDn( new Dn( "dc=example,dc=com" ) ); url.setFilter( "(objectClass=person)" ); assertEquals( "(objectClass=person)", url.getFilter() ); assertEquals( "ldap:///dc=example,dc=com???(objectClass=person)", url.toString() ); url.setFilter( "(cn=Babs Jensen)" ); assertEquals( "(cn=Babs Jensen)", url.getFilter() ); assertEquals( "ldap:///dc=example,dc=com???(cn=Babs%20Jensen)", url.toString() ); url.setFilter( null ); assertNull( url.getFilter() ); assertEquals( "ldap:///dc=example,dc=com", url.toString() ); } /** * test a LdapUrl without a scheme * */ @Test public void testLdapURLNoScheme() throws LdapURLEncodingException { try { new LdapUrl( "/ou=system" ); fail(); } catch ( LdapURLEncodingException luee ) { assertTrue( true ); } } /** * test a LdapUrl without a host but with a Dn * */ @Test public void testLdapURLNoHostDN() throws LdapURLEncodingException { try { LdapUrl url = new LdapUrl( "ldap:///ou=system" ); assertEquals( "ldap:///ou=system", url.toString() ); } catch ( LdapURLEncodingException luee ) { fail(); } } /** * test a LdapUrl with a host, no port, and a Dn * */ @Test public void testLdapURLHostNoPortDN() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost/ou=system" ); assertEquals( "ldap://localhost/ou=system", url.toString() ); } 
/** * test a LdapUrl with no host, a port, and a Dn * */ @Test(expected = LdapURLEncodingException.class) public void testLdapURLNoHostPortDN() throws LdapURLEncodingException { new LdapUrl( "ldap://:123/ou=system" ); fail(); } /** * test a LdapUrl with no Dn * */ @Test public void testLdapURLNoDN() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/" ); assertEquals( "ldap://localhost:123/", url.toString() ); } /** * test a LdapUrl with no Dn and no attributes * */ @Test public void testLdapURLNoDNNoAttrs() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?" ); assertEquals( "ldap://localhost:123/", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes and no scope * */ @Test public void testLdapURLNoDNNoAttrsNoScope() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/??" ); assertEquals( "ldap://localhost:123/", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes, no scope and no filter * */ @Test public void testLdapURLNoDNNoAttrsNoScopeNoFilter() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/???" 
); assertEquals( "ldap://localhost:123/", url.toString() ); } /** * test a LdapUrl with no Dn and attributes * */ @Test public void testLdapURLDN() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system" ); assertEquals( "ldap://localhost:123/ou=system", url.toString() ); } /** * test a LdapUrl with a Dn and attributes * */ @Test public void testLdapURLDNAttrs() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system?ou,dc,cn" ); assertEquals( "ldap://localhost:123/ou=system?ou,dc,cn", url.toString() ); } /** * test a LdapUrl with no Dn and attributes * */ @Test public void testLdapURLNoDNAttrs() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?ou,dc,cn" ); assertEquals( "ldap://localhost:123/?ou,dc,cn", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes an scope * */ @Test public void testLdapURLNoDNNoAttrsScope() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/??sub" ); assertEquals( "ldap://localhost:123/??sub", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes an scope base * */ @Test public void testLdapURLNoDNNoAttrsScopeBase() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/??base" ); assertEquals( "ldap://localhost:123/", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes an default scope * */ @Test public void testLdapURLNoDNNoAttrsDefaultScope() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/??" 
); assertEquals( "ldap://localhost:123/", url.toString() ); } /** * test a LdapUrl with a Dn, no attributes an scope * */ @Test public void testLdapURLDNNoAttrsScope() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system??sub" ); assertEquals( "ldap://localhost:123/ou=system??sub", url.toString() ); } /** * test a LdapUrl with a Dn, no attributes an scope base * */ @Test public void testLdapURLDNNoAttrsScopeBase() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system??base" ); assertEquals( "ldap://localhost:123/ou=system", url.toString() ); } /** * test a LdapUrl with a Dn, no attributes an default scope * */ @Test public void testLdapURLDNNoAttrsDefaultScope() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system??" ); assertEquals( "ldap://localhost:123/ou=system", url.toString() ); } /** * test a LdapUrl with no Dn, some attributes an scope * */ @Test public void testLdapURLNoDNAttrsScope() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?ou,cn?sub" ); assertEquals( "ldap://localhost:123/?ou,cn?sub", url.toString() ); } /** * test a LdapUrl with no Dn, some attributes an scope base * */ @Test public void testLdapURLNoDNAttrsScopeBase() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?ou,cn?base" ); assertEquals( "ldap://localhost:123/?ou,cn", url.toString() ); } /** * test a LdapUrl with no Dn, some attributes an default scope * */ @Test public void testLdapURLNoDNAttrsDefaultScope() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?ou,cn?" 
); assertEquals( "ldap://localhost:123/?ou,cn", url.toString() ); } /** * test a LdapUrl with a Dn, some attributes an scope * */ @Test public void testLdapURLDNAttrsScope() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system?ou,cn?sub" ); assertEquals( "ldap://localhost:123/ou=system?ou,cn?sub", url.toString() ); } /** * test a LdapUrl with a Dn, some attributes an scope base * */ @Test public void testLdapURLDNAttrsScopeBase() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system?ou,cn?base" ); assertEquals( "ldap://localhost:123/ou=system?ou,cn", url.toString() ); } /** * test a LdapUrl with a Dn, some attributes an default scope * */ @Test public void testLdapURLDNAttrsDefaultScope() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system?ou,cn?" ); assertEquals( "ldap://localhost:123/ou=system?ou,cn", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes, no scope and filter * */ @Test public void testLdapURLNoDNNoAttrsNoScopeFilter() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/???(cn=test)" ); assertEquals( "ldap://localhost:123/???(cn=test)", url.toString() ); } /** * test a LdapUrl with a Dn, no attributes, no scope and filter * */ @Test public void testLdapURLDNNoAttrsNoScopeFilter() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system???(cn=test)" ); assertEquals( "ldap://localhost:123/ou=system???(cn=test)", url.toString() ); } /** * test a LdapUrl with no Dn, some attributes, no scope and filter * */ @Test public void testLdapURLNoDNAttrsNoScopeFilter() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?cn,ou,dc??(cn=test)" ); assertEquals( "ldap://localhost:123/?cn,ou,dc??(cn=test)", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes, a scope and filter * */ @Test public void 
testLdapURLNoDNNoAttrsScopeFilter() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/??sub?(cn=test)" ); assertEquals( "ldap://localhost:123/??sub?(cn=test)", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes, a base scope, and filter * */ @Test public void testLdapURLNoDNNoAttrsScopeBaseFilter() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/??base?(cn=test)" ); assertEquals( "ldap://localhost:123/???(cn=test)", url.toString() ); } /** * test a LdapUrl with no Dn, some attributes, a scope and filter * */ @Test public void testLdapURLNoDNAttrsScopeFilter() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?cn,ou,dc?sub?(cn=test)" ); assertEquals( "ldap://localhost:123/?cn,ou,dc?sub?(cn=test)", url.toString() ); } /** * test a LdapUrl with no Dn, some attributes, a base scope, and filter * */ @Test public void testLdapURLNoDNAttrsScopeBaseFilter() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?cn,ou,dc?base?(cn=test)" ); assertEquals( "ldap://localhost:123/?cn,ou,dc??(cn=test)", url.toString() ); } /** * test a LdapUrl with a Dn, no attributes, a scope and filter * */ @Test public void testLdapURLDNNoAttrsScopeFilter() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system??sub?(cn=test)" ); assertEquals( "ldap://localhost:123/ou=system??sub?(cn=test)", url.toString() ); } /** * test a LdapUrl with a Dn, no attributes, a base scope, and filter * */ @Test public void testLdapURLDNNoAttrsScopeBaseFilter() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system??base?(cn=test)" ); assertEquals( "ldap://localhost:123/ou=system???(cn=test)", url.toString() ); } /** * test a LdapUrl with a Dn, some attributes, no scope and filter * */ @Test public void testLdapURLDNAttrsNoScopeFilter() throws LdapURLEncodingException { LdapUrl url = new 
LdapUrl( "ldap://localhost:123/ou=system?cn,dc,ou??(cn=test)" ); assertEquals( "ldap://localhost:123/ou=system?cn,dc,ou??(cn=test)", url.toString() ); } /** * test a LdapUrl with a Dn, some attributes, a scope and filter * */ @Test public void testLdapURLDNAttrsScopeFilter() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system?cn,ou,dc?sub?(cn=test)" ); assertEquals( "ldap://localhost:123/ou=system?cn,ou,dc?sub?(cn=test)", url.toString() ); } /** * test a LdapUrl with a Dn, some attributes, a base scope, and filter * */ @Test public void testLdapURLDNAttrsScopeBaseFilter() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system?cn,ou,dc?base?(cn=test)" ); assertEquals( "ldap://localhost:123/ou=system?cn,ou,dc??(cn=test)", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes, no scope, no filter and no extension * */ @Test public void testLdapURLNoDNNoAttrsNoScopeNoFilterNoExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/????" 
); assertEquals( "ldap://localhost:123/", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes, no scope, no filter and some extensions * */ @Test public void testLdapURLNoDNNoAttrsNoScopeNoFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/????!a=b,!c" ); assertEquals( "ldap://localhost:123/????!a=b,!c", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes, no scope, a filter and some extensions * */ @Test public void testLdapURLNoDNNoAttrsNoScopeFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/???(cn=test)?!a=b,!c" ); assertEquals( "ldap://localhost:123/???(cn=test)?!a=b,!c", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes, a scope, no filter and some extensions * */ @Test public void testLdapURLNoDNNoAttrsScopeNoFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/??sub??!a=b,!c" ); assertEquals( "ldap://localhost:123/??sub??!a=b,!c", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes, a base scope, no filter and some extensions * */ @Test public void testLdapURLNoDNNoAttrsScopeBaseNoFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/??base??!a=b,!c" ); assertEquals( "ldap://localhost:123/????!a=b,!c", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes, a scope, a filter and some extensions * */ @Test public void testLdapURLNoDNNoAttrsScopeFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/??sub?(cn=test)?!a=b,!c" ); assertEquals( "ldap://localhost:123/??sub?(cn=test)?!a=b,!c", url.toString() ); } /** * test a LdapUrl with no Dn, no attributes, a base scope, a filter and some extensions * */ @Test public void testLdapURLNoDNNoAttrsScopeBaseFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( 
"ldap://localhost:123/??base?(cn=test)?!a=b,!c" ); assertEquals( "ldap://localhost:123/???(cn=test)?!a=b,!c", url.toString() ); } /** * test a LdapUrl with no Dn, some attributes, no scope, no filter and some extensions * */ @Test public void testLdapURLNoDNAttrsNoScopeNoFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?cn,dc,ou???!a=b,!c" ); assertEquals( "ldap://localhost:123/?cn,dc,ou???!a=b,!c", url.toString() ); } /** * test a LdapUrl with no Dn, some attributes, no scope, a filter and some extensions * */ @Test public void testLdapURLNoDNAttrsNoScopeFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?cn,dc,ou??(cn=test)?!a=b,!c" ); assertEquals( "ldap://localhost:123/?cn,dc,ou??(cn=test)?!a=b,!c", url.toString() ); } /** * test a LdapUrl with no Dn, some attributes, a scope, no filter and some extensions * */ @Test public void testLdapURLNoDNAttrsScopeNoFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?cn,dc,ou?sub??!a=b,!c" ); assertEquals( "ldap://localhost:123/?cn,dc,ou?sub??!a=b,!c", url.toString() ); } /** * test a LdapUrl with no Dn, some attributes, a base scope, no filter and some extensions * */ @Test public void testLdapURLNoDNAttrsScopeBaseNoFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?cn,dc,ou?base??!a=b,!c" ); assertEquals( "ldap://localhost:123/?cn,dc,ou???!a=b,!c", url.toString() ); } /** * test a LdapUrl with no Dn, some attributes, a scope, a filter and some extensions * */ @Test public void testLdapURLNoDNAttrsScopeFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?cn,dc,ou?sub?(cn=test)?!a=b,!c" ); assertEquals( "ldap://localhost:123/?cn,dc,ou?sub?(cn=test)?!a=b,!c", url.toString() ); } /** * test a LdapUrl with no Dn, some attributes, a base scope, a filter and some extensions * */ 
@Test public void testLdapURLNoDNAttrsScopeBaseFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/?cn,dc,ou?base?(cn=test)?!a=b,!c" ); assertEquals( "ldap://localhost:123/?cn,dc,ou??(cn=test)?!a=b,!c", url.toString() ); } /** * test a LdapUrl with a Dn, no attributes, no scope, no filter and some extensions * */ @Test public void testLdapURLDNNoAttrsNoScopeNoFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system????!a=b,!c" ); assertEquals( "ldap://localhost:123/ou=system????!a=b,!c", url.toString() ); } /** * test a LdapUrl with a Dn, no attributes, no scope, a filter and some extensions * */ @Test public void testLdapURLDNNoAttrsNoScopeFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system???(cn=test)?!a=b,!c" ); assertEquals( "ldap://localhost:123/ou=system???(cn=test)?!a=b,!c", url.toString() ); } /** * test a LdapUrl with a Dn, no attributes, a scope, no filter and some extensions * */ @Test public void testLdapURLDNNoAttrsScopeNoFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system??sub??!a=b,!c" ); assertEquals( "ldap://localhost:123/ou=system??sub??!a=b,!c", url.toString() ); } /** * test a LdapUrl with a Dn, no attributes, a base scope, no filter and some extensions * */ @Test public void testLdapURLDNNoAttrsScopeBaseNoFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system??base??!a=b,!c" ); assertEquals( "ldap://localhost:123/ou=system????!a=b,!c", url.toString() ); } /** * test a LdapUrl with a Dn, no attributes, a scope, a filter and some extensions * */ @Test public void testLdapURLDNNoAttrsScopeFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system??sub?(cn=test)?!a=b,!c" ); assertEquals( 
"ldap://localhost:123/ou=system??sub?(cn=test)?!a=b,!c", url.toString() ); } /** * test a LdapUrl with a Dn, no attributes, a base scope, a filter and some extensions * */ @Test public void testLdapURLDNNoAttrsScopeBaseFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system??base?(cn=test)?!a=b,!c" ); assertEquals( "ldap://localhost:123/ou=system???(cn=test)?!a=b,!c", url.toString() ); } /** * test a LdapUrl with a Dn, some attributes, no scope, no filter and some extensions * */ @Test public void testLdapURLDNAttrsNoScopeNoFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system?cn,ou,dc???!a=b,!c" ); assertEquals( "ldap://localhost:123/ou=system?cn,ou,dc???!a=b,!c", url.toString() ); } /** * test a LdapUrl with a Dn, some attributes, no scope, a filter and some extensions * */ @Test public void testLdapURLDNAttrsNoScopeFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system?cn,ou,dc??(cn=test)?!a=b,!c" ); assertEquals( "ldap://localhost:123/ou=system?cn,ou,dc??(cn=test)?!a=b,!c", url.toString() ); } /** * test a LdapUrl with a Dn, some attributes, a scope, no filter and some extensions * */ @Test public void testLdapURLDNAttrsScopeNoFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system?cn,ou,dc?sub??!a=b,!c" ); assertEquals( "ldap://localhost:123/ou=system?cn,ou,dc?sub??!a=b,!c", url.toString() ); } /** * test a LdapUrl with a Dn, some attributes, a base scope, no filter and some extensions * */ @Test public void testLdapURLDNAttrsScopeBaseNoFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system?cn,ou,dc?base??!a=b,!c" ); assertEquals( "ldap://localhost:123/ou=system?cn,ou,dc???!a=b,!c", url.toString() ); } /** * test a LdapUrl with a Dn, some attributes, a scope, a filter and some 
extensions * */ @Test public void testLdapURLDNAttrsScopeFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system?cn,ou,dc?sub?(cn=test)?!a=b,!c" ); assertEquals( "ldap://localhost:123/ou=system?cn,ou,dc?sub?(cn=test)?!a=b,!c", url.toString() ); } /** * test a LdapUrl with a Dn, some attributes, a base scope, a filter and some extensions * */ @Test public void testLdapURLDNAttrsScopeBaseFilterExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/ou=system?cn,ou,dc?base?(cn=test)?!a=b,!c" ); assertEquals( "ldap://localhost:123/ou=system?cn,ou,dc??(cn=test)?!a=b,!c", url.toString() ); } /** * Test a LdapUrl with an extension after an empty extension. */ @Test public void testLdapURLExtensionAfterEmptyExtension() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/????!a=b,!c,d=e" ); assertEquals( "ldap://localhost:123/????!a=b,!c,d=e", url.toString() ); } /** * Test the extension order of an LdapUrl. 
*/ @Test public void testLdapURLExtensionOrder() throws LdapURLEncodingException { LdapUrl url = new LdapUrl( "ldap://localhost:123/????!a=b,!c,!x,d=e,f=g,!h=i" ); assertEquals( "ldap://localhost:123/????!a=b,!c,!x,d=e,f=g,!h=i", url.toString() ); List<Extension> extensions = url.getExtensions(); assertTrue( extensions.get( 0 ).isCritical() ); assertEquals( "a", extensions.get( 0 ).getType() ); assertEquals( "b", extensions.get( 0 ).getValue() ); assertTrue( extensions.get( 1 ).isCritical() ); assertEquals( "c", extensions.get( 1 ).getType() ); assertNull( extensions.get( 1 ).getValue() ); assertTrue( extensions.get( 2 ).isCritical() ); assertEquals( "x", extensions.get( 2 ).getType() ); assertNull( extensions.get( 2 ).getValue() ); assertFalse( extensions.get( 3 ).isCritical() ); assertEquals( "d", extensions.get( 3 ).getType() ); assertEquals( "e", extensions.get( 3 ).getValue() ); assertFalse( extensions.get( 4 ).isCritical() ); assertEquals( "f", extensions.get( 4 ).getType() ); assertEquals( "g", extensions.get( 4 ).getValue() ); assertTrue( extensions.get( 5 ).isCritical() ); assertEquals( "h", extensions.get( 5 ).getType() ); assertEquals( "i", extensions.get( 5 ).getValue() ); } /** * Test UTF-8 values in extension values. 
*/ @Test public void testLdapURLExtensionWithUtf8Values() throws Exception { String germanChars = new String( new byte[] { ( byte ) 0xC3, ( byte ) 0x84, ( byte ) 0xC3, ( byte ) 0x96, ( byte ) 0xC3, ( byte ) 0x9C, ( byte ) 0xC3, ( byte ) 0x9F, ( byte ) 0xC3, ( byte ) 0xA4, ( byte ) 0xC3, ( byte ) 0xB6, ( byte ) 0xC3, ( byte ) 0xBC }, "UTF-8" ); LdapUrl url1 = new LdapUrl(); url1.setHost( "localhost" ); url1.setPort( 123 ); url1.setDn( Dn.EMPTY_DN ); url1.getExtensions().add( new Extension( false, "X-CONNECTION-NAME", germanChars ) ); assertEquals( "ldap://localhost:123/????X-CONNECTION-NAME=%C3%84%C3%96%C3%9C%C3%9F%C3%A4%C3%B6%C3%BC", url1 .toString() ); LdapUrl url2 = new LdapUrl( "ldap://localhost:123/????X-CONNECTION-NAME=%c3%84%c3%96%c3%9c%c3%9f%c3%a4%c3%b6%c3%bc" ); assertEquals( germanChars, url1.getExtensionValue( "X-CONNECTION-NAME" ) ); assertEquals( "ldap://localhost:123/????X-CONNECTION-NAME=%C3%84%C3%96%C3%9C%C3%9F%C3%A4%C3%B6%C3%BC", url2 .toString() ); } /** * Test comma in extension value. */ @Test public void testLdapURLExtensionWithCommaValue() throws Exception { LdapUrl url1 = new LdapUrl(); url1.setHost( "localhost" ); url1.setPort( 123 ); url1.setDn( Dn.EMPTY_DN ); url1.getExtensions().add( new Extension( false, "X-CONNECTION-NAME", "," ) ); assertEquals( "ldap://localhost:123/????X-CONNECTION-NAME=%2c", url1.toString() ); LdapUrl url2 = new LdapUrl( "ldap://localhost:123/????X-CONNECTION-NAME=%2c" ); assertEquals( ",", url1.getExtensionValue( "X-CONNECTION-NAME" ) ); assertEquals( "ldap://localhost:123/????X-CONNECTION-NAME=%2c", url2.toString() ); } /** * Test with RFC 3986 reserved characters in extension value. * * reserved = gen-delims / sub-delims * gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" * sub-delims = "!" / "$" / "&" / "'" / "(" / ")" * / "*" / "+" / "," / ";" / "=" * * RFC 4516 specifies that '?' and a ',' must be percent encoded. 
* */ @Test public void testLdapURLExtensionWithRFC3986ReservedCharsAndRFC4616Exception() throws Exception { LdapUrl url1 = new LdapUrl(); url1.setHost( "localhost" ); url1.setPort( 123 ); url1.setDn( Dn.EMPTY_DN ); url1.getExtensions().add( new Extension( false, "X-CONNECTION-NAME", ":/?#[]@!$&'()*+,;=" ) ); assertEquals( "ldap://localhost:123/????X-CONNECTION-NAME=:/%3F#[]@!$&'()*+%2c;=", url1.toString() ); LdapUrl url2 = new LdapUrl( "ldap://localhost:123/????X-CONNECTION-NAME=:/%3f#[]@!$&'()*+%2c;=" ); assertEquals( ":/?#[]@!$&'()*+,;=", url1.getExtensionValue( "X-CONNECTION-NAME" ) ); assertEquals( "ldap://localhost:123/????X-CONNECTION-NAME=:/%3F#[]@!$&'()*+%2c;=", url2.toString() ); } /** * Test with RFC 3986 unreserved characters in extension value. * * unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" */ @Test public void testLdapURLExtensionWithRFC3986UnreservedChars() throws Exception { LdapUrl url1 = new LdapUrl(); url1.setHost( "localhost" ); url1.setPort( 123 ); url1.setDn( Dn.EMPTY_DN ); url1.getExtensions().add( new Extension( false, "X-CONNECTION-NAME", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-._~" ) ); assertEquals( "ldap://localhost:123/????X-CONNECTION-NAME=abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-._~", url1.toString() ); LdapUrl url2 = new LdapUrl( "ldap://localhost:123/????X-CONNECTION-NAME=abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-._~" ); assertEquals( "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-._~", url1 .getExtensionValue( "X-CONNECTION-NAME" ) ); assertEquals( "ldap://localhost:123/????X-CONNECTION-NAME=abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-._~", url2.toString() ); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.felix.scr.integration;

import junit.framework.TestCase;
import org.apache.felix.scr.Component;
import org.apache.felix.scr.integration.components.SimpleComponent;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.junit.JUnit4TestRunner;
import org.osgi.framework.Constants;

/**
 * Integration tests for the interaction between Declarative Services components
 * and Configuration Admin. Each test drives a component (declared with a given
 * {@code configuration-policy}) through a configure / reconfigure / delete
 * cycle and asserts the resulting component state and instance identity.
 * <p>
 * Relies on helpers inherited from {@code ComponentTestBase}:
 * {@code findComponentByName}, {@code findComponentsByName},
 * {@code configure}, {@code deleteConfig}, {@code createFactoryConfiguration},
 * {@code deleteFactoryConfigurations} and {@code delay} (which waits for the
 * asynchronous SCM/CM events to settle before asserting).
 */
@RunWith(JUnit4TestRunner.class)
public class ComponentConfigurationTest extends ComponentTestBase
{
    static
    {
        // uncomment to enable debugging of this test class
        // paxRunnerVmOption = DEBUG_VM_OPTION;
    }

    /**
     * configuration-policy = ignore: the component must activate without
     * configuration and must never see configuration properties, even when a
     * matching configuration is created or deleted.
     */
    @Test
    public void test_SimpleComponent_configuration_ignore()
    {
        final String pid = "SimpleComponent.configuration.ignore";
        final Component component = findComponentByName( pid );

        deleteConfig( pid );
        delay();

        // component exists but is initially disabled and not instantiated
        TestCase.assertNotNull( component );
        TestCase.assertFalse( component.isDefaultEnabled() );
        TestCase.assertEquals( Component.STATE_DISABLED, component.getState() );
        TestCase.assertNull( SimpleComponent.INSTANCE );

        component.enable();
        delay();

        // activates without configuration; no PROP_NAME visible
        TestCase.assertEquals( Component.STATE_ACTIVE, component.getState() );
        TestCase.assertNotNull( SimpleComponent.INSTANCE );
        TestCase.assertNull( SimpleComponent.INSTANCE.getProperty( PROP_NAME ) );

        configure( pid );
        delay();

        // policy "ignore": creating a configuration must not change anything
        TestCase.assertEquals( Component.STATE_ACTIVE, component.getState() );
        TestCase.assertNotNull( SimpleComponent.INSTANCE );
        TestCase.assertNull( SimpleComponent.INSTANCE.getProperty( PROP_NAME ) );

        deleteConfig( pid );
        delay();

        // ... nor must deleting it
        TestCase.assertEquals( Component.STATE_ACTIVE, component.getState() );
        TestCase.assertNotNull( SimpleComponent.INSTANCE );
        TestCase.assertNull( SimpleComponent.INSTANCE.getProperty( PROP_NAME ) );

        component.disable();
        delay();

        TestCase.assertEquals( Component.STATE_DISABLED, component.getState() );
        TestCase.assertNull( SimpleComponent.INSTANCE );
    }

    /**
     * configuration-policy = optional: the component activates with or without
     * configuration, but every configuration change (create and delete)
     * deactivates the old instance and activates a fresh one.
     */
    @Test
    public void test_SimpleComponent_configuration_optional()
    {
        final String pid = "SimpleComponent.configuration.optional";
        final Component component = findComponentByName( pid );

        deleteConfig( pid );
        delay();

        TestCase.assertNotNull( component );
        TestCase.assertFalse( component.isDefaultEnabled() );
        TestCase.assertEquals( Component.STATE_DISABLED, component.getState() );
        TestCase.assertNull( SimpleComponent.INSTANCE );

        component.enable();
        delay();

        // first activation: no configuration, so no PROP_NAME
        final SimpleComponent firstInstance = SimpleComponent.INSTANCE;
        TestCase.assertEquals( Component.STATE_ACTIVE, component.getState() );
        TestCase.assertNotNull( firstInstance );
        TestCase.assertNull( firstInstance.getProperty( PROP_NAME ) );

        configure( pid );
        delay();

        // reconfiguration: component stays active, properties now visible
        final SimpleComponent secondInstance = SimpleComponent.INSTANCE;
        TestCase.assertEquals( Component.STATE_ACTIVE, component.getState() );
        TestCase.assertNotNull( secondInstance );
        TestCase.assertEquals( PROP_NAME, secondInstance.getProperty( PROP_NAME ) );

        deleteConfig( pid );
        delay();

        // configuration deleted: active again without properties
        final SimpleComponent thirdInstance = SimpleComponent.INSTANCE;
        TestCase.assertEquals( Component.STATE_ACTIVE, component.getState() );
        TestCase.assertNotNull( thirdInstance );
        TestCase.assertNull( thirdInstance.getProperty( PROP_NAME ) );

        // each configuration event must have produced a distinct instance
        TestCase.assertNotSame( "Expect new instance object after reconfiguration", firstInstance, secondInstance );
        TestCase.assertNotSame( "Expect new instance object after configuration deletion (1)", firstInstance,
            thirdInstance );
        TestCase.assertNotSame( "Expect new instance object after configuration deletion (2)", secondInstance,
            thirdInstance );

        component.disable();
        delay();

        TestCase.assertEquals( Component.STATE_DISABLED, component.getState() );
        TestCase.assertNull( SimpleComponent.INSTANCE );
    }

    /**
     * configuration-policy = require: the component stays UNSATISFIED until a
     * configuration exists, activates when one is created, and becomes
     * UNSATISFIED again when it is deleted.
     */
    @Test
    public void test_SimpleComponent_configuration_require()
    {
        final String pid = "SimpleComponent.configuration.require";
        final Component component = findComponentByName( pid );

        deleteConfig( pid );
        delay();

        TestCase.assertNotNull( component );
        TestCase.assertFalse( component.isDefaultEnabled() );
        TestCase.assertEquals( Component.STATE_DISABLED, component.getState() );
        TestCase.assertNull( SimpleComponent.INSTANCE );

        component.enable();
        delay();

        // no configuration yet -> required config missing -> UNSATISFIED
        TestCase.assertEquals( Component.STATE_UNSATISFIED, component.getState() );
        TestCase.assertNull( SimpleComponent.INSTANCE );

        configure( pid );
        delay();

        // configuration created -> component activates with its properties
        TestCase.assertEquals( Component.STATE_ACTIVE, component.getState() );
        TestCase.assertNotNull( SimpleComponent.INSTANCE );
        TestCase.assertEquals( PROP_NAME, SimpleComponent.INSTANCE.getProperty( PROP_NAME ) );

        deleteConfig( pid );
        delay();

        // configuration deleted -> back to UNSATISFIED, instance destroyed
        TestCase.assertEquals( Component.STATE_UNSATISFIED, component.getState() );
        TestCase.assertNull( SimpleComponent.INSTANCE );

        component.disable();
        delay();

        TestCase.assertEquals( Component.STATE_DISABLED, component.getState() );
        TestCase.assertNull( SimpleComponent.INSTANCE );
    }

    /**
     * A component declaring a "modified" method: configuration updates are
     * delivered dynamically to the SAME instance, while configuration deletion
     * still replaces the instance.
     */
    @Test
    public void test_SimpleComponent_dynamic_configuration()
    {
        final String pid = "DynamicConfigurationComponent";
        final Component component = findComponentByName( pid );

        deleteConfig( pid );
        delay();

        TestCase.assertNotNull( component );
        TestCase.assertFalse( component.isDefaultEnabled() );
        TestCase.assertEquals( Component.STATE_DISABLED, component.getState() );
        TestCase.assertNull( SimpleComponent.INSTANCE );

        component.enable();
        delay();

        // active without configuration; service.pid is still set by SCR
        TestCase.assertEquals( Component.STATE_ACTIVE, component.getState() );
        TestCase.assertNotNull( SimpleComponent.INSTANCE );
        TestCase.assertNull( SimpleComponent.INSTANCE.getProperty( PROP_NAME ) );
        TestCase.assertEquals( pid, SimpleComponent.INSTANCE.getProperty( Constants.SERVICE_PID ) );

        final SimpleComponent instance = SimpleComponent.INSTANCE;

        configure( pid );
        delay();

        // dynamic update: same instance, new properties applied in place
        TestCase.assertEquals( Component.STATE_ACTIVE, component.getState() );
        TestCase.assertEquals( instance, SimpleComponent.INSTANCE );
        TestCase.assertEquals( PROP_NAME, SimpleComponent.INSTANCE.getProperty( PROP_NAME ) );
        TestCase.assertEquals( pid, SimpleComponent.INSTANCE.getProperty( Constants.SERVICE_PID ) );

        deleteConfig( pid );
        delay();

        // deletion is NOT dynamic: a new instance without properties appears
        TestCase.assertEquals( Component.STATE_ACTIVE, component.getState() );
        TestCase.assertNotSame( instance, SimpleComponent.INSTANCE );
        TestCase.assertNull( SimpleComponent.INSTANCE.getProperty( PROP_NAME ) );
        TestCase.assertEquals( pid, SimpleComponent.INSTANCE.getProperty( Constants.SERVICE_PID ) );

        component.disable();
        delay();

        TestCase.assertEquals( Component.STATE_DISABLED, component.getState() );
        TestCase.assertNull( SimpleComponent.INSTANCE );
    }

    /**
     * Factory configurations for a component that is NOT enabled by default:
     * each factory configuration yields a component instance, but only
     * explicitly enabled components become active.
     */
    @Test
    public void test_SimpleComponent_factory_configuration()
    {
        final String factoryPid = "FactoryConfigurationComponent";

        deleteFactoryConfigurations( factoryPid );
        delay();

        // one single component exists without configuration
        final Component[] noConfigurations = findComponentsByName( factoryPid );
        TestCase.assertNotNull( noConfigurations );
        TestCase.assertEquals( 1, noConfigurations.length );
        TestCase.assertEquals( Component.STATE_DISABLED, noConfigurations[0].getState() );
        TestCase.assertTrue( SimpleComponent.INSTANCES.isEmpty() );

        // enable the component, configuration required, hence unsatisfied
        noConfigurations[0].enable();
        delay();

        final Component[] enabledNoConfigs = findComponentsByName( factoryPid );
        TestCase.assertNotNull( enabledNoConfigs );
        TestCase.assertEquals( 1, enabledNoConfigs.length );
        TestCase.assertEquals( Component.STATE_UNSATISFIED, enabledNoConfigs[0].getState() );
        TestCase.assertTrue( SimpleComponent.INSTANCES.isEmpty() );

        // create two factory configurations expecting two components
        final String pid0 = createFactoryConfiguration( factoryPid );
        final String pid1 = createFactoryConfiguration( factoryPid );
        delay();

        // expect two components, only first is active, second is disabled
        final Component[] twoConfigs = findComponentsByName( factoryPid );
        TestCase.assertNotNull( twoConfigs );
        TestCase.assertEquals( 2, twoConfigs.length );

        // find the active and inactive configs, fail if none
        // (the array order of the two components is not deterministic)
        int activeConfig;
        int inactiveConfig;
        if ( twoConfigs[0].getState() == Component.STATE_ACTIVE )
        {
            // [0] is active, [1] expected disabled
            activeConfig = 0;
            inactiveConfig = 1;
        }
        else if ( twoConfigs[1].getState() == Component.STATE_ACTIVE )
        {
            // [1] is active, [0] expected disabled
            activeConfig = 1;
            inactiveConfig = 0;
        }
        else
        {
            TestCase.fail( "One of two components expected active" );
            return; // eases the compiler...
        }
        TestCase.assertEquals( Component.STATE_ACTIVE, twoConfigs[activeConfig].getState() );
        TestCase.assertEquals( Component.STATE_DISABLED, twoConfigs[inactiveConfig].getState() );
        TestCase.assertEquals( 1, SimpleComponent.INSTANCES.size() );
        TestCase.assertTrue( SimpleComponent.INSTANCES.containsKey( twoConfigs[activeConfig].getId() ) );
        TestCase.assertFalse( SimpleComponent.INSTANCES.containsKey( twoConfigs[inactiveConfig].getId() ) );

        // enable second component
        twoConfigs[inactiveConfig].enable();
        delay();

        // ensure both components active
        TestCase.assertEquals( Component.STATE_ACTIVE, twoConfigs[0].getState() );
        TestCase.assertEquals( Component.STATE_ACTIVE, twoConfigs[1].getState() );
        TestCase.assertEquals( 2, SimpleComponent.INSTANCES.size() );
        TestCase.assertTrue( SimpleComponent.INSTANCES.containsKey( twoConfigs[0].getId() ) );
        TestCase.assertTrue( SimpleComponent.INSTANCES.containsKey( twoConfigs[1].getId() ) );

        // delete a configuration
        deleteConfig( pid0 );
        delay();

        // expect one component
        final Component[] oneConfig = findComponentsByName( factoryPid );
        TestCase.assertNotNull( oneConfig );
        TestCase.assertEquals( 1, oneConfig.length );
        TestCase.assertEquals( Component.STATE_ACTIVE, oneConfig[0].getState() );
        TestCase.assertEquals( 1, SimpleComponent.INSTANCES.size() );
        TestCase.assertTrue( SimpleComponent.INSTANCES.containsKey( oneConfig[0].getId() ) );

        // delete second configuration
        deleteConfig( pid1 );
        delay();

        // expect a single unsatisfied component
        final Component[] configsDeleted = findComponentsByName( factoryPid );
        TestCase.assertNotNull( configsDeleted );
        TestCase.assertEquals( 1, configsDeleted.length );
        TestCase.assertEquals( Component.STATE_UNSATISFIED, configsDeleted[0].getState() );
        TestCase.assertEquals( 0, SimpleComponent.INSTANCES.size() );
    }

    /**
     * Factory configurations for a component that IS enabled by default:
     * new factory configurations become active immediately, and
     * {@code ComponentContext.disableComponent}/{@code enableComponent} act on
     * all instances sharing the component name.
     */
    @Test
    public void test_SimpleComponent_factory_configuration_enabled()
    {
        final String factoryPid = "FactoryConfigurationComponent_enabled";

        deleteFactoryConfigurations( factoryPid );
        delay();

        // one single component exists without configuration
        final Component[] enabledNoConfigs = findComponentsByName( factoryPid );
        TestCase.assertNotNull( enabledNoConfigs );
        TestCase.assertEquals( 1, enabledNoConfigs.length );
        TestCase.assertEquals( Component.STATE_UNSATISFIED, enabledNoConfigs[0].getState() );
        TestCase.assertTrue( SimpleComponent.INSTANCES.isEmpty() );

        // create two factory configurations expecting two components
        // NOTE(review): pid0/pid1 are unused below; the configurations are
        // only created for their side effect of spawning component instances
        final String pid0 = createFactoryConfiguration( factoryPid );
        final String pid1 = createFactoryConfiguration( factoryPid );
        delay();

        // expect two components, all active
        final Component[] twoConfigs = findComponentsByName( factoryPid );
        TestCase.assertNotNull( twoConfigs );
        TestCase.assertEquals( 2, twoConfigs.length );
        TestCase.assertEquals( Component.STATE_ACTIVE, twoConfigs[0].getState() );
        TestCase.assertEquals( Component.STATE_ACTIVE, twoConfigs[1].getState() );
        TestCase.assertEquals( 2, SimpleComponent.INSTANCES.size() );
        TestCase.assertTrue( SimpleComponent.INSTANCES.containsKey( twoConfigs[0].getId() ) );
        TestCase.assertTrue( SimpleComponent.INSTANCES.containsKey( twoConfigs[1].getId() ) );

        // disable the components by name, through any live instance's context
        SimpleComponent.INSTANCES.values().iterator().next().m_activateContext.disableComponent( factoryPid );
        delay();

        // expect two disabled components
        final Component[] twoConfigsDisabled = findComponentsByName( factoryPid );
        TestCase.assertNotNull( twoConfigsDisabled );
        TestCase.assertEquals( 2, twoConfigsDisabled.length );
        TestCase.assertEquals( Component.STATE_DISABLED, twoConfigsDisabled[0].getState() );
        TestCase.assertEquals( Component.STATE_DISABLED, twoConfigsDisabled[1].getState() );
        TestCase.assertEquals( 0, SimpleComponent.INSTANCES.size() );
        TestCase.assertFalse( SimpleComponent.INSTANCES.containsKey( twoConfigs[0].getId() ) );
        TestCase.assertFalse( SimpleComponent.INSTANCES.containsKey( twoConfigs[1].getId() ) );

        // create a configuration
        // NOTE(review): pid3 is also unused; created only for its side effect
        final String pid3 = createFactoryConfiguration( factoryPid );
        delay();

        // expect three disabled components (new config inherits disabled state)
        final Component[] threeConfigsDisabled = findComponentsByName( factoryPid );
        TestCase.assertNotNull( threeConfigsDisabled );
        TestCase.assertEquals( 3, threeConfigsDisabled.length );
        TestCase.assertEquals( Component.STATE_DISABLED, threeConfigsDisabled[0].getState() );
        TestCase.assertEquals( Component.STATE_DISABLED, threeConfigsDisabled[1].getState() );
        TestCase.assertEquals( Component.STATE_DISABLED, threeConfigsDisabled[2].getState() );
        TestCase.assertEquals( 0, SimpleComponent.INSTANCES.size() );
        TestCase.assertFalse( SimpleComponent.INSTANCES.containsKey( threeConfigsDisabled[0].getId() ) );
        TestCase.assertFalse( SimpleComponent.INSTANCES.containsKey( threeConfigsDisabled[1].getId() ) );
        TestCase.assertFalse( SimpleComponent.INSTANCES.containsKey( threeConfigsDisabled[2].getId() ) );

        // enable a single component (to get ComponentContext later)
        threeConfigsDisabled[0].enable();
        delay();

        // expect one enabled and two disabled components
        final Component[] threeConfigs21 = findComponentsByName( factoryPid );
        TestCase.assertNotNull( threeConfigs21 );
        TestCase.assertEquals( 3, threeConfigs21.length );
        TestCase.assertEquals( Component.STATE_ACTIVE, threeConfigs21[0].getState() );
        TestCase.assertEquals( Component.STATE_DISABLED, threeConfigs21[1].getState() );
        TestCase.assertEquals( Component.STATE_DISABLED, threeConfigs21[2].getState() );
        TestCase.assertEquals( 1, SimpleComponent.INSTANCES.size() );
        TestCase.assertTrue( SimpleComponent.INSTANCES.containsKey( threeConfigs21[0].getId() ) );
        TestCase.assertFalse( SimpleComponent.INSTANCES.containsKey( threeConfigs21[1].getId() ) );
        TestCase.assertFalse( SimpleComponent.INSTANCES.containsKey( threeConfigs21[2].getId() ) );

        // enable all components now
        SimpleComponent.INSTANCES.values().iterator().next().m_activateContext.enableComponent( factoryPid );
        delay();

        // expect all enabled
        final Component[] threeConfigsEnabled = findComponentsByName( factoryPid );
        TestCase.assertNotNull( threeConfigsEnabled );
        TestCase.assertEquals( 3, threeConfigsEnabled.length );
        TestCase.assertEquals( Component.STATE_ACTIVE, threeConfigsEnabled[0].getState() );
        TestCase.assertEquals( Component.STATE_ACTIVE, threeConfigsEnabled[1].getState() );
        TestCase.assertEquals( Component.STATE_ACTIVE, threeConfigsEnabled[2].getState() );
        TestCase.assertEquals( 3, SimpleComponent.INSTANCES.size() );
        TestCase.assertTrue( SimpleComponent.INSTANCES.containsKey( threeConfigsEnabled[0].getId() ) );
        TestCase.assertTrue( SimpleComponent.INSTANCES.containsKey( threeConfigsEnabled[1].getId() ) );
        TestCase.assertTrue( SimpleComponent.INSTANCES.containsKey( threeConfigsEnabled[2].getId() ) );
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.karaf.eik.wtp.core.server;

import org.apache.karaf.eik.workbench.MBeanProvider;
import org.apache.karaf.eik.wtp.core.KarafServerLaunchConfigurationInitializer;
import org.apache.karaf.eik.wtp.core.KarafWtpPluginActivator;

import java.io.IOException;

import javax.management.ObjectName;

import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.debug.core.DebugException;
import org.eclipse.debug.core.ILaunch;
import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
import org.eclipse.wst.server.core.IServer;
import org.eclipse.wst.server.core.model.ServerBehaviourDelegate;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.osgi.jmx.framework.FrameworkMBean;
import org.osgi.util.tracker.ServiceTracker;
import org.osgi.util.tracker.ServiceTrackerCustomizer;

/**
 * WTP server behaviour for a Karaf server instance. Tracks the
 * {@link MBeanProvider} service registered by the launched framework to detect
 * when the server is operational, and shuts the framework down via the OSGi
 * JMX {@link FrameworkMBean} when the server is stopped.
 */
public class KarafServerBehavior extends ServerBehaviourDelegate {

    /**
     * Tracks {@link MBeanProvider} registrations and matches them to this
     * server instance via the launch-configuration memento.
     */
    private final class MBeanProviderServiceTracker implements ServiceTrackerCustomizer {

        private final ServiceTracker serviceTracker;

        public MBeanProviderServiceTracker() {
            final BundleContext context = KarafWtpPluginActivator.getDefault().getBundle().getBundleContext();
            serviceTracker = new ServiceTracker(context, MBeanProvider.class.getName(), this);
            serviceTracker.open();
        }

        /**
         * Adds the {@link MBeanProvider} service to this server instance. This
         * service is how the workbench interacts with the running server. It
         * also serves as a sentry that indicates the server is operational.
         *
         * @param reference
         *            the {@link MBeanProvider} service reference
         * @return the service object when it belongs to this server, otherwise
         *         {@code null} so the tracker ignores the registration
         */
        @Override
        public Object addingService(final ServiceReference reference) {
            final String serviceMemento = (String) reference.getProperty(MBeanProvider.KARAF_WORKBENCH_SERVICES_ID);
            // Guard against a missing memento property (and against our own
            // memento not being set yet); the unguarded equals() call could
            // throw an NPE for unrelated MBeanProvider registrations.
            if (serviceMemento != null && serviceMemento.equals(memento)) {
                final Object o = reference.getBundle().getBundleContext().getService(reference);

                // the provider appearing means the framework is up and running
                setServerState(IServer.STATE_STARTED);

                mbeanProvider = (MBeanProvider) o;

                return o;
            }

            return null;
        }

        public void close() {
            serviceTracker.close();
        }

        @Override
        public void modifiedService(final ServiceReference reference, final Object service) {
        }

        /**
         * When this server's {@link MBeanProvider} goes away the framework has
         * shut down (or crashed): forcibly terminate the launch.
         */
        @Override
        public void removedService(final ServiceReference reference, final Object service) {
            final String serviceMemento = (String) reference.getProperty(MBeanProvider.KARAF_WORKBENCH_SERVICES_ID);
            // same null-safe match as in addingService
            if (serviceMemento != null && serviceMemento.equals(memento)) {
                mbeanProvider = null;
                terminate();
            }
        }
    }

    /** ObjectName of the standard OSGi JMX framework MBean. */
    private static final ObjectName FRAMEWORK;

    static {
        /*
         * If this throws an exception we're in trouble because it means that the
         * constants are invalid
         */
        try {
            FRAMEWORK = new ObjectName("osgi.core:type=framework,version=1.5");
        } catch (final Exception e) {
            throw new IllegalStateException("The OSGi JMX implementation references an invalid ObjectName", e);
        }
    }

    /** Delay before polling the launch for termination after a JMX shutdown. */
    private static final int SERVER_TERMINATE_JOB_SCHEDULE_DELAY = 5000;

    /** Set by the tracker once the matching MBeanProvider service appears. */
    private volatile MBeanProvider mbeanProvider;

    private final MBeanProviderServiceTracker mbeanProviderServiceTracker = new MBeanProviderServiceTracker();

    /** Memento of the launch configuration; identifies "our" MBeanProvider. */
    private volatile String memento;

    /**
     * Records launch state for this server and remembers the launch
     * configuration memento used to pair the {@link MBeanProvider} service
     * with this server instance.
     *
     * @param launch the launch being configured
     * @param launchMode the mode (run/debug) the launch was started in
     * @param monitor progress monitor; two units of work are reported
     * @throws CoreException if the launch configuration memento cannot be computed
     */
    public void configureLaunch(final ILaunch launch, final String launchMode, final IProgressMonitor monitor)
            throws CoreException {
        setServerRestartState(false);
        setServerState(IServer.STATE_STARTING);
        setMode(launchMode);

        monitor.worked(1);

        memento = launch.getLaunchConfiguration().getMemento();

        monitor.worked(1);
    }

    @Override
    public void setupLaunchConfiguration(final ILaunchConfigurationWorkingCopy workingCopy, IProgressMonitor monitor)
            throws CoreException {
        super.setupLaunchConfiguration(workingCopy, monitor);

        if (monitor == null) {
            monitor = new NullProgressMonitor();
        }

        KarafServerLaunchConfigurationInitializer.initializeConfiguration(workingCopy);

        monitor.worked(10);
    }

    /**
     * Stops the server. Unless {@code force} is set or the server has not yet
     * fully started, a graceful shutdown is attempted via the JMX
     * {@link FrameworkMBean}, followed by a delayed job that terminates the
     * launch and marks the server stopped.
     *
     * @param force whether to terminate the launch immediately
     */
    @Override
    public void stop(final boolean force) {
        mbeanProviderServiceTracker.close();

        if (force) {
            terminate();
            return;
        }

        final int state = getServer().getServerState();
        if (state == IServer.STATE_STOPPED || state == IServer.STATE_STOPPING) {
            // already stopping/stopped: nothing to do
            return;
        } else if (state == IServer.STATE_STARTING) {
            // not operational yet; the JMX channel may not exist, so just kill it
            terminate();
            return;
        } else {
            setServerState(IServer.STATE_STOPPING);

            try {
                if (mbeanProvider != null && mbeanProvider.isOpen()) {
                    mbeanProvider.getMBean(FRAMEWORK, FrameworkMBean.class).shutdownFramework();
                    mbeanProvider.close();
                }
            } catch (final IOException e) {
                // Best effort: the JMX connection may already be gone because
                // the framework is shutting down; fall through to the
                // termination job below.
            }

            final Job j = new Job("Waiting for server to stop...") {
                @Override
                protected IStatus run(final IProgressMonitor monitor) {
                    try {
                        final ILaunch launch = getServer().getLaunch();
                        if (launch != null) {
                            launch.terminate();
                        }
                    } catch (final DebugException e) {
                        // Do nothing
                    }

                    setServerState(IServer.STATE_STOPPED);

                    return Status.OK_STATUS;
                }
            };

            j.setSystem(true);
            j.schedule(SERVER_TERMINATE_JOB_SCHEDULE_DELAY);
        }
    }

    @Override
    protected void publishServer(final int kind, final IProgressMonitor monitor) throws CoreException {
        if (getServer().getRuntime() == null) {
            return;
        }

        monitor.done();

        setServerPublishState(IServer.PUBLISH_STATE_NONE);
    }

    /**
     * Terminates the launcher forcibly without regard to application state.
     */
    protected void terminate() {
        if (getServer().getServerState() == IServer.STATE_STOPPED) {
            return;
        }

        try {
            setServerState(IServer.STATE_STOPPING);

            final ILaunch launch = getServer().getLaunch();
            if (launch != null) {
                launch.terminate();
            }
        } catch (final Exception e) {
            // Ignore as this is forcibly terminating the server
        } finally {
            setServerState(IServer.STATE_STOPPED);
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership.  The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.hadoop.ozone.container.common.states.endpoint;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto
    .StorageContainerDatanodeProtocolProtos.PipelineReportsProto;
import org.apache.hadoop.ozone.container.common.statemachine
    .EndpointStateMachine;
import org.apache.hadoop.hdds.protocol.proto
    .StorageContainerDatanodeProtocolProtos.NodeReportProto;
import org.apache.hadoop.hdds.protocol.proto
    .StorageContainerDatanodeProtocolProtos.ContainerReportsProto;
import org.apache.hadoop.hdds.protocol.proto
    .StorageContainerDatanodeProtocolProtos.SCMRegisteredResponseProto;
import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;

/**
 * Register a datanode with SCM.
 */
public final class RegisterEndpointTask implements
    Callable<EndpointStateMachine.EndPointStates> {
  static final Logger LOG = LoggerFactory.getLogger(RegisterEndpointTask.class);

  private final EndpointStateMachine rpcEndPoint;
  private final Configuration conf;
  private Future<EndpointStateMachine.EndPointStates> result;
  // Mutable on purpose: injected after construction via setDatanodeDetails.
  private DatanodeDetails datanodeDetails;
  private final OzoneContainer datanodeContainerManager;
  // Assigned only in the constructor; final prevents accidental reassignment.
  private final StateContext stateContext;

  /**
   * Creates a register endpoint task.
   *
   * @param rpcEndPoint - endpoint state machine to register through
   * @param conf - configuration
   * @param ozoneContainer - container manager supplying node/container/pipeline
   *                         reports for the registration request
   * @param context - state context, used to (re)configure heartbeat frequency
   *                  after a successful registration
   */
  @VisibleForTesting
  public RegisterEndpointTask(EndpointStateMachine rpcEndPoint,
      Configuration conf, OzoneContainer ozoneContainer,
      StateContext context) {
    this.rpcEndPoint = rpcEndPoint;
    this.conf = conf;
    this.datanodeContainerManager = ozoneContainer;
    this.stateContext = context;
  }

  /**
   * Get the DatanodeDetails.
   *
   * @return DatanodeDetailsProto
   */
  public DatanodeDetails getDatanodeDetails() {
    return datanodeDetails;
  }

  /**
   * Set the DatanodeDetails to register with.
   *
   * @param datanodeDetails - Datanode identity/location details.
   */
  public void setDatanodeDetails(
      DatanodeDetails datanodeDetails) {
    this.datanodeDetails = datanodeDetails;
  }

  /**
   * Registers this datanode with SCM: sends the node, container and pipeline
   * reports, validates the response, applies any hostname/IP returned by SCM,
   * and advances the endpoint state machine on success.
   *
   * @return the endpoint state after the attempt (SHUTDOWN when no
   *         DatanodeDetails was set; unchanged on IOException)
   * @throws Exception if unable to compute a result
   */
  @Override
  public EndpointStateMachine.EndPointStates call() throws Exception {

    if (getDatanodeDetails() == null) {
      LOG.error("DatanodeDetails cannot be null in RegisterEndpoint task, "
          + "shutting down the endpoint.");
      return rpcEndPoint.setState(EndpointStateMachine.EndPointStates.SHUTDOWN);
    }

    rpcEndPoint.lock();
    try {

      ContainerReportsProto containerReport = datanodeContainerManager
          .getContainerReport();
      NodeReportProto nodeReport = datanodeContainerManager.getNodeReport();
      PipelineReportsProto pipelineReportsProto =
          datanodeContainerManager.getPipelineReport();
      // TODO : Add responses to the command Queue.
      SCMRegisteredResponseProto response = rpcEndPoint.getEndPoint()
          .register(datanodeDetails.getProtoBufMessage(), nodeReport,
              containerReport, pipelineReportsProto);
      // Sanity-check that SCM answered for this datanode and cluster.
      Preconditions.checkState(UUID.fromString(response.getDatanodeUUID())
              .equals(datanodeDetails.getUuid()),
          "Unexpected datanode ID in the response.");
      Preconditions.checkState(!StringUtils.isBlank(response.getClusterID()),
          "Invalid cluster ID in the response.");
      if (response.hasHostname() && response.hasIpAddress()) {
        // SCM may canonicalize our address; adopt its view.
        datanodeDetails.setHostName(response.getHostname());
        datanodeDetails.setIpAddress(response.getIpAddress());
      }
      EndpointStateMachine.EndPointStates nextState =
          rpcEndPoint.getState().getNextState();
      rpcEndPoint.setState(nextState);
      rpcEndPoint.zeroMissedCount();
      this.stateContext.configureHeartbeatFrequency();
    } catch (IOException ex) {
      // Logged (rate-limited) and retried by the state machine; state unchanged.
      rpcEndPoint.logIfNeeded(ex);
    } finally {
      rpcEndPoint.unlock();
    }

    return rpcEndPoint.getState();
  }

  /**
   * Returns a builder class for RegisterEndPoint task.
   *
   * @return Builder.
   */
  public static Builder newBuilder() {
    return new Builder();
  }

  /**
   * Builder class for RegisterEndPoint task.
   */
  public static class Builder {
    private EndpointStateMachine endPointStateMachine;
    private Configuration conf;
    private DatanodeDetails datanodeDetails;
    private OzoneContainer container;
    private StateContext context;

    /**
     * Constructs the builder class.
     */
    public Builder() {
    }

    /**
     * Sets the endpoint state machine.
     *
     * @param rpcEndPoint - Endpoint state machine.
     * @return Builder
     */
    public Builder setEndpointStateMachine(EndpointStateMachine rpcEndPoint) {
      this.endPointStateMachine = rpcEndPoint;
      return this;
    }

    /**
     * Sets the Config.
     *
     * @param config - config
     * @return Builder.
     */
    public Builder setConfig(Configuration config) {
      this.conf = config;
      return this;
    }

    /**
     * Sets the DatanodeDetails.
     *
     * @param dnDetails - Datanode details
     * @return Builder
     */
    public Builder setDatanodeDetails(DatanodeDetails dnDetails) {
      this.datanodeDetails = dnDetails;
      return this;
    }

    /**
     * Sets the ozonecontainer.
     * @param ozoneContainer - container manager
     * @return Builder
     */
    public Builder setOzoneContainer(OzoneContainer ozoneContainer) {
      this.container = ozoneContainer;
      return this;
    }

    /**
     * Sets the state context.
     * @param stateContext - state context
     * @return Builder
     */
    public Builder setContext(StateContext stateContext) {
      this.context = stateContext;
      return this;
    }

    /**
     * Validates all required collaborators and builds the task.
     *
     * @return a fully-initialized RegisterEndpointTask
     * @throws IllegalArgumentException if any required collaborator is missing
     */
    public RegisterEndpointTask build() {
      if (endPointStateMachine == null) {
        LOG.error("No endpoint specified.");
        throw new IllegalArgumentException("A valid endpoint state machine is"
            + " needed to construct RegisterEndPoint task");
      }

      if (conf == null) {
        LOG.error("No config specified.");
        throw new IllegalArgumentException(
            "A valid configuration is needed to construct RegisterEndpoint "
                + "task");
      }

      if (datanodeDetails == null) {
        LOG.error("No datanode specified.");
        // fixed typo: "vaild" -> "valid"
        throw new IllegalArgumentException("A valid Node ID is needed to "
            + "construct RegisterEndpoint task");
      }

      if (container == null) {
        LOG.error("Container is not specified");
        throw new IllegalArgumentException("Container is not specified to "
            + "construct RegisterEndpoint task");
      }

      if (context == null) {
        LOG.error("StateContext is not specified");
        // fixed copy-paste bug: this branch previously reported the
        // *Container* as missing, masking the real cause
        throw new IllegalArgumentException("StateContext is not specified to "
            + "construct RegisterEndpoint task");
      }

      RegisterEndpointTask task = new RegisterEndpointTask(this
          .endPointStateMachine, this.conf, this.container, this.context);
      task.setDatanodeDetails(datanodeDetails);
      return task;
    }
  }
}
package com.bluedot.commons.security; import java.util.Iterator; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.Lob; import javax.persistence.Transient; import org.apache.commons.lang3.StringEscapeUtils; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import com.bluedot.commons.error.APIException; import com.bluedot.commons.error.APIException.APIErrors; import com.play4jpa.jpa.models.Model; @Entity public class Settings extends Model<Settings> { private static final long serialVersionUID = -1796695127142342338L; @Id @GeneratedValue private long id; @Lob public String jsonSettings = "{}"; @Transient private SettingsPrototype parent; public enum DefaultSetting { // CheckInTimeHour(Constants.CheckInTimeHour, "16"), // CheckOutTimeHour(Constants.CheckOutTimeHour, "11"), // CheckInTimeMinute(Constants.CheckInTimeMinute, "00"), // CheckOutTimeMinute(Constants.CheckOutTimeMinute, "00"), // ThermostatConditioningActive(Constants.THERMOSTAT_CONDITIONING_ACTIVE, false), // BeforeReservationTemp(Constants.BeforeReservationTemp, "70"), // LightsConditioningActive(Constants.LIGHTS_CONDITIONING_ACTIVE, false), // AfterReservationActivateThermostatMinute(Constants.AfterReservationActivateThermostatMinute, "30"), // AfterReservationActivateThermostatHour(Constants.AfterReservationActivateThermostatHour, "0"), // BeforeReservationActivateThermostatHour(Constants.BeforeReservationActivateThermostatHour, "1"), // BeforeReservationActivateThermostatMinute(Constants.BeforeReservationActivateThermostatMinute, "0"), // BeforeReservationTurnLightsOnHour(Constants.BeforeReservationTurnLightsOnHour, "1"), // BeforeReservationTurnLightsOnMinute(Constants.BeforeReservationTurnLightsOnMinute, "0"), // AfterReservationTurnLightsOffHour(Constants.AfterReservationTurnLightsOffHour, "0"), // AfterReservationTurnLightsOffMinute(Constants.AfterReservationTurnLightsOffMinute, "30"), 
// PreviousCheckInRunHour(Constants.PreviousCheckInRunHour, "6"), // PreviousCheckInHour(Constants.PreviousCheckInHour, 10), // ThermostatRangeActive(Constants.THERMOSTAT_RANGE_ACTIVE, false), // ThermostatMaxTemp(Constants.ThermostatMaxTemp, "80"), // ThermostatMinTemp(Constants.ThermostatMinTemp, "55"), timezone(Constants.Timezone,"EST"); // autoCheckInEnabled(Constants.AutoCheckInEnabled,false), // autoCheckOutEnabled(Constants.AutoCheckOutEnabled,false), // NotifyGuestOnEmergencyCodeActivated(Constants.NOTIFY_GUEST_ON_EMERGENCY_CODE_ACTIVATED, false), // NotifyGuestOnPropertyReady(Constants.NOTIFY_GUEST_ON_PROPERTY_READY, false), // NotifyGuest(Constants.NOTIFY_GUEST, true), // KabaNormalStayLevel(Constants.KabaNormalStayLevel, 0), // KabaEarlyCheckInLevel(Constants.KabaEarlyCheckInLevel, 1), // KabaMaxQuantityPincodes(Constants.KabaMaxQuantityPincodes,3), // KabaReservationsEnabled(Constants.KabaReservationsEnabled,false), // // // PoolControllerDoConditioning(Constants.POOL_CONTROLLER_DO_CONDITIONING, false), // // PoolControllerUnoccupiedStartTimeHour(Constants.POOL_CONTROLLER_UNOCCUPIED_START_TIME_HOUR,10), // PoolControllerUnoccupiedStartTimeMInute(Constants.POOL_CONTROLLER_UNOCCUPIED_START_TIME_MINUTE,0), // PoolControllerUnoccupiedEndTimeHour(Constants.POOL_CONTROLLER_UNOCCUPIED_END_TIME_HOUR,12), // PoolControllerUnoccupiedEndTimeMinute(Constants.POOL_CONTROLLER_UNOCCUPIED_END_TIME_MINUTE,0), // PoolControllerUnoccupiedSpeed(Constants.POOL_CONTROLLER_UNOCCUPIED_POOL_SPEED,PoolSpeed.LOW.toString()), // // PoolControllerOccupiedStartTimeHour(Constants.POOL_CONTROLLER_OCCUPIED_START_TIME_HOUR,10), // PoolControllerOccupiedStartTimeMinute(Constants.POOL_CONTROLLER_OCCUPIED_START_TIME_MINUTE,0), // PoolControllerOccupiedEndTimeHour(Constants.POOL_CONTROLLER_OCCUPIED_END_TIME_HOUR,16), // PoolControllerOccupiedEndTimeMinute(Constants.POOL_CONTROLLER_OCCUPIED_END_TIME_MINUTE,0), // 
PoolControllerOccupiedSpeed(Constants.POOL_CONTROLLER_OCCUPIED_POOL_SPEED,PoolSpeed.HI.toString()), // // PoolControllerPreRentalConditioner(Constants.POOL_CONTROLLER_PRE_RENTAL_CONDITIONER_ACTIVE,true), // PoolControllerPreRentalSpaTemp(Constants.POOL_CONTROLLER_SPA_TEMP,75), // PoolControllerPreRentalPoolTemp(Constants.POOL_CONTROLLER_POOL_TEMP,75), // PoolControllerPreRentalPoolSpeed(Constants.POOL_CONTROLLER_PRE_RENTAL_POOL_SPEED,PoolSpeed.HI.toString()), // // PoolControllerPostRentalConditioner(Constants.POOL_CONTROLLER_POST_RENTAL_CONDITIONER_ACTIVE,true), // PoolControllerPostRentalPoolSpeed(Constants.POOL_CONTROLLER_POST_RENTAL_POOL_SPEED,PoolSpeed.OFF.toString()), // sendCheckOutLinksEnabled(Constants.SEND_CHECK_OUT_LINKS_ENABLED, false), // guestAfterCheckOutMessageEnabled(Constants.GUEST_AFTER_CHECKOUT_MESSAGE_ENABLED, false), // // thermostatSeasonMode(Constants.THERMOSTAT_SEASON_MODE, "SUMMER"), // cleaningScheduleType(Constants.CLEANING_SCHEDULE_TYPE, "FULL"), // qaScheduleType(Constants.QA_SCHEDULE_TYPE, "FULL"); private String key; private Object value; private DefaultSetting(String key, Object value) { this.key = key; this.value = value; } public String getKey() { return key; } public Object getValue() { return value; } } public long getId() { return id; } public void setId(long id) { this.id = id; } public JSONObject getJsonSettingsObject() { return this.getJsonSettingsObject(null); } public JSONObject getJsonSettingsObject(String schema) { JSONObject Obj1; JSONObject merged = null; try { if (parent == null) { merged = new JSONObject(); } else { Obj1 = parent.getSettings().getJsonSettingsObject(schema); if(Obj1 != null) { String[] names = JSONObject.getNames(Obj1); if(names != null) merged = new JSONObject(Obj1, names); }else merged = new JSONObject(); } JSONObject Obj2 = getJsonObject(schema); if (JSONObject.getNames(Obj2) != null) { for (String key : JSONObject.getNames(Obj2)) { merged.put(key, Obj2.get(key)); } } return merged; } catch 
(JSONException e) { e.printStackTrace(); return new JSONObject(); } } public String getJsonSettings(String schema) { JSONObject Obj1; JSONObject merged = null; try { if (parent == null) { merged = new JSONObject(); } else { Obj1 = parent.getSettings().getJsonSettingsObject(schema); if(Obj1 != null) { String[] names = JSONObject.getNames(Obj1); if(names != null) merged = new JSONObject(Obj1, names); }else merged = new JSONObject(); } JSONObject Obj2 = getJsonObject(schema); if (JSONObject.getNames(Obj2) != null) { for (String key : JSONObject.getNames(Obj2)) { merged.put(key, Obj2.get(key)); } } return merged.toString(); } catch (JSONException e) { e.printStackTrace(); return "{}"; } } public void setJsonSettings(String json) { this.jsonSettings = json; } /** * Returns local Settings, not all the hierarchy. * @return */ public JSONObject getJsonObject() { return this.getJsonObject(null); } /** * Returns local Settings, not all the hierarchy. * @return */ public JSONObject getJsonObject(String schema) { try { JSONObject json = new JSONObject(StringEscapeUtils.unescapeJava(jsonSettings)); if (schema==null || schema.equals("")) return json; JSONObject result = new JSONObject(); for (@SuppressWarnings("unchecked") Iterator<String> iterator = json.keys(); iterator.hasNext();) { String key = iterator.next(); if (key.startsWith(schema+"_")) result.put(key, json.get(key)); } return result; } catch (JSONException e) { e.printStackTrace(); return null; } } public int getInt(String key) { return Integer.parseInt(getSetting(key)); } public double getDouble(String key) { return Double.parseDouble(getSetting(key)); } public String getString(String key) { return StringEscapeUtils.unescapeHtml4(getSetting(key)); } public void setSetting(String key, Object value) { try { JSONObject obj = getJsonObject(); if (value instanceof String) { String escaped = StringEscapeUtils.escapeHtml4(value.toString()); obj.put(key, escaped); } else { obj.put(key, value); } 
setJsonSettings(obj.toString()); } catch (JSONException e) { e.printStackTrace(); } } public void unsetSetting(String key) { JSONObject obj = getJsonObject(); if (obj.has(key)) obj.remove(key); setJsonSettings(obj.toString()); } public void updateSettings(JSONObject settings) throws APIException { this.updateSettings(settings, null); } public void updateSettings(JSONObject settings, String schema) throws APIException { for (@SuppressWarnings("unchecked") Iterator<String> keys = settings.keys(); keys.hasNext();) { String key = keys.next(); try { if (schema != null && !schema.equals("")) { if (key.startsWith(schema)) this.setSetting(key, settings.get(key)); else throw APIException.raise(APIErrors.SETTING_SCHEMA_ERROR); } else this.setSetting(key, settings.get(key)); } catch (JSONException e) { e.printStackTrace(); } } } public void unsetSettings(JSONObject settings) throws APIException { this.unsetSettings(settings, null); } public void unsetSettings(JSONObject settings, String schema) throws APIException { for (@SuppressWarnings("unchecked") Iterator<String> keys = settings.keys(); keys.hasNext();) { String key = keys.next(); if (schema != null && !schema.equals("")) { if (key.startsWith(schema)) this.unsetSetting(key); else throw APIException.raise(APIErrors.SETTING_SCHEMA_ERROR); } else this.unsetSetting(key); } } public void setString(String key, String value) { try { getJsonObject().put(key, value); } catch (JSONException e) { e.printStackTrace(); } } public boolean getBool(String key) { return Boolean.parseBoolean(getSetting(key, "false")); } public void setBool(String key, boolean value) { try { getJsonObject().put(key, value); } catch (JSONException e) { e.printStackTrace(); } } public JSONArray getArray(String key) { try { return new JSONArray(getSetting(key)); } catch (JSONException e) { e.printStackTrace(); } return null; } public JSONObject getObject(String key) { try { return new JSONObject(getSetting(key)); } catch (JSONException e) { 
e.printStackTrace(); } return null; } public boolean has(String key) { if (parent == null) return getJsonObject().has(key); else return getJsonObject().has(key) || parent.getSettings().has(key); } private String getSetting(String key) { return getSetting(key, null); } private String getSetting(String key, String defaultValue) { try { if (parent == null) if (!getJsonObject().has(key)) return defaultValue; else return getJsonObject().get(key).toString(); else return getJsonObject().has(key) ? getJsonObject().get(key).toString() : parent.getSettings().getSetting(key, defaultValue); } catch (JSONException e) { e.printStackTrace(); return null; } } public void setParent(SettingsPrototype parent) { this.parent = parent; } public SettingsPrototype getParent() { return parent; } }
/*
 * Copyright 2012 Thomas Bocek
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package net.tomp2p.dht;

import java.security.PublicKey;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;

import net.tomp2p.peers.Number160;
import net.tomp2p.peers.Number320;
import net.tomp2p.peers.Number480;
import net.tomp2p.peers.Number640;
import net.tomp2p.storage.Data;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * In-memory {@link Storage} implementation backed by concurrent maps: a sorted
 * data map keyed by {@link Number640}, timeout bookkeeping (forward and reverse),
 * domain/entry protection keys, and peer-responsibility tracking.
 *
 * NOTE on snapshots: {@code new TreeMap<>(concurrentSubMap)} must NOT be used
 * here. TreeMap.buildFromSorted reads the size up front and then iterates, so a
 * concurrent size change makes next() run past the end and throw
 * NoSuchElementException. Snapshots are therefore taken entry-by-entry or via a
 * ConcurrentSkipListMap copy.
 */
public class StorageMemory implements Storage {

    public static final int DEFAULT_STORAGE_CHECK_INTERVAL = 60 * 1000;
    public static final int DEFAULT_MAX_VERSIONS = -1;

    private static final Logger LOG = LoggerFactory.getLogger(StorageMemory.class);

    // Core
    final private NavigableMap<Number640, Data> dataMap = new ConcurrentSkipListMap<Number640, Data>();

    // Maintenance: key -> expiration, and expiration -> keys (for range scans)
    final private Map<Number640, Long> timeoutMap = new ConcurrentHashMap<Number640, Long>();
    final private ConcurrentSkipListMap<Long, Set<Number640>> timeoutMapRev = new ConcurrentSkipListMap<Long, Set<Number640>>();

    // Protection
    final private Map<Number320, PublicKey> protectedMap = new ConcurrentHashMap<Number320, PublicKey>();
    final private Map<Number480, PublicKey> entryMap = new ConcurrentHashMap<Number480, PublicKey>();

    // Responsibility: locationKey -> peer, and peer -> locationKeys
    final private Map<Number160, Number160> responsibilityMap = new ConcurrentHashMap<Number160, Number160>();
    final private Map<Number160, Set<Number160>> responsibilityMapRev = new ConcurrentHashMap<Number160, Set<Number160>>();

    final int storageCheckIntervalMillis;
    final int maxVersions;

    public StorageMemory() {
        this(DEFAULT_STORAGE_CHECK_INTERVAL, DEFAULT_MAX_VERSIONS);
    }

    public StorageMemory(int storageCheckIntervalMillis) {
        this(storageCheckIntervalMillis, DEFAULT_MAX_VERSIONS);
    }

    public StorageMemory(int storageCheckIntervalMillis, int maxVersions) {
        this.storageCheckIntervalMillis = storageCheckIntervalMillis;
        this.maxVersions = maxVersions;
    }

    // Core
    /**
     * Stores a value and, when version limiting is enabled (maxVersions > 0),
     * evicts versions whose version-key timestamp is too far behind the newest
     * one within the same (location, domain, content) range.
     */
    @Override
    public Data put(Number640 key, Data value) {
        final Data oldData = dataMap.put(key, value);
        if (maxVersions > 0) {
            NavigableMap<Number640, Data> versions = dataMap.subMap(
                    new Number640(key.locationKey(), key.domainKey(), key.contentKey(), Number160.ZERO), true,
                    new Number640(key.locationKey(), key.domainKey(), key.contentKey(), Number160.MAX_VALUE), true);

            while (!versions.isEmpty()
                    && versions.firstKey().versionKey().timestamp() + maxVersions
                       <= versions.lastKey().versionKey().timestamp()) {
                Map.Entry<Number640, Data> entry = versions.pollFirstEntry();
                Data removed = remove(entry.getKey(), false);
                if (removed != null) {
                    removed.release();
                }
                removeTimeout(entry.getKey());
            }
        }
        return oldData;
    }

    @Override
    public Data get(Number640 key) {
        return dataMap.get(key);
    }

    @Override
    public boolean contains(Number640 key) {
        return dataMap.containsKey(key);
    }

    @Override
    public int contains(Number640 fromKey, Number640 toKey) {
        NavigableMap<Number640, Data> tmp = dataMap.subMap(fromKey, true, toKey, true);
        return tmp.size();
    }

    @Override
    public Data remove(Number640 key, boolean returnData) {
        // NOTE(review): returnData is ignored here; the removed value is always
        // returned (callers such as put() rely on it to release the data).
        return dataMap.remove(key);
    }

    /**
     * Removes and returns all entries in [fromKey, toKey]. The snapshot is
     * taken via ConcurrentSkipListMap (not TreeMap) — see the class-level NOTE.
     */
    @Override
    public NavigableMap<Number640, Data> remove(Number640 fromKey, Number640 toKey) {
        NavigableMap<Number640, Data> tmp = dataMap.subMap(fromKey, true, toKey, true);
        final NavigableMap<Number640, Data> retVal = new ConcurrentSkipListMap<Number640, Data>(tmp);
        tmp.clear();
        return retVal;
    }

    /**
     * Returns up to {@code limit} entries of [fromKey, toKey] in ascending or
     * descending order; a negative limit returns the whole (cloned) range.
     */
    @Override
    public NavigableMap<Number640, Data> subMap(Number640 fromKey, Number640 toKey, int limit, boolean ascending) {
        // Clone first so the returned view is stable — see the class-level NOTE
        // on why TreeMap cannot copy a live concurrent sub-map directly.
        final NavigableMap<Number640, Data> clone = ((ConcurrentSkipListMap<Number640, Data>) dataMap).clone();
        final NavigableMap<Number640, Data> tmp = clone.subMap(fromKey, true, toKey, true);
        final NavigableMap<Number640, Data> retVal = new TreeMap<Number640, Data>();
        if (limit < 0) {
            return ascending ? tmp : tmp.descendingMap();
        } else {
            Iterator<Map.Entry<Number640, Data>> iterator =
                    ascending ? tmp.entrySet().iterator() : tmp.descendingMap().entrySet().iterator();
            for (int i = 0; iterator.hasNext() && i < limit; i++) {
                Map.Entry<Number640, Data> entry = iterator.next();
                retVal.put(entry.getKey(), entry.getValue());
            }
        }
        return retVal;
    }

    /**
     * Snapshot of the whole data map, copied entry-by-entry (see class NOTE).
     */
    @Override
    public NavigableMap<Number640, Data> map() {
        final NavigableMap<Number640, Data> retVal = new TreeMap<Number640, Data>();
        for (final Map.Entry<Number640, Data> entry : dataMap.entrySet()) {
            retVal.put(entry.getKey(), entry.getValue());
        }
        return retVal;
    }

    // Maintenance
    /**
     * Registers (or moves) the expiration time of a key.
     * BUGFIX: the reverse mapping is only cleaned up when the expiration
     * actually changed. The old code unconditionally ran the cleanup, which —
     * when the new expiration equalled the old one — removed the key from the
     * very set it had just been added to, losing the reverse-timeout entry.
     */
    @Override
    public void addTimeout(Number640 key, long expiration) {
        Long oldExpiration = timeoutMap.put(key, expiration);
        Set<Number640> tmp = putIfAbsent2(expiration,
                Collections.newSetFromMap(new ConcurrentHashMap<Number640, Boolean>()));
        tmp.add(key);
        if (oldExpiration != null && oldExpiration.longValue() != expiration) {
            removeRevTimeout(key, oldExpiration);
        }
    }

    @Override
    public void removeTimeout(Number640 key) {
        Long expiration = timeoutMap.remove(key);
        if (expiration == null) {
            return;
        }
        removeRevTimeout(key, expiration);
    }

    // Removes key from the reverse map's set, dropping the set when empty.
    private void removeRevTimeout(Number640 key, Long expiration) {
        Set<Number640> tmp = timeoutMapRev.get(expiration);
        if (tmp != null) {
            tmp.remove(key);
            if (tmp.isEmpty()) {
                timeoutMapRev.remove(expiration);
            }
        }
    }

    /** All keys whose expiration lies strictly before {@code to}. */
    @Override
    public Collection<Number640> subMapTimeout(long to) {
        SortedMap<Long, Set<Number640>> tmp = timeoutMapRev.subMap(0L, to);
        Collection<Number640> toRemove = new ArrayList<Number640>();
        for (Set<Number640> set : tmp.values()) {
            toRemove.addAll(set);
        }
        return toRemove;
    }

    // Protection
    @Override
    public boolean protectDomain(Number320 key, PublicKey publicKey) {
        protectedMap.put(key, publicKey);
        return true;
    }

    @Override
    public boolean isDomainProtectedByOthers(Number320 key, PublicKey publicKey) {
        PublicKey other = protectedMap.get(key);
        if (other == null) {
            LOG.debug("domain {} not protected", key);
            return false;
        }
        final boolean retVal = !other.equals(publicKey);
        LOG.debug("domain {} protected: {}", key, retVal);
        return retVal;
    }

    // Atomically installs a set for the expiration if none exists yet.
    private Set<Number640> putIfAbsent2(long expiration, Set<Number640> hashSet) {
        Set<Number640> timeouts = timeoutMapRev.putIfAbsent(expiration, hashSet);
        return timeouts == null ? hashSet : timeouts;
    }

    @Override
    public Number160 findPeerIDsForResponsibleContent(Number160 locationKey) {
        return responsibilityMap.get(locationKey);
    }

    @Override
    public Collection<Number160> findContentForResponsiblePeerID(Number160 peerID) {
        return responsibilityMapRev.get(peerID);
    }

    /**
     * Records that {@code peerId} is responsible for {@code locationKey},
     * maintaining the reverse index; returns true if the assignment changed.
     */
    @Override
    public boolean updateResponsibilities(Number160 locationKey, Number160 peerId) {
        final Number160 oldPeerID = responsibilityMap.put(locationKey, peerId);
        final boolean hasChanged;
        if (oldPeerID != null) {
            if (oldPeerID.equals(peerId)) {
                hasChanged = false;
            } else {
                removeRevResponsibility(oldPeerID, locationKey);
                hasChanged = true;
            }
        } else {
            hasChanged = true;
        }
        Set<Number160> contentIDs = responsibilityMapRev.get(peerId);
        if (contentIDs == null) {
            contentIDs = new HashSet<Number160>();
            responsibilityMapRev.put(peerId, contentIDs);
        }
        contentIDs.add(locationKey);
        LOG.debug("Update {} is responsible for key {}.", peerId, locationKey);
        return hasChanged;
    }

    @Override
    public void removeResponsibility(Number160 locationKey) {
        Number160 peerId = responsibilityMap.remove(locationKey);
        if (peerId != null) {
            removeRevResponsibility(peerId, locationKey);
            LOG.debug("Remove responsiblity for {}.", locationKey);
        }
    }

    private void removeRevResponsibility(Number160 peerId, Number160 locationKey) {
        Set<Number160> contentIDs = responsibilityMapRev.get(peerId);
        if (contentIDs != null) {
            contentIDs.remove(locationKey);
            if (contentIDs.isEmpty()) {
                responsibilityMapRev.remove(peerId);
            }
        }
    }

    // Misc
    /**
     * Clears ALL internal state.
     * BUGFIX: the old implementation left entryMap, responsibilityMap and
     * responsibilityMapRev populated after close(), leaking their contents.
     */
    @Override
    public void close() {
        dataMap.clear();
        protectedMap.clear();
        entryMap.clear();
        timeoutMap.clear();
        timeoutMapRev.clear();
        responsibilityMap.clear();
        responsibilityMapRev.clear();
    }

    @Override
    public boolean protectEntry(Number480 key, PublicKey publicKey) {
        entryMap.put(key, publicKey);
        return true;
    }

    @Override
    public boolean isEntryProtectedByOthers(Number480 key, PublicKey publicKey) {
        PublicKey other = entryMap.get(key);
        if (other == null) {
            return false;
        }
        return !other.equals(publicKey);
    }

    @Override
    public int storageCheckIntervalMillis() {
        return storageCheckIntervalMillis;
    }
}
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.util.indexing.impl; import com.intellij.openapi.diagnostic.Logger; import com.intellij.util.SmartList; import com.intellij.util.indexing.ValueContainer; import com.intellij.util.indexing.containers.ChangeBufferingList; import com.intellij.util.indexing.containers.IntIdsIterator; import com.intellij.util.io.DataExternalizer; import com.intellij.util.io.DataInputOutputUtil; import gnu.trove.THashMap; import org.jetbrains.annotations.ApiStatus; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.DataInputStream; import java.io.DataOutput; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.function.IntPredicate; /** * @author Eugene Zhuravlev */ @ApiStatus.Internal public final class ValueContainerImpl<Value> extends UpdatableValueContainer<Value> implements Cloneable{ private static final Logger LOG = Logger.getInstance(ValueContainerImpl.class); private static final Object myNullValue = new Object(); // there is no volatile as we modify under write lock and read under read lock // Most often (80%) we store 0 or one mapping, then we store them in two fields: myInputIdMapping, myInputIdMappingValue // when there are several value mapped, myInputIdMapping is ValueToInputMap<Value, Data> (it's actually just THashMap), myInputIdMappingValue = null private Object myInputIdMapping; private Object myInputIdMappingValue; @Override public void addValue(int inputId, Value value) { final Object fileSetObject = getFileSetObject(value); if (fileSetObject == null) { attachFileSetForNewValue(value, inputId); } else if (fileSetObject instanceof Integer) { int existingValue = ((Integer)fileSetObject).intValue(); if (existingValue != inputId) { ChangeBufferingList list = new 
ChangeBufferingList(); list.add(existingValue); list.add(inputId); resetFileSetForValue(value, list); } } else { ((ChangeBufferingList)fileSetObject).add(inputId); } } @Nullable private ValueToInputMap<Value> asMapping() { //noinspection unchecked return myInputIdMapping instanceof ValueToInputMap ? (ValueToInputMap<Value>)myInputIdMapping : null; } private Value asValue() { //noinspection unchecked return (Value)myInputIdMapping; } private Value nullValue() { //noinspection unchecked return (Value)myNullValue; } private void resetFileSetForValue(Value value, @NotNull Object fileSet) { if (value == null) value = nullValue(); Map<Value, Object> map = asMapping(); if (map == null) { myInputIdMappingValue = fileSet; } else { map.put(value, fileSet); } } @Override public int size() { return myInputIdMapping != null ? myInputIdMapping instanceof ValueToInputMap ? ((ValueToInputMap<?>)myInputIdMapping).size(): 1 : 0; } @Override public void removeAssociatedValue(int inputId) { if (myInputIdMapping == null) return; List<Object> fileSetObjects = null; List<Value> valueObjects = null; for (final InvertedIndexValueIterator<Value> valueIterator = getValueIterator(); valueIterator.hasNext();) { final Value value = valueIterator.next(); if (valueIterator.getValueAssociationPredicate().test(inputId)) { if (fileSetObjects == null) { fileSetObjects = new SmartList<>(); valueObjects = new SmartList<>(); } else if (IndexDebugProperties.DEBUG) { LOG.error("Expected only one value per-inputId for " + IndexDebugProperties.DEBUG_INDEX_ID.get(), String.valueOf(fileSetObjects.get(0)), String.valueOf(value)); } fileSetObjects.add(valueIterator.getFileSetObject()); valueObjects.add(value); } } if (fileSetObjects != null) { for (int i = 0, len = valueObjects.size(); i < len; ++i) { removeValue(inputId, fileSetObjects.get(i), valueObjects.get(i)); } } } void removeValue(int inputId, Value value) { removeValue(inputId, getFileSetObject(value), value); } private void removeValue(int inputId, 
Object fileSet, Value value) { if (fileSet == null) { return; } if (fileSet instanceof ChangeBufferingList) { final ChangeBufferingList changesList = (ChangeBufferingList)fileSet; changesList.remove(inputId); if (!changesList.isEmpty()) return; } else if (fileSet instanceof Integer) { if (((Integer)fileSet).intValue() != inputId) { return; } } Map<Value, Object> mapping = asMapping(); if (mapping == null) { myInputIdMapping = null; myInputIdMappingValue = null; } else { mapping.remove(value); if (mapping.size() == 1) { Value mappingValue = mapping.keySet().iterator().next(); myInputIdMapping = mappingValue; Object inputIdMappingValue = mapping.get(mappingValue); // prevent NPEs on file set due to Value class being mutable or having inconsistent equals wrt disk persistence // (instance that is serialized and new instance created with deserialization from the same bytes are expected to be equal) myInputIdMappingValue = inputIdMappingValue != null ? inputIdMappingValue : new Integer(0); } } } @NotNull @Override public InvertedIndexValueIterator<Value> getValueIterator() { if (myInputIdMapping == null) { //noinspection unchecked return (InvertedIndexValueIterator<Value>)EmptyValueIterator.INSTANCE; } Map<Value, Object> mapping = asMapping(); if (mapping == null) { return new InvertedIndexValueIterator<Value>() { private Value value = asValue(); @NotNull @Override public IntIterator getInputIdsIterator() { return getIntIteratorOutOfFileSetObject(getFileSetObject()); } @NotNull @Override public IntPredicate getValueAssociationPredicate() { return getPredicateOutOfFileSetObject(getFileSetObject()); } @Override public Object getFileSetObject() { return myInputIdMappingValue; } @Override public boolean hasNext() { return value != null; } @Override public Value next() { Value next = value; if (next == myNullValue) next = null; value = null; return next; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } else { return new 
InvertedIndexValueIterator<Value>() { private Value current; private Object currentValue; private final Iterator<Map.Entry<Value, Object>> iterator = mapping.entrySet().iterator(); @Override public boolean hasNext() { return iterator.hasNext(); } @Override public Value next() { Map.Entry<Value, Object> entry = iterator.next(); current = entry.getKey(); Value next = current; currentValue = entry.getValue(); if (next == myNullValue) next = null; return next; } @Override public void remove() { throw new UnsupportedOperationException(); } @NotNull @Override public IntIterator getInputIdsIterator() { return getIntIteratorOutOfFileSetObject(getFileSetObject()); } @NotNull @Override public IntPredicate getValueAssociationPredicate() { return getPredicateOutOfFileSetObject(getFileSetObject()); } @Override public Object getFileSetObject() { if (current == null) throw new IllegalStateException(); return currentValue; } }; } } private static class EmptyValueIterator<Value> implements InvertedIndexValueIterator<Value> { private static final EmptyValueIterator<Object> INSTANCE = new EmptyValueIterator<>(); @NotNull @Override public ValueContainer.IntIterator getInputIdsIterator() { throw new IllegalStateException(); } @NotNull @Override public IntPredicate getValueAssociationPredicate() { throw new IllegalStateException(); } @Override public Object getFileSetObject() { throw new IllegalStateException(); } @Override public boolean hasNext() { return false; } @Override public Value next() { throw new NoSuchElementException(); } @Override public void remove() { throw new IllegalStateException(); } } private static @NotNull IntPredicate getPredicateOutOfFileSetObject(@Nullable Object input) { if (input == null) return EMPTY_PREDICATE; if (input instanceof Integer) { final int singleId = (Integer)input; return id -> id == singleId; } return ((ChangeBufferingList)input).intPredicate(); } @NotNull private static ValueContainer.IntIterator getIntIteratorOutOfFileSetObject(@Nullable 
Object input) { if (input == null) return EMPTY_ITERATOR; if (input instanceof Integer) { return new SingleValueIterator(((Integer)input).intValue()); } return ((ChangeBufferingList)input).intIterator(); } private Object getFileSetObject(Value value) { if (myInputIdMapping == null) return null; value = value != null ? value : nullValue(); if (myInputIdMapping == value || // myNullValue is Object myInputIdMapping.equals(value)) { return myInputIdMappingValue; } Map<Value, Object> mapping = asMapping(); return mapping == null ? null : mapping.get(value); } @Override public ValueContainerImpl<Value> clone() { try { //noinspection unchecked ValueContainerImpl<Value> clone = (ValueContainerImpl<Value>)super.clone(); ValueToInputMap<Value> mapping = asMapping(); if (mapping != null) { final ValueToInputMap<Value> cloned = mapping.clone(); cloned.forEachEntry((key, val) -> { if (val instanceof ChangeBufferingList) { cloned.put(key, ((ChangeBufferingList)val).clone()); } return true; }); clone.myInputIdMapping = cloned; } else if (myInputIdMappingValue instanceof ChangeBufferingList) { clone.myInputIdMappingValue = ((ChangeBufferingList)myInputIdMappingValue).clone(); } return clone; } catch (CloneNotSupportedException e) { throw new RuntimeException(e); } } public static final IntIdsIterator EMPTY_ITERATOR = new IntIdsIterator() { @Override public boolean hasNext() { return false; } @Override public int next() { return 0; } @Override public int size() { return 0; } @Override public boolean hasAscendingOrder() { return true; } @Override public IntIdsIterator createCopyInInitialState() { return this; } }; @Nullable private ChangeBufferingList ensureFileSetCapacityForValue(Value value, int count) { if (count <= 1) return null; Object fileSetObject = getFileSetObject(value); if (fileSetObject != null) { if (fileSetObject instanceof Integer) { ChangeBufferingList list = new ChangeBufferingList(count + 1); list.add(((Integer)fileSetObject).intValue()); 
resetFileSetForValue(value, list); return list; } if (fileSetObject instanceof ChangeBufferingList) { ChangeBufferingList list = (ChangeBufferingList)fileSetObject; list.ensureCapacity(count); return list; } return null; } final ChangeBufferingList fileSet = new ChangeBufferingList(count); attachFileSetForNewValue(value, fileSet); return fileSet; } private void attachFileSetForNewValue(Value value, Object fileSet) { value = value != null ? value : nullValue(); if (myInputIdMapping != null) { Map<Value, Object> mapping = asMapping(); if (mapping == null) { Value oldMapping = asValue(); myInputIdMapping = mapping = new ValueToInputMap<>(2); mapping.put(oldMapping, myInputIdMappingValue); myInputIdMappingValue = null; } mapping.put(value, fileSet); } else { myInputIdMapping = value; myInputIdMappingValue = fileSet; } } @Override public void saveTo(DataOutput out, DataExternalizer<? super Value> externalizer) throws IOException { DataInputOutputUtil.writeINT(out, size()); for (final InvertedIndexValueIterator<Value> valueIterator = getValueIterator(); valueIterator.hasNext();) { final Value value = valueIterator.next(); externalizer.save(out, value); Object fileSetObject = valueIterator.getFileSetObject(); if (fileSetObject instanceof Integer) { DataInputOutputUtil.writeINT(out, (Integer)fileSetObject); // most common 90% case during index building } else { // serialize positive file ids with delta encoding ChangeBufferingList originalInput = (ChangeBufferingList)fileSetObject; IntIdsIterator intIterator = originalInput.sortedIntIterator(); if (IndexDebugProperties.DEBUG) LOG.assertTrue(intIterator.hasAscendingOrder()); if (intIterator.size() == 1) { DataInputOutputUtil.writeINT(out, intIterator.next()); } else { DataInputOutputUtil.writeINT(out, -intIterator.size()); int prev = 0; while (intIterator.hasNext()) { int fileId = intIterator.next(); DataInputOutputUtil.writeINT(out, fileId - prev); prev = fileId; } } } } } static final int NUMBER_OF_VALUES_THRESHOLD = 20; 
  /**
   * Deserializes data produced by {@code saveTo} into this container, applying
   * {@code remapping} to every stored input id (a remapped id may expand into an
   * {@code int[]} of ids). A negative value count is a ChangeTrackingValueContainer
   * invalidation record rather than real values: it names an input id whose
   * previous associations must be removed before the values that follow are added.
   */
  public void readFrom(@NotNull DataInputStream stream,
                       @NotNull DataExternalizer<? extends Value> externalizer,
                       @NotNull ValueContainerInputRemapping remapping) throws IOException {
    FileId2ValueMapping<Value> mapping = null;
    while (stream.available() > 0) {
      final int valueCount = DataInputOutputUtil.readINT(stream);
      if (valueCount < 0) {
        // ChangeTrackingValueContainer marked inputId as invalidated, see ChangeTrackingValueContainer.saveTo
        @NotNull Object inputIds = remapping.remap(-valueCount);
        if (mapping == null && size() > NUMBER_OF_VALUES_THRESHOLD) { // avoid O(NumberOfValues)
          mapping = new FileId2ValueMapping<>(this);
        }
        boolean doCompact = false;
        if (inputIds instanceof int[]) {
          for (int inputId : (int[])inputIds) {
            if (mapping != null) {
              // The reverse mapping knows whether the id was actually present.
              if (mapping.removeFileId(inputId)) doCompact = true;
            }
            else {
              removeAssociatedValue(inputId);
              doCompact = true;
            }
          }
        }
        else {
          int inputId = (int)inputIds;
          if (mapping != null) {
            if (mapping.removeFileId(inputId)) doCompact = true;
          }
          else {
            removeAssociatedValue(inputId);
            doCompact = true;
          }
        }
        if (doCompact) setNeedsCompacting(true);
      }
      else {
        for (int valueIdx = 0; valueIdx < valueCount; valueIdx++) {
          final Value value = externalizer.read(stream);
          // Positive: a single input id. Negative: -(number of delta-encoded ids).
          int idCountOrSingleValue = DataInputOutputUtil.readINT(stream);
          if (idCountOrSingleValue > 0) {
            @NotNull Object inputIds = remapping.remap(idCountOrSingleValue);
            if (inputIds instanceof int[]) {
              for (int inputId : (int[])inputIds) {
                addValue(inputId, value);
                if (mapping != null) mapping.associateFileIdToValue(inputId, value);
              }
            }
            else {
              int inputId = (int)inputIds;
              addValue(inputId, value);
              if (mapping != null) mapping.associateFileIdToValue(inputId, value);
            }
          }
          else {
            idCountOrSingleValue = -idCountOrSingleValue;
            // Pre-size the file set; null means ids must go through addValue instead.
            ChangeBufferingList changeBufferingList = ensureFileSetCapacityForValue(value, idCountOrSingleValue);
            int prev = 0;
            for (int i = 0; i < idCountOrSingleValue; i++) {
              final int id = DataInputOutputUtil.readINT(stream);
              // Undo the delta encoding from saveTo: each stored value is (id - prev).
              @NotNull Object inputIds = remapping.remap(prev + id);
              if (inputIds instanceof int[]) {
                for (int inputId : (int[])inputIds) {
                  if (changeBufferingList != null) changeBufferingList.add(inputId);
                  else addValue(inputId, value);
                  if (mapping != null) mapping.associateFileIdToValue(inputId, value);
                }
              }
              else {
                int inputId = (int)inputIds;
                if (changeBufferingList != null) changeBufferingList.add(inputId);
                else addValue(inputId, value);
                if (mapping != null) mapping.associateFileIdToValue(inputId, value);
              }
              prev += id;
            }
          }
        }
      }
    }
  }

  /** Iterator over exactly one int id; trivially in ascending order. */
  private static final class SingleValueIterator implements IntIdsIterator {
    private final int myValue;
    private boolean myValueRead; // set once next() has been called

    private SingleValueIterator(int value) {
      myValue = value;
    }

    @Override
    public boolean hasNext() {
      return !myValueRead;
    }

    @Override
    public int next() {
      myValueRead = true;
      return myValue;
    }

    @Override
    public int size() {
      return 1;
    }

    @Override
    public boolean hasAscendingOrder() {
      return true;
    }

    @Override
    public IntIdsIterator createCopyInInitialState() {
      return new SingleValueIterator(myValue);
    }
  }

  // Shared "matches nothing" predicate to avoid per-call lambda allocation.
  private static final IntPredicate EMPTY_PREDICATE = __ -> false;

  // a class to distinguish a difference between user-value with THashMap type and internal value container
  private static final class ValueToInputMap<Value> extends THashMap<Value, Object> {
    ValueToInputMap(int size) {
      super(size);
    }

    @Override
    public ValueToInputMap<Value> clone() {
      return (ValueToInputMap<Value>)super.clone();
    }
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.operator;

import com.google.common.io.Closer;
import io.prestosql.operator.exchange.LocalPartitionGenerator;
import io.prestosql.spi.Page;
import io.prestosql.spi.PageBuilder;
import io.prestosql.spi.type.Type;

import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.NotThreadSafe;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;

import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.lang.Integer.numberOfTrailingZeros;
import static java.lang.Math.toIntExact;

/**
 * A LookupSource that fans out over a power-of-two number of partitioned
 * delegate LookupSources. Join positions handed to callers are encoded as
 * {@code (joinPosition << shiftSize) | partition}, so the partition can be
 * recovered on every subsequent call without re-hashing.
 */
@NotThreadSafe
public class PartitionedLookupSource
        implements LookupSource
{
    /**
     * Builds a supplier of partitioned lookup sources. For outer joins a shared
     * OuterPositionTracker factory is created so that unvisited build-side
     * positions can be emitted after all probes finish.
     */
    public static TrackingLookupSourceSupplier createPartitionedLookupSourceSupplier(List<Supplier<LookupSource>> partitions, List<Type> hashChannelTypes, boolean outer)
    {
        if (outer) {
            OuterPositionTracker.Factory outerPositionTrackerFactory = new OuterPositionTracker.Factory(partitions);

            return new TrackingLookupSourceSupplier()
            {
                @Override
                public LookupSource getLookupSource()
                {
                    // Each caller gets its own tracker instance over the shared visited arrays.
                    return new PartitionedLookupSource(
                            partitions.stream()
                                    .map(Supplier::get)
                                    .collect(toImmutableList()),
                            hashChannelTypes,
                            Optional.of(outerPositionTrackerFactory.create()));
                }

                @Override
                public OuterPositionIterator getOuterPositionIterator()
                {
                    return outerPositionTrackerFactory.getOuterPositionIterator();
                }
            };
        }
        else {
            return TrackingLookupSourceSupplier.nonTracking(
                    () -> new PartitionedLookupSource(
                            partitions.stream()
                                    .map(Supplier::get)
                                    .collect(toImmutableList()),
                            hashChannelTypes,
                            Optional.empty()));
        }
    }

    private final LookupSource[] lookupSources;
    private final LocalPartitionGenerator partitionGenerator;
    private final int partitionMask; // lookupSources.length - 1; valid because the count is a power of two
    private final int shiftSize;     // bits reserved for the partition in an encoded join position
    @Nullable
    private final OuterPositionTracker outerPositionTracker;

    private boolean closed;

    private PartitionedLookupSource(List<? extends LookupSource> lookupSources, List<Type> hashChannelTypes, Optional<OuterPositionTracker> outerPositionTracker)
    {
        this.lookupSources = lookupSources.toArray(new LookupSource[lookupSources.size()]);

        // this generator is only used for getJoinPosition without a rawHash and in this case
        // the hash channels are always packed in a page without extra columns
        int[] hashChannels = new int[hashChannelTypes.size()];
        for (int i = 0; i < hashChannels.length; i++) {
            hashChannels[i] = i;
        }
        this.partitionGenerator = new LocalPartitionGenerator(new InterpretedHashGenerator(hashChannelTypes, hashChannels), lookupSources.size());

        this.partitionMask = lookupSources.size() - 1;
        // One extra bit beyond log2(partitions) — keeps the partition field wide enough
        // for the mask while the join position occupies the remaining high bits.
        this.shiftSize = numberOfTrailingZeros(lookupSources.size()) + 1;
        this.outerPositionTracker = outerPositionTracker.orElse(null);
    }

    @Override
    public boolean isEmpty()
    {
        return Arrays.stream(lookupSources).allMatch(LookupSource::isEmpty);
    }

    @Override
    public int getChannelCount()
    {
        // All partitions share the same layout, so any delegate answers this.
        return lookupSources[0].getChannelCount();
    }

    @Override
    public long getJoinPositionCount()
    {
        return Arrays.stream(lookupSources)
                .mapToLong(LookupSource::getJoinPositionCount)
                .sum();
    }

    @Override
    public long getInMemorySizeInBytes()
    {
        return Arrays.stream(lookupSources).mapToLong(LookupSource::getInMemorySizeInBytes).sum();
    }

    @Override
    public long getJoinPosition(int position, Page hashChannelsPage, Page allChannelsPage)
    {
        // No caller-provided hash: compute it from the packed hash channels.
        return getJoinPosition(position, hashChannelsPage, allChannelsPage, partitionGenerator.getRawHash(hashChannelsPage, position));
    }

    @Override
    public long getJoinPosition(int position, Page hashChannelsPage, Page allChannelsPage, long rawHash)
    {
        int partition = partitionGenerator.getPartition(rawHash);
        LookupSource lookupSource = lookupSources[partition];
        long joinPosition = lookupSource.getJoinPosition(position, hashChannelsPage, allChannelsPage, rawHash);
        if (joinPosition < 0) {
            // Negative means "no match"; pass the sentinel through unencoded.
            return joinPosition;
        }
        return encodePartitionedJoinPosition(partition, toIntExact(joinPosition));
    }

    @Override
    public long getNextJoinPosition(long currentJoinPosition, int probePosition, Page allProbeChannelsPage)
    {
        int partition = decodePartition(currentJoinPosition);
        long joinPosition = decodeJoinPosition(currentJoinPosition);
        LookupSource lookupSource = lookupSources[partition];
        long nextJoinPosition = lookupSource.getNextJoinPosition(joinPosition, probePosition, allProbeChannelsPage);
        if (nextJoinPosition < 0) {
            return nextJoinPosition;
        }
        return encodePartitionedJoinPosition(partition, toIntExact(nextJoinPosition));
    }

    @Override
    public boolean isJoinPositionEligible(long currentJoinPosition, int probePosition, Page allProbeChannelsPage)
    {
        int partition = decodePartition(currentJoinPosition);
        long joinPosition = decodeJoinPosition(currentJoinPosition);
        LookupSource lookupSource = lookupSources[partition];
        return lookupSource.isJoinPositionEligible(joinPosition, probePosition, allProbeChannelsPage);
    }

    @Override
    public void appendTo(long partitionedJoinPosition, PageBuilder pageBuilder, int outputChannelOffset)
    {
        int partition = decodePartition(partitionedJoinPosition);
        int joinPosition = decodeJoinPosition(partitionedJoinPosition);
        lookupSources[partition].appendTo(joinPosition, pageBuilder, outputChannelOffset);
        if (outerPositionTracker != null) {
            // Record the visit so outer-join post-processing skips this position.
            outerPositionTracker.positionVisited(partition, joinPosition);
        }
    }

    @Override
    public long joinPositionWithinPartition(long joinPosition)
    {
        return decodeJoinPosition(joinPosition);
    }

    @Override
    public void close()
    {
        if (closed) {
            return;
        }
        try (Closer closer = Closer.create()) {
            if (outerPositionTracker != null) {
                // Commit must run before the delegates are closed (registration order
                // is reversed on close), publishing this tracker's visited flags.
                closer.register(outerPositionTracker::commit);
            }
            Arrays.stream(lookupSources).forEach(closer::register);
        }
        catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        closed = true;
    }

    // Partition lives in the low bits of an encoded join position.
    private int decodePartition(long partitionedJoinPosition)
    {
        return (int) (partitionedJoinPosition & partitionMask);
    }

    // Join position lives in the high bits; unsigned shift discards the partition.
    private int decodeJoinPosition(long partitionedJoinPosition)
    {
        return toIntExact(partitionedJoinPosition >>> shiftSize);
    }

    private long encodePartitionedJoinPosition(int partition, int joinPosition)
    {
        return (((long) joinPosition) << shiftSize) | (partition);
    }

    /**
     * Iterates all build-side positions that were never visited during the join,
     * across every partition, appending them for outer-join output.
     */
    private static class PartitionedLookupOuterPositionIterator
            implements OuterPositionIterator
    {
        private final LookupSource[] lookupSources;
        private final boolean[][] visitedPositions;

        @GuardedBy("this")
        private int currentSource;

        @GuardedBy("this")
        private int currentPosition;

        public PartitionedLookupOuterPositionIterator(LookupSource[] lookupSources, boolean[][] visitedPositions)
        {
            this.lookupSources = lookupSources;
            this.visitedPositions = visitedPositions;
        }

        @Override
        public synchronized boolean appendToNext(PageBuilder pageBuilder, int outputChannelOffset)
        {
            while (currentSource < lookupSources.length) {
                while (currentPosition < visitedPositions[currentSource].length) {
                    if (!visitedPositions[currentSource][currentPosition]) {
                        lookupSources[currentSource].appendTo(currentPosition, pageBuilder, outputChannelOffset);
                        currentPosition++;
                        return true;
                    }
                    currentPosition++;
                }
                currentPosition = 0;
                currentSource++;
            }
            return false;
        }
    }

    /**
     * Each LookupSource has it's own copy of OuterPositionTracker instance.
     * Each of those OuterPositionTracker must be committed after last write
     * and before first read.
     * <p>
     * All instances share visitedPositions array, but it is safe because each thread
     * starts with visitedPositions filled with false values and marks only some positions
     * to true. Since we don't care what will be the order of those writes to
     * visitedPositions, writes can be without synchronization.
     * <p>
     * Memory visibility between last writes in commit() and first read in
     * getVisitedPositions() is guaranteed by accessing AtomicLong referenceCount
     * variables in those two methods.
     */
    private static class OuterPositionTracker
    {
        public static class Factory
        {
            private final LookupSource[] lookupSources;
            private final boolean[][] visitedPositions;
            private final AtomicBoolean finished = new AtomicBoolean();
            private final AtomicLong referenceCount = new AtomicLong();

            public Factory(List<Supplier<LookupSource>> partitions)
            {
                this.lookupSources = partitions.stream()
                        .map(Supplier::get)
                        .toArray(LookupSource[]::new);

                // One visited-flag array per partition, sized to its position count.
                visitedPositions = Arrays.stream(this.lookupSources)
                        .map(LookupSource::getJoinPositionCount)
                        .map(Math::toIntExact)
                        .map(boolean[]::new)
                        .toArray(boolean[][]::new);
            }

            public OuterPositionTracker create()
            {
                return new OuterPositionTracker(visitedPositions, finished, referenceCount);
            }

            public OuterPositionIterator getOuterPositionIterator()
            {
                // touching atomic values ensures memory visibility between commit and getVisitedPositions
                verify(referenceCount.get() == 0);
                finished.set(true);
                return new PartitionedLookupOuterPositionIterator(lookupSources, visitedPositions);
            }
        }

        private final boolean[][] visitedPositions; // shared across multiple operators/drivers
        private final AtomicBoolean finished; // shared across multiple operators/drivers
        private final AtomicLong referenceCount; // shared across multiple operators/drivers
        private boolean written; // unique per each operator/driver

        private OuterPositionTracker(boolean[][] visitedPositions, AtomicBoolean finished, AtomicLong referenceCount)
        {
            this.visitedPositions = visitedPositions;
            this.finished = finished;
            this.referenceCount = referenceCount;
        }

        /**
         * No synchronization here, because it would be very expensive. Check comment above.
         */
        public void positionVisited(int partition, int position)
        {
            if (!written) {
                // First write by this tracker: register it so the factory can verify
                // all trackers committed before readers start.
                written = true;
                verify(!finished.get());
                referenceCount.incrementAndGet();
            }
            visitedPositions[partition][position] = true;
        }

        public void commit()
        {
            if (written) {
                // touching atomic values ensures memory visibility between commit and getVisitedPositions
                referenceCount.decrementAndGet();
            }
        }
    }
}
// Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Nodes.Traces.Component;

import com.google.common.base.Preconditions;
import com.google.common.primitives.Ints;
import com.google.security.zynamics.binnavi.CUtilityFunctions;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntLoadDataException;
import com.google.security.zynamics.binnavi.Gui.CProgressDialog;
import com.google.security.zynamics.binnavi.Gui.Debug.EventLists.CDebugEventListPanel;
import com.google.security.zynamics.binnavi.Gui.Debug.EventLists.CEventListTableMenu;
import com.google.security.zynamics.binnavi.Gui.Debug.EventLists.CEventTableMenu;
import com.google.security.zynamics.binnavi.Gui.FilterPanel.CTablePanel;
import com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Nodes.CAbstractNodeComponent;
import com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Nodes.Traces.Component.Help.CTracesViewsTableHelp;
import com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Nodes.Views.Help.CViewFilterHelp;
import com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.filters.CViewFilterCreator;
import com.google.security.zynamics.binnavi.Gui.errordialog.NaviErrorDialog;
import com.google.security.zynamics.binnavi.debug.models.trace.TraceList;
import com.google.security.zynamics.binnavi.debug.models.trace.interfaces.ITraceEvent;
import com.google.security.zynamics.binnavi.disassembly.CProjectContainer;
import com.google.security.zynamics.binnavi.disassembly.UnrelocatedAddress;
import com.google.security.zynamics.binnavi.disassembly.INaviModule;
import com.google.security.zynamics.binnavi.disassembly.views.INaviView;
import com.google.security.zynamics.binnavi.disassembly.views.IViewContainer;
import com.google.security.zynamics.zylib.gui.ProgressDialogs.CEndlessHelperThread;
import com.google.security.zynamics.zylib.types.lists.FilledList;
import com.google.security.zynamics.zylib.types.lists.IFilledList;

import java.awt.BorderLayout;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.List;

import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTable;
import javax.swing.JTree;
import javax.swing.SwingUtilities;
import javax.swing.border.TitledBorder;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;

/**
 * Component that is shown on the right side of the main window when a Debug Traces node is
 * selected.
 */
public final class CTracesNodeComponent extends CAbstractNodeComponent {
  /**
   * Used for serialization.
   */
  private static final long serialVersionUID = 2169183737568464057L;

  /**
   * Provides the trace information shown in the component.
   */
  private final IViewContainer m_container;

  /**
   * Panel where the trace information is shown.
   */
  private final CDebugEventListPanel m_tracesPanel;

  /**
   * Model of the table that shows what views are relevant for a trace.
   */
  private final CArbitraryViewsModel m_model = new CArbitraryViewsModel();

  /**
   * Listens on changes in the trace list.
   */
  private final ListSelectionListener m_listener = new InternalSelectionListener();

  /**
   * Creates a new traces node component.
   *
   * @param projectTree Project tree of the main window.
   * @param container Provides the trace information shown in the component.
   */
  public CTracesNodeComponent(final JTree projectTree, final IViewContainer container) {
    super(new BorderLayout());

    Preconditions.checkNotNull(projectTree, "IE02007: Project tree argument can not be null");

    m_container =
        Preconditions.checkNotNull(container, "IE02008: Container argument can not be null");

    setBorder(new TitledBorder("Debug Traces"));

    // Upper half: the trace/event tables for the container's trace provider.
    m_tracesPanel = new CDebugEventListPanel(m_container.getTraceProvider());

    final CArbitraryViewsTable table =
        new CArbitraryViewsTable(projectTree, m_model, container, new CTracesViewsTableHelp());

    // Lower half: filterable table of views relevant to the selected trace.
    // NOTE(review): `table` is handed to CTablePanel and then also wrapped in a
    // JScrollPane added to the same panel — confirm this double use is intended.
    final JPanel lowerPanel =
        new CTablePanel<INaviView>(table, new CViewFilterCreator(container), new CViewFilterHelp());
    lowerPanel.setBorder(new TitledBorder("Views"));
    lowerPanel.add(new JScrollPane(table));

    final JSplitPane splitPane =
        new JSplitPane(JSplitPane.VERTICAL_SPLIT, true, m_tracesPanel, lowerPanel);
    add(splitPane, BorderLayout.CENTER);
    splitPane.setResizeWeight(0.5);

    m_tracesPanel.getTracesTable().getSelectionModel().addListSelectionListener(m_listener);
    m_tracesPanel.getTracesTable().addMouseListener(new InternalTraceTableListener());
    m_tracesPanel.getEventsTable().addMouseListener(new InternalEventsTableListener());
  }

  /**
   * Shows the views that belong to a trace in the table in the lower half of the component.
   *
   * @param trace The trace list.
   *
   * @throws CouldntLoadDataException
   */
  private void showRelevantViews(final TraceList trace) throws CouldntLoadDataException {
    // Collect every event address of the trace, then look up all views touching them.
    final IFilledList<UnrelocatedAddress> addresses = new FilledList<UnrelocatedAddress>();
    for (final ITraceEvent traceEvent : trace) {
      addresses.add(traceEvent.getOffset().getAddress());
    }

    final List<INaviView> views = m_container.getViewsWithAddresses(addresses, false);

    if (m_container instanceof CProjectContainer) {
      // Projects aggregate modules; include views from each loaded module too.
      for (final INaviModule module : m_container.getModules()) {
        if (module.isLoaded()) {
          views.addAll(module.getViewsWithAddresses(addresses, false));
        }
      }
    }

    m_model.setViews(views);
  }

  @Override
  public void dispose() {
    m_tracesPanel.dispose();
  }

  /**
   * Mouse handler for the debug trace events table.
   */
  private class InternalEventsTableListener extends MouseAdapter {
    /**
     * Converts row indices into trace objects.
     *
     * @param rows The row indices to convert.
     *
     * @return The corresponding trace object.
     */
    private List<ITraceEvent> getTraces(final int[] rows) {
      final List<ITraceEvent> events = new ArrayList<ITraceEvent>();
      for (final int row : rows) {
        events.add(m_tracesPanel.getEventsTable().getTreeTableModel().getEvents().get(row));
      }
      return events;
    }

    /**
     * Shows a popup menu for a given mouse event.
     *
     * @param event The mouse event that triggered the popup menu.
     */
    private void showPopupMenu(final MouseEvent event) {
      final int[] rows = m_tracesPanel.getEventsTable().getConvertedSelectedRows();

      final List<ITraceEvent> traces = getTraces(rows);

      final CEventTableMenu menu = new CEventTableMenu(m_tracesPanel.getEventsTable(), traces);

      menu.show(m_tracesPanel.getEventsTable(), event.getX(), event.getY());
    }

    // Popup triggers differ per platform (press on Linux, release on Windows),
    // so both callbacks must check isPopupTrigger().
    @Override
    public void mousePressed(final MouseEvent event) {
      if (event.isPopupTrigger()) {
        showPopupMenu(event);
      }
    }

    @Override
    public void mouseReleased(final MouseEvent event) {
      if (event.isPopupTrigger()) {
        showPopupMenu(event);
      }
    }
  }

  /**
   * Listens on changes in the trace list.
   */
  private class InternalSelectionListener implements ListSelectionListener {
    @Override
    public void valueChanged(final ListSelectionEvent event) {
      final int first = m_tracesPanel.getTracesTable().getSelectionModel().getMinSelectionIndex();

      if (first == -1) {
        // Nothing selected: clear the views table.
        m_model.setViews(new FilledList<INaviView>());
        return;
      }

      // Only a single selected trace has its relevant views loaded.
      final boolean single =
          first == m_tracesPanel.getTracesTable().getSelectionModel().getMaxSelectionIndex();

      if (single) {
        final JTable table = m_tracesPanel.getTracesTable();
        final TraceList list = m_tracesPanel.getTracesTable().getTreeTableModel().getTraces()
            .get(table.convertRowIndexToModel(first));

        // Loading hits the database, so it runs on a helper thread behind a dialog.
        final LoadRelevantViewsThread thread = new LoadRelevantViewsThread(list);

        CProgressDialog.showEndless(SwingUtilities.getWindowAncestor(getParent()),
            "Loading views that belong to the selected trace", thread);

        if (thread.getException() != null) {
          final String innerMessage = "E00042: " + "Could not load relevant views";
          final String innerDescription = CUtilityFunctions.createDescription(
              String.format("BinNavi could not load the views that belong to the trace '%s'.",
                  list.getName()),
              new String[] {"There was a problem with the database connection."},
              new String[] {"The views that belong to the trace can not be shown."});

          NaviErrorDialog.show(SwingUtilities.getWindowAncestor(getParent()), innerMessage,
              innerDescription, thread.getException());
        }
      }
    }
  }

  /**
   * Handles mouse-clicks on the table.
   */
  private class InternalTraceTableListener extends MouseAdapter {
    /**
     * Shows a popup menu for a given mouse event.
     *
     * @param event The event that triggered the popup menu.
     */
    private void showPopupMenu(final MouseEvent event) {
      final JTable traceTable = m_tracesPanel.getTracesTable();

      final int mouseRow = traceTable.rowAtPoint(event.getPoint());

      if (mouseRow != -1) {
        final int[] rows = traceTable.getSelectedRows();

        // NOTE(review): this collapses the selection to the clicked row when it is
        // already part of the selection; the usual idiom selects the clicked row
        // only when it is NOT in the selection (== -1). Confirm intent.
        if (Ints.asList(rows).indexOf(mouseRow) != -1) {
          traceTable.setRowSelectionInterval(mouseRow, mouseRow);
        }
      }

      // Make sure at least one row is selected
      final int minIndex =
          m_tracesPanel.getTracesTable().getSelectionModel().getMinSelectionIndex();
      if (minIndex != -1) {
        final JPopupMenu popupMenu = new CEventListTableMenu(
            (JFrame) SwingUtilities.getWindowAncestor(CTracesNodeComponent.this),
            m_tracesPanel.getTracesTable(), m_container.getTraceProvider());

        popupMenu.show(m_tracesPanel.getTracesTable(), event.getX(), event.getY());
      }
    }

    @Override
    public void mousePressed(final MouseEvent event) {
      if (event.isPopupTrigger()) {
        showPopupMenu(event);
      }
    }

    @Override
    public void mouseReleased(final MouseEvent event) {
      if (event.isPopupTrigger()) {
        showPopupMenu(event);
      }
    }
  }

  /**
   * Background thread for loading views while a progress dialog is active.
   */
  private class LoadRelevantViewsThread extends CEndlessHelperThread {
    /**
     * Trace list for which the relevant views are loaded.
     */
    private final TraceList m_list;

    /**
     * Creates a new thread object.
     *
     * @param list Trace list for which the relevant views are loaded.
     */
    public LoadRelevantViewsThread(final TraceList list) {
      m_list = list;
    }

    @Override
    protected void runExpensiveCommand() throws Exception {
      showRelevantViews(m_list);
    }

    @Override
    public void closeRequested() {
      finish();
    }
  }
}
/*
 * CategoryImpl.java
 * Created by: Scott A. Roehrig
 * Created on: Jul 8, 2016
 */
package org.apache.bazaar;

import java.util.Collections;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;

import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.EntityNotFoundException;
import javax.persistence.EntityTransaction;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;

import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;

import org.apache.bazaar.logging.Logger;
import org.apache.bazaar.nls.Messages;
import org.apache.bazaar.persistence.EntityManagerFactory;
import org.apache.bazaar.persistence.config.Configuration;
import org.apache.bazaar.version.AbstractVersionable;
import org.hibernate.envers.Audited;

/**
 * CategoryImpl implements {@link Category} to provide a concrete
 * implementation.
 * <p>
 * Categories form a tree: every category has a parent (the root category is its
 * own parent) and an eagerly-fetched set of children kept in sync with the
 * parent reference.
 */
@Entity(name = org.apache.bazaar.persistence.config.Configuration.CATEGORY_ENTITY_NAME)
@Table(name = org.apache.bazaar.persistence.config.Configuration.CATEGORY_TABLE_NAME,
		schema = org.apache.bazaar.persistence.config.Configuration.DATABASE_SCHEMA_NAME)
// @PrimaryKeyJoinColumn(name = Configuration.IDENTIFIABLE_COLUMN_NAME)
@Audited
public class CategoryImpl extends AbstractVersionable implements Category {

	// declare members
	private static final long serialVersionUID = 8868571475475761876L;
	private static final Messages MESSAGES = Messages.newInstance(Locale.getDefault());
	@ManyToOne(targetEntity = CategoryImpl.class, optional = false)
	@JoinColumn(name = "PARENT",
			referencedColumnName = org.apache.bazaar.persistence.config.Configuration.IDENTIFIABLE_COLUMN_NAME,
			nullable = false)
	private Category parent;
	@Column(name = "NAME", nullable = false, updatable = true, length = 255)
	private String name;
	@Column(name = "DESCRIPTION", nullable = false, updatable = true, length = 255)
	private String description;
	@OneToMany(targetEntity = CategoryImpl.class, mappedBy = "parent", fetch = FetchType.EAGER,
			cascade = { CascadeType.MERGE, CascadeType.REFRESH, CascadeType.REMOVE })
	private Set<Category> children;

	// declare constructors

	/**
	 * Constructor for CategoryImpl
	 */
	protected CategoryImpl() {
		super();
		this.children = new HashSet<Category>(10);
	}

	/**
	 * Constructor for CategoryImpl
	 *
	 * @param name The name of category
	 * @param description The description of category
	 * @param parent The parent category
	 */
	CategoryImpl(@NotNull @Size(min = 1, max = 255) final String name,
			@NotNull @Size(min = 1, max = 255) final String description, @NotNull final Category parent) {
		this();
		this.name = name;
		this.description = description;
		this.parent = parent;
		// keep the bidirectional association consistent on construction
		((CategoryImpl)this.parent).children.add(this);
	}

	// declare methods

	/**
	 * Translates a JPA {@link EntityNotFoundException} into the domain-level
	 * {@link CategoryNotFoundException}; all other exceptions are delegated to
	 * the superclass.
	 *
	 * @param exception The exception to translate
	 * @throws BazaarException Always; the translated exception
	 */
	@Override
	protected void processException(final Exception exception) throws BazaarException {
		if (exception instanceof EntityNotFoundException) {
			throw new CategoryNotFoundException(exception.getLocalizedMessage(), exception);
		}
		super.processException(exception);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.apache.bazaar.Category#getName()
	 */
	@Override
	public String getName() {
		return this.name;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.apache.bazaar.Category#setName(java.lang.String)
	 */
	@Override
	public void setName(final String name) {
		this.name = name;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.apache.bazaar.Category#getDescription()
	 */
	@Override
	public String getDescription() {
		return this.description;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.apache.bazaar.Category#setDescription(java.lang.String)
	 */
	@Override
	public void setDescription(final String description) {
		this.description = description;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.apache.bazaar.Category#getParent()
	 */
	@Override
	public Category getParent() {
		return this.parent;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.apache.bazaar.Category#setParent(org.apache.bazaar.Category)
	 */
	@Override
	public void setParent(final Category parent) {
		this.parent = parent;
		// keep the new parent's children collection in sync with this side
		((CategoryImpl)parent).children.add(this);
	}

	/**
	 * Returns an unmodifiable view of this category's children.
	 *
	 * @see org.apache.bazaar.Category#getChildren()
	 */
	@Override
	public Set<Category> getChildren() throws BazaarException {
		return Collections.unmodifiableSet(this.children);
	}

	/**
	 * Deletes this category and detaches it from its parent's children set.
	 * Overridden to handle removal of this from the parent's children.
	 *
	 * @see org.apache.bazaar.AbstractPersistable#delete()
	 */
	@Override
	public void delete() throws BazaarException {
		// override to handle removal of this from parents children
		AbstractPersistable.LOGGER.entering("delete");
		final EntityManager manager = EntityManagerFactory.newInstance().createEntityManager();
		final EntityTransaction transaction = manager.getTransaction();
		try {
			transaction.begin();
			manager.remove(manager.merge(this));
			transaction.commit();
			// only unlink from the parent once the database removal succeeded
			((CategoryImpl)this.parent).children.remove(this);
		}
		finally {
			// roll back if commit was never reached, so the manager is not
			// closed with a dangling active transaction
			if (transaction.isActive()) {
				transaction.rollback();
			}
			manager.close();
		}
		AbstractPersistable.LOGGER.exiting("delete");
	}

	/**
	 * Persists this category. A category that is its own parent is only legal
	 * for the configured root category; any other self-parented category is
	 * rejected.
	 *
	 * @see org.apache.bazaar.AbstractPersistable#persist()
	 */
	@Override
	public void persist() throws BazaarException {
		// check for this equals parent and fail if not root category
		// (the redundant "? true : false" of the original condition removed)
		if (this.equals(this.parent) && !this.getIdentifier().getValue()
				.equals(Configuration.newInstance()
						.getProperty(org.apache.bazaar.config.Configuration.ROOT_CATEGORY_IDENTIFIER))) {
			throw new BazaarException(
					CategoryImpl.MESSAGES.findMessage(Messages.UNABLE_TO_CREATE_CATEGORY));
		}
		super.persist();
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {
		return Logger.toString(this,
				new Object[] { this.getIdentifier(), this.name, this.description, this.parent.getIdentifier() });
	}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.analyzer; import com.facebook.presto.Session; import com.facebook.presto.common.QualifiedObjectName; import com.facebook.presto.common.type.Type; import com.facebook.presto.security.AccessControl; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.TableHandle; import com.facebook.presto.spi.function.FunctionHandle; import com.facebook.presto.spi.security.Identity; import com.facebook.presto.sql.tree.ExistsPredicate; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.FunctionCall; import com.facebook.presto.sql.tree.GroupingOperation; import com.facebook.presto.sql.tree.Identifier; import com.facebook.presto.sql.tree.InPredicate; import com.facebook.presto.sql.tree.Join; import com.facebook.presto.sql.tree.LambdaArgumentDeclaration; import com.facebook.presto.sql.tree.Node; import com.facebook.presto.sql.tree.NodeRef; import com.facebook.presto.sql.tree.OrderBy; import com.facebook.presto.sql.tree.QuantifiedComparisonExpression; import com.facebook.presto.sql.tree.Query; import com.facebook.presto.sql.tree.QuerySpecification; import com.facebook.presto.sql.tree.Relation; import com.facebook.presto.sql.tree.SampledRelation; import com.facebook.presto.sql.tree.Statement; import com.facebook.presto.sql.tree.SubqueryExpression; import com.facebook.presto.sql.tree.Table; import 
com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimap;

import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;

import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;

import static com.facebook.presto.SystemSessionProperties.isCheckAccessControlOnUtilizedColumnsOnly;
import static com.facebook.presto.metadata.MetadataUtil.toSchemaTableName;
import static com.facebook.presto.sql.analyzer.Analysis.MaterializedViewAnalysisState.NOT_VISITED;
import static com.facebook.presto.sql.analyzer.Analysis.MaterializedViewAnalysisState.VISITED;
import static com.facebook.presto.sql.analyzer.Analysis.MaterializedViewAnalysisState.VISITING;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.collect.Multimaps.forMap;
import static com.google.common.collect.Multimaps.unmodifiableMultimap;
import static java.lang.String.format;
import static java.util.Collections.emptyList;
import static java.util.Collections.unmodifiableCollection;
import static java.util.Collections.unmodifiableList;
import static java.util.Collections.unmodifiableMap;
import static java.util.Collections.unmodifiableSet;
import static java.util.Objects.requireNonNull;

/**
 * Mutable side-table of results produced while analyzing a SQL {@link Statement}.
 * <p>
 * The analyzer records facts about AST nodes here (types, scopes, resolved tables,
 * coercions, subqueries, access-control column references, ...) keyed by
 * {@link NodeRef}, which gives identity-based (not equals-based) lookup of AST nodes.
 * Most maps are {@link LinkedHashMap}s so that iteration order follows insertion
 * (i.e. analysis) order.
 * <p>
 * NOTE(review): this class is not thread-safe — it appears to be populated and read
 * by a single analysis pass; confirm before sharing across threads.
 */
public class Analysis
{
    // Root of the statement being analyzed; null for some describe-style flows.
    @Nullable
    private final Statement root;
    // Prepared-statement parameter expressions, immutable snapshot.
    private final List<Expression> parameters;
    // Human-readable kind of update (e.g. for explain/reporting); null for reads.
    private String updateType;

    // WITH-clause (named query) expansion: table reference -> defining query.
    private final Map<NodeRef<Table>, Query> namedQueries = new LinkedHashMap<>();

    // Name-resolution scope computed for each node.
    private final Map<NodeRef<Node>, Scope> scopes = new LinkedHashMap<>();
    // Which field(s) each column-reference expression resolved to.
    private final Multimap<NodeRef<Expression>, FieldId> columnReferences = ArrayListMultimap.create();

    // a map of users to the columns per table that they access
    private final Map<AccessControlInfo, Map<QualifiedObjectName, Set<String>>> tableColumnReferences = new LinkedHashMap<>();
    // Same shape as above, restricted to columns actually utilized by the plan.
    private final Map<AccessControlInfo, Map<QualifiedObjectName, Set<String>>> utilizedTableColumnReferences = new LinkedHashMap<>();

    // Aggregation-related results, per query specification / ORDER BY clause.
    private final Map<NodeRef<QuerySpecification>, List<FunctionCall>> aggregates = new LinkedHashMap<>();
    private final Map<NodeRef<OrderBy>, List<Expression>> orderByAggregates = new LinkedHashMap<>();
    private final Map<NodeRef<QuerySpecification>, List<Expression>> groupByExpressions = new LinkedHashMap<>();
    private final Map<NodeRef<QuerySpecification>, GroupingSetAnalysis> groupingSets = new LinkedHashMap<>();

    // Clause expressions extracted per node.
    private final Map<NodeRef<Node>, Expression> where = new LinkedHashMap<>();
    private final Map<NodeRef<QuerySpecification>, Expression> having = new LinkedHashMap<>();
    private final Map<NodeRef<Node>, List<Expression>> orderByExpressions = new LinkedHashMap<>();
    // ORDER BY clauses proven to have no effect (may be dropped by the planner).
    private final Set<NodeRef<OrderBy>> redundantOrderBy = new HashSet<>();
    private final Map<NodeRef<Node>, List<Expression>> outputExpressions = new LinkedHashMap<>();
    private final Map<NodeRef<QuerySpecification>, List<FunctionCall>> windowFunctions = new LinkedHashMap<>();
    private final Map<NodeRef<OrderBy>, List<FunctionCall>> orderByWindowFunctions = new LinkedHashMap<>();

    // Join criteria and USING-clause field mapping per join node.
    private final Map<NodeRef<Join>, Expression> joins = new LinkedHashMap<>();
    private final Map<NodeRef<Join>, JoinUsingAnalysis> joinUsing = new LinkedHashMap<>();

    // Subqueries discovered under each node, bucketed by flavor.
    private final ListMultimap<NodeRef<Node>, InPredicate> inPredicatesSubqueries = ArrayListMultimap.create();
    private final ListMultimap<NodeRef<Node>, SubqueryExpression> scalarSubqueries = ArrayListMultimap.create();
    private final ListMultimap<NodeRef<Node>, ExistsPredicate> existsSubqueries = ArrayListMultimap.create();
    private final ListMultimap<NodeRef<Node>, QuantifiedComparisonExpression> quantifiedComparisonSubqueries = ArrayListMultimap.create();

    // Resolved connector table handle for each table reference.
    private final Map<NodeRef<Table>, TableHandle> tables = new LinkedHashMap<>();

    // Type information: analyzed type per expression, plus requested coercions.
    private final Map<NodeRef<Expression>, Type> types = new LinkedHashMap<>();
    private final Map<NodeRef<Expression>, Type> coercions = new LinkedHashMap<>();
    // Coercions that change only the declared type, not the representation.
    private final Set<NodeRef<Expression>> typeOnlyCoercions = new LinkedHashSet<>();
    // Per-relation output-column coercions (e.g. for set operations).
    private final Map<NodeRef<Relation>, List<Type>> relationCoercions = new LinkedHashMap<>();
    // Resolved function implementation per call site.
    private final Map<NodeRef<FunctionCall>, FunctionHandle> functionHandles = new LinkedHashMap<>();
    // Identifier -> lambda argument it refers to (for lambda bodies).
    private final Map<NodeRef<Identifier>, LambdaArgumentDeclaration> lambdaArgumentReferences = new LinkedHashMap<>();

    // Connector column handle per resolved field. NOTE: keyed by Field itself, not NodeRef.
    private final Map<Field, ColumnHandle> columns = new LinkedHashMap<>();

    // TABLESAMPLE ratio per sampled relation.
    private final Map<NodeRef<SampledRelation>, Double> sampleRatios = new LinkedHashMap<>();

    // GROUPING(...) operations found per query specification.
    private final Map<NodeRef<QuerySpecification>, List<GroupingOperation>> groupingOperations = new LinkedHashMap<>();

    // for create table
    private Optional<QualifiedObjectName> createTableDestination = Optional.empty();
    private Map<String, Expression> createTableProperties = ImmutableMap.of();
    private boolean createTableAsSelectWithData = true;
    private boolean createTableAsSelectNoOp;
    private Optional<List<Identifier>> createTableColumnAliases = Optional.empty();
    private Optional<String> createTableComment = Optional.empty();

    // Targets for INSERT / REFRESH MATERIALIZED VIEW / ANALYZE statements.
    private Optional<Insert> insert = Optional.empty();
    private Optional<RefreshMaterializedViewAnalysis> refreshMaterializedViewAnalysis = Optional.empty();
    private Optional<TableHandle> analyzeTarget = Optional.empty();

    // for describe input and describe output
    private final boolean isDescribe;

    // for recursive view detection
    private final Deque<Table> tablesForView = new ArrayDeque<>();

    // To prevent recursive analyzing of one materialized view base table
    private final ListMultimap<NodeRef<Table>, Table> tablesForMaterializedView = ArrayListMultimap.create();

    // for materialized view analysis state detection, state is used to identify if materialized view has been expanded or in-process.
    private final Map<Table, MaterializedViewAnalysisState> materializedViewAnalysisStateMap = new HashMap<>();

    /**
     * @param parameters prepared-statement parameters; copied defensively, never null
     * @param isDescribe whether this analysis serves DESCRIBE INPUT / DESCRIBE OUTPUT
     */
    public Analysis(@Nullable Statement root, List<Expression> parameters, boolean isDescribe)
    {
        requireNonNull(parameters);
        this.root = root;
        this.parameters = ImmutableList.copyOf(requireNonNull(parameters, "parameters is null"));
        this.isDescribe = isDescribe;
    }

    public Statement getStatement()
    {
        return root;
    }

    public String getUpdateType()
    {
        return updateType;
    }

    public void setUpdateType(String updateType)
    {
        this.updateType = updateType;
    }

    public boolean isCreateTableAsSelectWithData()
    {
        return createTableAsSelectWithData;
    }

    public void setCreateTableAsSelectWithData(boolean createTableAsSelectWithData)
    {
        this.createTableAsSelectWithData = createTableAsSelectWithData;
    }

    public boolean isCreateTableAsSelectNoOp()
    {
        return createTableAsSelectNoOp;
    }

    public void setCreateTableAsSelectNoOp(boolean createTableAsSelectNoOp)
    {
        this.createTableAsSelectNoOp = createTableAsSelectNoOp;
    }

    public void setAggregates(QuerySpecification node, List<FunctionCall> aggregates)
    {
        this.aggregates.put(NodeRef.of(node), ImmutableList.copyOf(aggregates));
    }

    public List<FunctionCall> getAggregates(QuerySpecification query)
    {
        return aggregates.get(NodeRef.of(query));
    }

    public void setOrderByAggregates(OrderBy node, List<Expression> aggregates)
    {
        this.orderByAggregates.put(NodeRef.of(node), ImmutableList.copyOf(aggregates));
    }

    public List<Expression> getOrderByAggregates(OrderBy node)
    {
        return orderByAggregates.get(NodeRef.of(node));
    }

    public Map<NodeRef<Expression>, Type> getTypes()
    {
        return unmodifiableMap(types);
    }

    /**
     * Returns the analyzed type of {@code expression}; fails if the expression
     * was never analyzed (a bug in the caller, hence checkArgument).
     */
    public Type getType(Expression expression)
    {
        Type type = types.get(NodeRef.of(expression));
        checkArgument(type != null, "Expression not analyzed: %s", expression);
        return type;
    }

    /**
     * Like {@link #getType}, but returns the coerced-to type when a coercion
     * has been recorded for the expression.
     */
    public Type getTypeWithCoercions(Expression expression)
    {
        NodeRef<Expression> key = NodeRef.of(expression);
        checkArgument(types.containsKey(key), "Expression not analyzed: %s", expression);
        if (coercions.containsKey(key)) {
            return coercions.get(key);
        }
        return types.get(key);
    }

    // Returns null (not empty) when no coercion was recorded for the relation.
    public Type[] getRelationCoercion(Relation relation)
    {
        return Optional.ofNullable(relationCoercions.get(NodeRef.of(relation)))
                .map(types -> types.stream().toArray(Type[]::new))
                .orElse(null);
    }

    public void addRelationCoercion(Relation relation, Type[] types)
    {
        relationCoercions.put(NodeRef.of(relation), ImmutableList.copyOf(types));
    }

    public Map<NodeRef<Expression>, Type> getCoercions()
    {
        return unmodifiableMap(coercions);
    }

    public Set<NodeRef<Expression>> getTypeOnlyCoercions()
    {
        return unmodifiableSet(typeOnlyCoercions);
    }

    public Type getCoercion(Expression expression)
    {
        return coercions.get(NodeRef.of(expression));
    }

    public void addLambdaArgumentReferences(Map<NodeRef<Identifier>, LambdaArgumentDeclaration> lambdaArgumentReferences)
    {
        this.lambdaArgumentReferences.putAll(lambdaArgumentReferences);
    }

    public LambdaArgumentDeclaration getLambdaArgumentReference(Identifier identifier)
    {
        return lambdaArgumentReferences.get(NodeRef.of(identifier));
    }

    public Map<NodeRef<Identifier>, LambdaArgumentDeclaration> getLambdaArgumentReferences()
    {
        return unmodifiableMap(lambdaArgumentReferences);
    }

    public void setGroupingSets(QuerySpecification node, GroupingSetAnalysis groupingSets)
    {
        this.groupingSets.put(NodeRef.of(node), groupingSets);
    }

    public void setGroupByExpressions(QuerySpecification node, List<Expression> expressions)
    {
        groupByExpressions.put(NodeRef.of(node), expressions);
    }

    // A query is an aggregation iff GROUP BY expressions were recorded for it
    // (an empty list — implicit global aggregation — still counts).
    public boolean isAggregation(QuerySpecification node)
    {
        return groupByExpressions.containsKey(NodeRef.of(node));
    }

    public boolean isTypeOnlyCoercion(Expression expression)
    {
        return typeOnlyCoercions.contains(NodeRef.of(expression));
    }

    public GroupingSetAnalysis getGroupingSets(QuerySpecification node)
    {
        return groupingSets.get(NodeRef.of(node));
    }

    public List<Expression> getGroupByExpressions(QuerySpecification node)
    {
        return groupByExpressions.get(NodeRef.of(node));
    }

    public void setWhere(Node node, Expression expression)
    {
        where.put(NodeRef.of(node), expression);
    }

    public Expression getWhere(QuerySpecification node)
    {
        // Explicit type witness: the map is keyed by NodeRef<Node>, not NodeRef<QuerySpecification>.
        return where.get(NodeRef.<Node>of(node));
    }

    public void setOrderByExpressions(Node node, List<Expression> items)
    {
        orderByExpressions.put(NodeRef.of(node), ImmutableList.copyOf(items));
    }

    public List<Expression> getOrderByExpressions(Node node)
    {
        return orderByExpressions.get(NodeRef.of(node));
    }

    public void setOutputExpressions(Node node, List<Expression> expressions)
    {
        outputExpressions.put(NodeRef.of(node), ImmutableList.copyOf(expressions));
    }

    public List<Expression> getOutputExpressions(Node node)
    {
        return outputExpressions.get(NodeRef.of(node));
    }

    public void setHaving(QuerySpecification node, Expression expression)
    {
        having.put(NodeRef.of(node), expression);
    }

    public void setJoinCriteria(Join node, Expression criteria)
    {
        joins.put(NodeRef.of(node), criteria);
    }

    public Expression getJoinCriteria(Join join)
    {
        return joins.get(NodeRef.of(join));
    }

    /**
     * Copies all subqueries found by an {@link ExpressionAnalysis} pass under
     * {@code node} into the per-flavor buckets of this analysis.
     */
    public void recordSubqueries(Node node, ExpressionAnalysis expressionAnalysis)
    {
        NodeRef<Node> key = NodeRef.of(node);
        this.inPredicatesSubqueries.putAll(key, dereference(expressionAnalysis.getSubqueryInPredicates()));
        this.scalarSubqueries.putAll(key, dereference(expressionAnalysis.getScalarSubqueries()));
        this.existsSubqueries.putAll(key, dereference(expressionAnalysis.getExistsSubqueries()));
        this.quantifiedComparisonSubqueries.putAll(key, dereference(expressionAnalysis.getQuantifiedComparisons()));
    }

    // Unwraps NodeRefs back to the AST nodes they point at.
    private <T extends Node> List<T> dereference(Collection<NodeRef<T>> nodeRefs)
    {
        return nodeRefs.stream()
                .map(NodeRef::getNode)
                .collect(toImmutableList());
    }

    public List<InPredicate> getInPredicateSubqueries(Node node)
    {
        return ImmutableList.copyOf(inPredicatesSubqueries.get(NodeRef.of(node)));
    }

    public List<SubqueryExpression> getScalarSubqueries(Node node)
    {
        return ImmutableList.copyOf(scalarSubqueries.get(NodeRef.of(node)));
    }

    // Linear scan over all recorded scalar subqueries; acceptable for typical query sizes.
    public boolean isScalarSubquery(SubqueryExpression subqueryExpression)
    {
        return scalarSubqueries.values().contains(subqueryExpression);
    }

    public List<ExistsPredicate> getExistsSubqueries(Node node)
    {
        return ImmutableList.copyOf(existsSubqueries.get(NodeRef.of(node)));
    }

    public List<QuantifiedComparisonExpression> getQuantifiedComparisonSubqueries(Node node)
    {
        return unmodifiableList(quantifiedComparisonSubqueries.get(NodeRef.of(node)));
    }

    public void setWindowFunctions(QuerySpecification node, List<FunctionCall> functions)
    {
        windowFunctions.put(NodeRef.of(node), ImmutableList.copyOf(functions));
    }

    public List<FunctionCall> getWindowFunctions(QuerySpecification query)
    {
        return windowFunctions.get(NodeRef.of(query));
    }

    public void setOrderByWindowFunctions(OrderBy node, List<FunctionCall> functions)
    {
        orderByWindowFunctions.put(NodeRef.of(node), ImmutableList.copyOf(functions));
    }

    public List<FunctionCall> getOrderByWindowFunctions(OrderBy query)
    {
        return orderByWindowFunctions.get(NodeRef.of(query));
    }

    public void addColumnReferences(Map<NodeRef<Expression>, FieldId> columnReferences)
    {
        this.columnReferences.putAll(forMap(columnReferences));
    }

    public void addColumnReference(NodeRef<Expression> node, FieldId fieldId)
    {
        this.columnReferences.put(node, fieldId);
    }

    /**
     * Returns the scope for {@code node}, failing loudly when none was recorded.
     */
    public Scope getScope(Node node)
    {
        return tryGetScope(node).orElseThrow(() -> new IllegalArgumentException(format("Analysis does not contain information for node: %s", node)));
    }

    public Optional<Scope> tryGetScope(Node node)
    {
        NodeRef<Node> key = NodeRef.of(node);
        if (scopes.containsKey(key)) {
            return Optional.of(scopes.get(key));
        }
        return Optional.empty();
    }

    public Scope getRootScope()
    {
        return getScope(root);
    }

    public void setScope(Node node, Scope scope)
    {
        scopes.put(NodeRef.of(node), scope);
    }

    public RelationType getOutputDescriptor()
    {
        return getOutputDescriptor(root);
    }

    public RelationType getOutputDescriptor(Node node)
    {
        return getScope(node).getRelationType();
    }

    public TableHandle getTableHandle(Table table)
    {
        return tables.get(NodeRef.of(table));
    }

    public Collection<TableHandle> getTables()
    {
        return unmodifiableCollection(tables.values());
    }

    public List<Table> getTableNodes()
    {
        return tables.keySet().stream().map(NodeRef::getNode).collect(toImmutableList());
    }

    public void registerTable(Table table, TableHandle handle)
    {
        tables.put(NodeRef.of(table), handle);
    }

    public FunctionHandle getFunctionHandle(FunctionCall function)
    {
        return functionHandles.get(NodeRef.of(function));
    }

    public Map<NodeRef<FunctionCall>, FunctionHandle> getFunctionHandles()
    {
        return ImmutableMap.copyOf(functionHandles);
    }

    public void addFunctionHandles(Map<NodeRef<FunctionCall>, FunctionHandle> infos)
    {
        functionHandles.putAll(infos);
    }

    public Set<NodeRef<Expression>> getColumnReferences()
    {
        return unmodifiableSet(columnReferences.keySet());
    }

    public Multimap<NodeRef<Expression>, FieldId> getColumnReferenceFields()
    {
        return unmodifiableMultimap(columnReferences);
    }

    public boolean isColumnReference(Expression expression)
    {
        requireNonNull(expression, "expression is null");
        // getType throws if the expression was never analyzed, so this doubles as a sanity check.
        checkArgument(getType(expression) != null, "expression %s has not been analyzed", expression);
        return columnReferences.containsKey(NodeRef.of(expression));
    }

    public void addTypes(Map<NodeRef<Expression>, Type> types)
    {
        this.types.putAll(types);
    }

    public void addCoercion(Expression expression, Type type, boolean isTypeOnlyCoercion)
    {
        this.coercions.put(NodeRef.of(expression), type);
        if (isTypeOnlyCoercion) {
            this.typeOnlyCoercions.add(NodeRef.of(expression));
        }
    }

    public void addCoercions(Map<NodeRef<Expression>, Type> coercions, Set<NodeRef<Expression>> typeOnlyCoercions)
    {
        this.coercions.putAll(coercions);
        this.typeOnlyCoercions.addAll(typeOnlyCoercions);
    }

    public Expression getHaving(QuerySpecification query)
    {
        return having.get(NodeRef.of(query));
    }

    public void setColumn(Field field, ColumnHandle handle)
    {
        columns.put(field, handle);
    }

    public ColumnHandle getColumn(Field field)
    {
        return columns.get(field);
    }

    public void setCreateTableDestination(QualifiedObjectName destination)
    {
        this.createTableDestination = Optional.of(destination);
    }

    public Optional<QualifiedObjectName> getCreateTableDestination()
    {
        return createTableDestination;
    }

    public Optional<TableHandle> getAnalyzeTarget()
    {
        return analyzeTarget;
    }

    public void setAnalyzeTarget(TableHandle analyzeTarget)
    {
        this.analyzeTarget = Optional.of(analyzeTarget);
    }

    public void setCreateTableProperties(Map<String, Expression> createTableProperties)
    {
        this.createTableProperties = ImmutableMap.copyOf(createTableProperties);
    }

    public Map<String, Expression> getCreateTableProperties()
    {
        return createTableProperties;
    }

    public Optional<List<Identifier>> getColumnAliases()
    {
        return createTableColumnAliases;
    }

    public void setCreateTableColumnAliases(List<Identifier> createTableColumnAliases)
    {
        this.createTableColumnAliases = Optional.of(createTableColumnAliases);
    }

    public void setCreateTableComment(Optional<String> createTableComment)
    {
        this.createTableComment = requireNonNull(createTableComment);
    }

    public Optional<String> getCreateTableComment()
    {
        return createTableComment;
    }

    public void setInsert(Insert insert)
    {
        this.insert = Optional.of(insert);
    }

    public Optional<Insert> getInsert()
    {
        return insert;
    }

    public void setRefreshMaterializedViewAnalysis(RefreshMaterializedViewAnalysis refreshMaterializedViewAnalysis)
    {
        this.refreshMaterializedViewAnalysis = Optional.of(refreshMaterializedViewAnalysis);
    }

    public Optional<RefreshMaterializedViewAnalysis> getRefreshMaterializedViewAnalysis()
    {
        return refreshMaterializedViewAnalysis;
    }

    public Query getNamedQuery(Table table)
    {
        return namedQueries.get(NodeRef.of(table));
    }

    public void registerNamedQuery(Table tableReference, Query query)
    {
        requireNonNull(tableReference, "tableReference is null");
        requireNonNull(query, "query is null");

        namedQueries.put(NodeRef.of(tableReference), query);
    }

    // View-expansion stack used to detect recursive views; push on enter, pop on exit.
    public void registerTableForView(Table tableReference)
    {
        tablesForView.push(requireNonNull(tableReference, "table is null"));
    }

    public void unregisterTableForView()
    {
        tablesForView.pop();
    }

    /**
     * Advances the analysis state of a materialized view: first registration
     * marks it VISITING (in-progress); registering it again while already
     * present marks it VISITED (fully expanded).
     */
    public void registerMaterializedViewForAnalysis(Table materializedView)
    {
        requireNonNull(materializedView, "materializedView is null");
        if (materializedViewAnalysisStateMap.containsKey(materializedView)) {
            materializedViewAnalysisStateMap.put(materializedView, VISITED);
        }
        else {
            materializedViewAnalysisStateMap.put(materializedView, VISITING);
        }
    }

    public void unregisterMaterializedViewForAnalysis(Table materializedView)
    {
        requireNonNull(materializedView, "materializedView is null");
        checkState(
                materializedViewAnalysisStateMap.containsKey(materializedView),
                format("materializedViewAnalysisStateMap does not contain materialized view : %s", materializedView.getName()));
        materializedViewAnalysisStateMap.remove(materializedView);
    }

    // Unregistered views report NOT_VISITED rather than null.
    public MaterializedViewAnalysisState getMaterializedViewAnalysisState(Table materializedView)
    {
        requireNonNull(materializedView, "materializedView is null");
        return materializedViewAnalysisStateMap.getOrDefault(materializedView, NOT_VISITED);
    }

    public boolean hasTableInView(Table tableReference)
    {
        return tablesForView.contains(tableReference);
    }

    public void registerTableForMaterializedView(Table view, Table table)
    {
        requireNonNull(view, "view is null");
        requireNonNull(table, "table is null");
        tablesForMaterializedView.put(NodeRef.of(view), table);
    }

    public void unregisterTableForMaterializedView(Table view, Table table)
    {
        requireNonNull(view, "view is null");
        requireNonNull(table, "table is null");
        tablesForMaterializedView.remove(NodeRef.of(view), table);
    }

    public boolean hasTableRegisteredForMaterializedView(Table view, Table table)
    {
        requireNonNull(view, "view is null");
        requireNonNull(table, "table is null");
        return tablesForMaterializedView.containsEntry(NodeRef.of(view), table);
    }

    public void setSampleRatio(SampledRelation relation, double ratio)
    {
        sampleRatios.put(NodeRef.of(relation), ratio);
    }

    public double getSampleRatio(SampledRelation relation)
    {
        NodeRef<SampledRelation> key = NodeRef.of(relation);
        checkState(sampleRatios.containsKey(key), "Sample ratio missing for %s. Broken analysis?", relation);
        return sampleRatios.get(key);
    }

    public void setGroupingOperations(QuerySpecification querySpecification, List<GroupingOperation> groupingOperations)
    {
        this.groupingOperations.put(NodeRef.of(querySpecification), ImmutableList.copyOf(groupingOperations));
    }

    public List<GroupingOperation> getGroupingOperations(QuerySpecification querySpecification)
    {
        return Optional.ofNullable(groupingOperations.get(NodeRef.of(querySpecification)))
                .orElse(emptyList());
    }

    public List<Expression> getParameters()
    {
        return parameters;
    }

    public boolean isDescribe()
    {
        return isDescribe;
    }

    public void setJoinUsing(Join node, JoinUsingAnalysis analysis)
    {
        joinUsing.put(NodeRef.of(node), analysis);
    }

    public JoinUsingAnalysis getJoinUsing(Join node)
    {
        return joinUsing.get(NodeRef.of(node));
    }

    /**
     * Merges column references for (accessControl, identity) into the running
     * per-table column sets used later for access-control checks.
     */
    public void addTableColumnReferences(AccessControl accessControl, Identity identity, Multimap<QualifiedObjectName, String> tableColumnMap)
    {
        AccessControlInfo accessControlInfo = new AccessControlInfo(accessControl, identity);
        Map<QualifiedObjectName, Set<String>> references = tableColumnReferences.computeIfAbsent(accessControlInfo, k -> new LinkedHashMap<>());
        tableColumnMap.asMap()
                .forEach((key, value) -> references.computeIfAbsent(key, k -> new HashSet<>()).addAll(value));
    }

    // Records that a table was touched even when no specific columns were referenced.
    public void addEmptyColumnReferencesForTable(AccessControl accessControl, Identity identity, QualifiedObjectName table)
    {
        AccessControlInfo accessControlInfo = new AccessControlInfo(accessControl, identity);
        tableColumnReferences.computeIfAbsent(accessControlInfo, k -> new LinkedHashMap<>()).computeIfAbsent(table, k -> new HashSet<>());
    }

    public Map<AccessControlInfo, Map<QualifiedObjectName, Set<String>>> getTableColumnReferences()
    {
        return tableColumnReferences;
    }

    public void addUtilizedTableColumnReferences(AccessControlInfo accessControlInfo, Map<QualifiedObjectName, Set<String>> utilizedTableColumms)
    {
        utilizedTableColumnReferences.put(accessControlInfo, utilizedTableColumms);
    }

    public Map<AccessControlInfo, Map<QualifiedObjectName, Set<String>>> getUtilizedTableColumnReferences()
    {
        return ImmutableMap.copyOf(utilizedTableColumnReferences);
    }

    /**
     * Chooses which reference set access control should check, based on the
     * session's "check utilized columns only" property.
     */
    public Map<AccessControlInfo, Map<QualifiedObjectName, Set<String>>> getTableColumnReferencesForAccessControl(Session session)
    {
        return isCheckAccessControlOnUtilizedColumnsOnly(session) ? utilizedTableColumnReferences : tableColumnReferences;
    }

    public void markRedundantOrderBy(OrderBy orderBy)
    {
        redundantOrderBy.add(NodeRef.of(orderBy));
    }

    public boolean isOrderByRedundant(OrderBy orderBy)
    {
        return redundantOrderBy.contains(NodeRef.of(orderBy));
    }

    /**
     * Maps each visible output field name to its origin table/column, for
     * fields whose origin is known.
     */
    public Map<String, Map<SchemaTableName, String>> getOriginalColumnMapping(Node node)
    {
        return getOutputDescriptor(node).getVisibleFields().stream()
                .filter(field -> field.getOriginTable().isPresent() && field.getOriginColumnName().isPresent())
                .collect(toImmutableMap(
                        field -> field.getName().get(),
                        field -> ImmutableMap.of(toSchemaTableName(field.getOriginTable().get()), field.getOriginColumnName().get())));
    }

    /**
     * Immutable target description for an INSERT statement.
     */
    @Immutable
    public static final class Insert
    {
        private final TableHandle target;
        private final List<ColumnHandle> columns;

        public Insert(TableHandle target, List<ColumnHandle> columns)
        {
            this.target = requireNonNull(target, "target is null");
            this.columns = requireNonNull(columns, "columns is null");
            checkArgument(columns.size() > 0, "No columns given to insert");
        }

        public List<ColumnHandle> getColumns()
        {
            return columns;
        }

        public TableHandle getTarget()
        {
            return target;
        }
    }

    /**
     * Immutable target description for REFRESH MATERIALIZED VIEW: the storage
     * table, its columns, and the query that repopulates it.
     */
    @Immutable
    public static final class RefreshMaterializedViewAnalysis
    {
        private final TableHandle target;
        private final List<ColumnHandle> columns;
        private final Query query;

        public RefreshMaterializedViewAnalysis(TableHandle target, List<ColumnHandle> columns, Query query)
        {
            this.target = requireNonNull(target, "target is null");
            this.columns = requireNonNull(columns, "columns is null");
            this.query = requireNonNull(query, "query is null");
            checkArgument(columns.size() > 0, "No columns given to insert");
        }

        public List<ColumnHandle> getColumns()
        {
            return columns;
        }

        public TableHandle getTarget()
        {
            return target;
        }

        public Query getQuery()
        {
            return query;
        }
    }

    /**
     * Field-index bookkeeping for JOIN ... USING: which positions of each side
     * participate in the USING columns and which are the remaining fields.
     */
    public static final class JoinUsingAnalysis
    {
        private final List<Integer> leftJoinFields;
        private final List<Integer> rightJoinFields;
        private final List<Integer> otherLeftFields;
        private final List<Integer> otherRightFields;

        JoinUsingAnalysis(List<Integer> leftJoinFields, List<Integer> rightJoinFields, List<Integer> otherLeftFields, List<Integer> otherRightFields)
        {
            this.leftJoinFields = ImmutableList.copyOf(leftJoinFields);
            this.rightJoinFields = ImmutableList.copyOf(rightJoinFields);
            this.otherLeftFields = ImmutableList.copyOf(otherLeftFields);
            this.otherRightFields = ImmutableList.copyOf(otherRightFields);

            checkArgument(leftJoinFields.size() == rightJoinFields.size(), "Expected join fields for left and right to have the same size");
        }

        public List<Integer> getLeftJoinFields()
        {
            return leftJoinFields;
        }

        public List<Integer> getRightJoinFields()
        {
            return rightJoinFields;
        }

        public List<Integer> getOtherLeftFields()
        {
            return otherLeftFields;
        }

        public List<Integer> getOtherRightFields()
        {
            return otherRightFields;
        }
    }

    /**
     * Decomposition of a GROUP BY clause into CUBE, ROLLUP, ordinary grouping
     * sets, and non-column ("complex") grouping expressions.
     */
    public static class GroupingSetAnalysis
    {
        private final List<Set<FieldId>> cubes;
        private final List<List<FieldId>> rollups;
        private final List<List<Set<FieldId>>> ordinarySets;
        private final List<Expression> complexExpressions;

        public GroupingSetAnalysis(
                List<Set<FieldId>> cubes,
                List<List<FieldId>> rollups,
                List<List<Set<FieldId>>> ordinarySets,
                List<Expression> complexExpressions)
        {
            this.cubes = ImmutableList.copyOf(cubes);
            this.rollups = ImmutableList.copyOf(rollups);
            this.ordinarySets = ImmutableList.copyOf(ordinarySets);
            this.complexExpressions = ImmutableList.copyOf(complexExpressions);
        }

        public List<Set<FieldId>> getCubes()
        {
            return cubes;
        }

        public List<List<FieldId>> getRollups()
        {
            return rollups;
        }

        public List<List<Set<FieldId>>> getOrdinarySets()
        {
            return ordinarySets;
        }

        public List<Expression> getComplexExpressions()
        {
            return complexExpressions;
        }
    }

    /**
     * Expansion progress of a materialized view during analysis:
     * NOT_VISITED (never seen), VISITING (in progress), VISITED (expanded).
     */
    public enum MaterializedViewAnalysisState
    {
        NOT_VISITED(0),
        VISITING(1),
        VISITED(2);

        private final int value;

        MaterializedViewAnalysisState(int value)
        {
            this.value = value;
        }

        public boolean isNotVisited()
        {
            return this.value == NOT_VISITED.value;
        }

        public boolean isVisited()
        {
            return this.value == VISITED.value;
        }

        public boolean isVisiting()
        {
            return this.value == VISITING.value;
        }
    }

    /**
     * Value key pairing an {@link AccessControl} implementation with an
     * {@link Identity}; used to bucket column references per checking context.
     * equals/hashCode are defined over both components so instances can serve
     * as map keys.
     */
    public static final class AccessControlInfo
    {
        private final AccessControl accessControl;
        private final Identity identity;

        public AccessControlInfo(AccessControl accessControl, Identity identity)
        {
            this.accessControl = requireNonNull(accessControl, "accessControl is null");
            this.identity = requireNonNull(identity, "identity is null");
        }

        public AccessControl getAccessControl()
        {
            return accessControl;
        }

        public Identity getIdentity()
        {
            return identity;
        }

        @Override
        public boolean equals(Object o)
        {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            AccessControlInfo that = (AccessControlInfo) o;
            return Objects.equals(accessControl, that.accessControl) &&
                    Objects.equals(identity, that.identity);
        }

        @Override
        public int hashCode()
        {
            return Objects.hash(accessControl, identity);
        }

        @Override
        public String toString()
        {
            return format("AccessControl: %s, Identity: %s", accessControl.getClass(), identity);
        }
    }
}
/* * Copyright 2016 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.config; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import com.thoughtworks.go.config.remote.ConfigOrigin; import com.thoughtworks.go.config.remote.ConfigReposConfig; import com.thoughtworks.go.domain.ConfigErrors; import com.thoughtworks.go.domain.EnvironmentPipelineMatcher; import com.thoughtworks.go.util.command.EnvironmentVariableContext; import org.apache.commons.lang.builder.ToStringBuilder; import static com.thoughtworks.go.util.command.EnvironmentVariableContext.GO_ENVIRONMENT_NAME; /** * @understands the current persistent information related to a logical grouping of machines */ @ConfigTag("environment") public class BasicEnvironmentConfig implements EnvironmentConfig { @ConfigAttribute(value = NAME_FIELD, optional = false) private CaseInsensitiveString name; @ConfigSubtag private EnvironmentVariablesConfig variables = new EnvironmentVariablesConfig(); @ConfigSubtag private EnvironmentAgentsConfig agents = new EnvironmentAgentsConfig(); @ConfigSubtag private EnvironmentPipelinesConfig pipelines = new EnvironmentPipelinesConfig(); private final ConfigErrors configErrors = new ConfigErrors(); private ConfigOrigin origin; public BasicEnvironmentConfig() { } public BasicEnvironmentConfig(final CaseInsensitiveString name) { this.name = name; } @Override public void validate(ValidationContext validationContext) 
{ // each of these references is defined in this.origin for (EnvironmentPipelineConfig pipelineRefConfig : this.pipelines) { ConfigReposConfig configRepos = validationContext.getConfigRepos(); PipelineConfig pipelineConfig = validationContext.getPipelineConfigByName(pipelineRefConfig.getName()); if (pipelineConfig == null) { continue;//other rule will error that we reference unknown pipeline } if (validationContext.shouldCheckConfigRepo()) { if (!configRepos.isReferenceAllowed(this.origin, pipelineConfig.getOrigin())) pipelineRefConfig.addError(EnvironmentPipelineConfig.ORIGIN, String.format("Environment defined in %s cannot reference a pipeline in %s", this.origin, displayNameFor(pipelineConfig.getOrigin()))); } } } private String displayNameFor(ConfigOrigin origin) { return origin != null ? origin.displayName() : "cruise-config.xml"; } @Override public ConfigErrors errors() { return configErrors; } @Override public void addError(String fieldName, String message) { configErrors.add(fieldName, message); } @Override public EnvironmentPipelineMatcher createMatcher() { return new EnvironmentPipelineMatcher(name, agents.getUuids(), pipelines); } @Override public boolean hasAgent(String uuid) { for (EnvironmentAgentConfig agent : agents) { if (agent.hasUuid(uuid)) { return true; } } return false; } @Override public boolean validateContainsOnlyUuids(Set<String> uuids) { boolean isValid = true; for (EnvironmentAgentConfig agent : agents) { isValid = agent.validateUuidPresent(name, uuids) && isValid; } return isValid; } @Override public boolean containsPipeline(final CaseInsensitiveString pipelineName) { return pipelines.containsPipelineNamed(pipelineName); } @Override public void addAgent(String uuid) { agents.add(new EnvironmentAgentConfig(uuid)); } @Override public void addAgentIfNew(String uuid) { EnvironmentAgentConfig agentConfig = new EnvironmentAgentConfig(uuid); if (!agents.contains(agentConfig)) { agents.add(agentConfig); } } @Override public void 
removeAgent(String uuid) { agents.remove(new EnvironmentAgentConfig(uuid)); } @Override public boolean hasName(final CaseInsensitiveString environmentName) { return name.equals(environmentName); } @Override public void addPipeline(final CaseInsensitiveString pipelineName) { pipelines.add(new EnvironmentPipelineConfig(pipelineName)); } @Override public boolean contains(String pipelineName) { return pipelines.containsPipelineNamed(new CaseInsensitiveString(pipelineName)); } @Override public void validateContainsOnlyPipelines(List<CaseInsensitiveString> pipelineNames) { pipelines.validateContainsOnlyPipelines(name, pipelineNames); } @Override public boolean hasSamePipelinesAs(EnvironmentConfig other) { for (EnvironmentPipelineConfig pipeline : pipelines) { for(CaseInsensitiveString name : other.getPipelineNames()) { if(name.equals(pipeline.getName())) return true; } } return false; } @Override public CaseInsensitiveString name() { return name; } public void setName(CaseInsensitiveString name) { this.name = name; } @Override public EnvironmentAgentsConfig getAgents() { return agents; } public void setAgents(List<EnvironmentAgentConfig> agents) { this.agents.clear(); this.agents.addAll(agents); } @Override public boolean equals(Object o) { if (this == o) { return true; } EnvironmentConfig that = as(EnvironmentConfig.class,o); if(that == null) return false; if (agents != null ? !agents.equals(that.getAgents()) : that.getAgents() != null) { return false; } if (name != null ? !name.equals(that.name()) : that.name() != null) { return false; } if (pipelines != null ? !pipelines.equals(that.getPipelines()) : that.getPipelines() != null) { return false; } if (variables != null ? !variables.equals(that.getVariables()) : that.getVariables() != null) { return false; } return true; } @Override public int hashCode() { int result = (name != null ? name.hashCode() : 0); result = 31 * result + (agents != null ? agents.hashCode() : 0); result = 31 * result + (pipelines != null ? 
pipelines.hashCode() : 0); result = 31 * result + (variables != null ? variables.hashCode() : 0); return result; } private static <T> T as(Class<T> clazz, Object o){ if(clazz.isInstance(o)){ return clazz.cast(o); } return null; } @Override public String toString() { return ToStringBuilder.reflectionToString(this); } @Override public void addEnvironmentVariable(String name, String value) { variables.add(new EnvironmentVariableConfig(name.trim(), value)); } @Override public EnvironmentVariableContext createEnvironmentContext() { EnvironmentVariableContext context = new EnvironmentVariableContext( GO_ENVIRONMENT_NAME, CaseInsensitiveString.str(name)); variables.addTo(context); return context; } @Override public List<CaseInsensitiveString> getPipelineNames() { ArrayList<CaseInsensitiveString> pipelineNames = new ArrayList<>(); for (EnvironmentPipelineConfig pipeline : pipelines) { pipelineNames.add(pipeline.getName()); } return pipelineNames; } @Override public EnvironmentPipelinesConfig getPipelines() { return pipelines; } public void setPipelines(List<EnvironmentPipelineConfig> pipelines) { this.pipelines.clear(); this.pipelines.addAll(pipelines); } @Override public boolean hasVariable(String variableName) { return variables.hasVariable(variableName); } @Override public EnvironmentVariablesConfig getVariables() { return variables; } public void setVariables(EnvironmentVariablesConfig environmentVariables) { this.variables = environmentVariables; } @Override public void setConfigAttributes(Object attributes) { if (attributes == null) { return; } Map attributeMap = (Map) attributes; if (attributeMap.containsKey(NAME_FIELD)) { name = new CaseInsensitiveString((String) attributeMap.get(NAME_FIELD)); } if (attributeMap.containsKey(PIPELINES_FIELD)) { pipelines.setConfigAttributes(attributeMap.get(PIPELINES_FIELD)); } if (attributeMap.containsKey(AGENTS_FIELD)) { agents.setConfigAttributes(attributeMap.get(AGENTS_FIELD)); } if (attributeMap.containsKey(VARIABLES_FIELD)) { 
variables.setConfigAttributes(attributeMap.get(VARIABLES_FIELD)); } } @Override public EnvironmentVariablesConfig getPlainTextVariables() { return variables.getPlainTextVariables(); } @Override public EnvironmentVariablesConfig getSecureVariables() { return variables.getSecureVariables(); } @Override public EnvironmentConfig getLocal() { if(this.isLocal()) return this; else return null; } @Override public ConfigOrigin getOrigin() { return origin; } @Override public void setOrigins(ConfigOrigin origins) { this.origin = origins; for(EnvironmentVariableConfig environmentVariableConfig : this.variables) { environmentVariableConfig.setOrigins(origins); } } @Override public EnvironmentPipelinesConfig getRemotePipelines() { if(this.isLocal()) return new EnvironmentPipelinesConfig(); else return this.pipelines; } @Override public EnvironmentAgentsConfig getLocalAgents() { if(this.isLocal()) return this.agents; else return new EnvironmentAgentsConfig(); } public boolean isLocal() { return this.origin == null || this.origin.isLocal(); } @Override public boolean isEnvironmentEmpty() { return this.variables.isEmpty() && this.agents.isEmpty() && this.pipelines.isEmpty(); } @Override public boolean containsPipelineRemotely(CaseInsensitiveString pipelineName) { if(this.isLocal()) return false; if(!this.containsPipeline(pipelineName)) return false; return true; } }
package ru.fastcards.utils; import ru.fastcards.R; /** * * @author Denis V * @since 20.11.2013 modified *Add TABLE_APPEAL */ public final class Constants { private Constants(){}; // public static final int PHOTO_BOX_SCROLL_VIEW_ROWS_NUMBER = 3; // public static final int SHOP_CARDS_COLUMNS_NUM = 3; public static final String CACHE_FILE_NAME = "The_project_cache"; public static final String CACHE_VKONTAKTE_TOKEN = "vkontakte_cached_token"; public static final String CACHE_VKONTAKTE_USER_ID = "vkontakte_cached_user_id"; public static final String CACHE_FACEBOOK_TOKEN = "facebook_cached_token"; public static final String CACHE_INSTAGRAM_TOKEN = "instagram_cached_token"; public static final String CACHE_FASTCARDS_USER_ID = "fastcards_user_id"; public static final String CACHE_USER_WEALTH = "user_wealth"; public static final String API_ID = "2800844"; //using here vk id just for now public static final String id_facebook= "1414152868808243"; //Request constants for startActivityForResult public static final int REQUEST_VK_AUTH = 101; public static final int REQUEST_MESSAGE_POSTCARD_VK = 102; public static final int REQUEST_WALL_POSTCARD_VK = 103; public static final int REQUEST_MESSAGE_POSTCARD_FB = 104; public static final int REQUEST_WALL_POSTCARD_FB = 105; public static final int REQUEST_FACEBOOK = 106;//"WEB_VIEW_AUTH_HANDLER_TOKEN_KEY"; public static final int REQUEST_OK = 107; public static final int REQUEST_WALL_POSTCARD_OK = 108; public static final int REQUEST_MESSAGE_POSTCARD_OK = 109; public static final int REQUEST_DISPLAYER = 110; public static final int REQUEST_VK_RECIPIENTS = 111; public static final int REQUEST_CONTACTS = 112; public static final int REQUEST_CREATE_EVENT = 113; public static final int REQUEST_CATEGORY = 114; public static final int REQUEST_CATEGORY_GROUP = 115; public static final int REQUEST_RECIPIENTS = 116; public static final int REQUEST_NOTIFICATION = 117; public static final int REQUEST_BUY = 118; public static final int 
REQUEST_MANAGE_EVENT = 119; public static final int REQUEST_MODIFY_GROUP = 120; public static final int REQUEST_MODIFY_CONTACT = 121; public static final int REQUEST_FB_RECIPIENTS = 122; public static final int REQUEST_FB_AUTH = 123; public static final int REQUEST_RECIPIENTS_GROUP = 124; public static final int REQUEST_SEND_MMS = 125; public static final int REQUEST_SEND_EMAIL = 126; public static final int REQUEST_VK_AUTH_SENDYOURSELF = 127; public static final int REQUEST_CONTACTS_EMAIL = 128; public static final int REQUEST_CONTACTS_MSG = 129; //Extras to pass data between activities public static final String EXTRA_ALBUM_ID = "album_id"; public static final String EXTRA_PHOTOS_NUMBER = "photos_number"; public static final String EXTRA_USER_ID = "user_id"; public static final String EXTRA_LOAD_TASK = "load_task"; public static final String EXTRA_CONTENT_OWNER = "whoose_albums"; public static final String EXTRA_PHOTO_URL = "photo_url"; public static final String EXTRA_POSTCARDS_URL_ARRAY = "photos_url_array"; public static final String EXTRA_INITIAL_PAGE = "intial_page"; public static final String EXTRA_TITLE = "extra_title"; public static final String EXTRA_RECIPIENTS_IDS = "friend_ids"; public static final String EXTRA_RECIPIENTS_POSTCARD_ID = "extra_postcard_id"; public static final String EXTRA_NAME = "extra_recipients_name"; public static final String EXTRA_RECIPIENTS_THUMB_URL = "extra_recipients_thumb_url"; public static final String EXTRA_RECIPIENTS_GENDER = "extra_recipients_gender"; public static final String EXTRA_RECIPIENTS_ORIGIN = "extra_recipients_origin"; public static final String EXTRA_RECIPIENTS_NICK_NAME = "nickname_extra"; public static final String EXTRA_SELECTED_POSTCARDS_IDS = "selected_postcard_ids"; public static final String EXTRA_THEME_ID = "postcards_ids_array"; public static final String EXTRA_PARENT_ITEM_ID = "parent_item_id"; public static final String EXTRA_PUSH = "push"; public static final String EXTRA_CATEGORY_ID = "category"; 
public static final String EXTRA_DATE = "extra_date"; public static final String EXTRA_PERIODICITY = "periodicity"; public static final String EXTRA_CATEGORY_GROUP = "categories_array"; public static final String EXTRA_ID = "id"; public static final String EXTRA_EVENT_ID = "event_id"; public static final String EXTRA_TYPE = "type"; public static final String EXTRA_PROJECT_ID = "ProjectId"; public static final String EXTRA_NOTIFICATION = "Notification"; public static final String EXTRA_EVENT_LOCATION = "modified_event_location"; public static final String EXTRA_POSTCARD_SELECTED = "selected_postcard_type"; public static final String EXTRA_COMUNICATION_TYPE = "extra_com_type"; public static final String EXTRA_RECIPIENTS_GROUP = "recipients_group"; public static final String EXTRA_IS_GROUP = "isGroup"; public static final String EXTRA_MESSAGE = "Message"; public static final String EXTRA_PURCHASE_ID = "purchase_id"; public static final String EXTRA_COMUNICATION_FILTER = "CommunicztionFilter"; public static final String EXTRA_FLAG = "ExtraFlag"; public static final String EXTRA_POSITION = "ExtraPosition"; public static final String EXTRA_SIZE = "Size"; //DataBase constants public static final String DATA_BASE_NAME = "TheFastcardsDataBase"; public static final String TABLE_CONTACTS = "Contacts"; public static final String TABLE_EVENTS_RECIPIENTS = "tableEventsRecipients"; public static final String TABLE_EVENTS = "tableEvents"; public static final String TABLE_COMUNICATION = "TableComunication"; public static final String TABLE_CATEGORY = "TableCategory"; public static final String TABLE_CATEGORY_GROUP = "TableCategoryGroup"; public static final String TABLE_THEMES = "TableThemes"; public static final String TABLE_TEXT_PACKS = "TextPacks"; public static final String TABLE_TEXTS = "Texts"; public static final String TABLE_APPEALS = "AppealsTable"; public static final String TABLE_VERSIONS = "Versions"; public static final String TABLE_PROJECTS = "TableProjects"; public 
static final String TABLE_LISTS = "TableContactGroups"; public static final String TABLE_LIST_CONTACTS = "TableGroupsRecipients"; public static final String TABLE_STARS_PURCHASES = "TableStarsPurchases"; //Origin constants - wich define where the user retrieved from public static final int ORIGIN_VK = 1501; public static final int ORIGIN_CONTACTS = 1502; public static final int ORIGIN_FB = 1503; //Comunication type constants public static final String COMUNICATION_TYPE_PHONE = "comunic_phone"; public static final String COMUNICATION_TYPE_EMAIL = "comunic_email"; public static final String COMUNICATION_TYPE_VK = "comunic_vk_id"; public static final String COMUNICATION_TYPE_FB = "comunic_fb_id"; public static final String COMUNICATION_TYPE_CONTACTS_ID = "comunic_contacts_id"; //Event constants public static final String EVENT_CATEGORY_ID_BIRTHDAY = "17"; public static final String EVENT_TYPE_BIRTHDAY = "event_type_birthday"; public static final String EVENT_TYPE_COMMON_HOLIDAYS = "event_type_common"; public static final String EVENT_TYPE_CUSTOM = "event_type_users"; public static final int EVENT_REPEAT_FALSE = 0; public static final int EVENT_REPEAT_TRUE = 1; public static final String PREFS_NAME="preferences_file"; public static final String RESENTLY_USED_ID="recently_used_id"; public static final String RECENTLY_USED_ARRAY_SIZE="recently_used_array_size"; public static final String RECENTLY_USED_TEXT_NUMBER="recently_used_text_number"; public static final String WITHOUT_APPEAL="whithout_appeal"; public static final int CLOSE=0; public static final int OPEN=1; public static final int POSTCARD_VERTICAL = 1819; public static final int POSTCARD_SQUARE = 1820; public static final int POSTCARD_HORIZONTAL = 1821; public static final int PURCHASE_TYPE_THEME_ID=1; public static final int PURCHASE_TYPE_TEXT_ID=2; public static final int PURCHASE_TYPE_OFFER_ID=3; public static final int PURCHASE_TYPE_MONEY_ID=4; public static final int CREATE_EVENT_CALL = 15; public static 
final int EDITOR_CALL = 25; public static final int MANADGER_CALL = 35; public static final String SHOP_DIALOG="shop_dialog"; public static final String POSTCARD_TEXT="postcard_text"; public static final String POSTCARD_TITLE_TEXT="postcard_title_text"; public static final String POSTCARD_SIGNATURE_TEXT="postcard_signature_text"; public static final String POSTCARD_SIGNATURE_IMAGE="postcard_signature_image"; public static final String POSTCARD_IMAGE="postcard_image"; public static final String VERTICAL_IMAGE="vertical_image"; public static final String SQUARE_IMAGE="square_image"; public static final String POSTCARD_FRONT_IMAGE="front_image"; public static final String POSTCARD_BACK_IMAGE="back_image"; public static final String VERSIONS_CALENDAR = "Calendar"; public static final String VERSIONS_GROUPS = "Groups"; public static final String VERSIONS_CATEGORIES = "Categories"; public static final String VERSIONS_THEMES = "Themes"; public static final String VERSIONS_TEXTS = "Texts"; public static final String VERSIONS_OFFERS = "Offers"; public static final String VERSIONS_APPEALS = "Appeals"; public static final String TAB_OFFER="tab_offer"; public static final String TAB_THEME="tab_theme"; public static final String TAB_TEXT="tab_text"; public static final int ADD=R.drawable.selector_btn_add; public static final int DELETE=R.drawable.selector_btn_delete; public static final String RED="RED"; public static final String GREEN="GREEN"; public static final String BLUE="BLUE"; public static final int GET_RECOMMENDED=0; public static final int GET_NEW=1; public static final int GET_BEST_SELLERS=2; public static final String PURCHASE_STARS = "StarsPurchase"; public static final int TRANSACTION_SUCCED = 0; public static final int NOT_ENOUGH_STARS=1; public static final String SIGNATURE_VERTICAL="SignVertical"; public static final String SIGNATURE_PRINT="SignPrint"; public static final String CACHE_USER_PRIMARY_CONTACT = "PrimaryContact"; public static final String 
FLAG_MODIFY_TEMPORARY = "FlagModifyTemporary"; // public static final String COMUNICATION_FILTER_MAIL = "FilterMail"; // public static final String COMUNICATION_FILTER_PHONE = "FilterPhone"; }
/*
 * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */
/*
 * Copyright 2001-2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * $Id: Output.java,v 1.2.4.1 2005/09/12 10:53:00 pvedula Exp $
 */

package com.sun.org.apache.xalan.internal.xsltc.compiler;

import java.io.OutputStreamWriter;
import java.util.Properties;
import java.util.StringTokenizer;

import javax.xml.transform.OutputKeys;

import com.sun.org.apache.bcel.internal.generic.ConstantPoolGen;
import com.sun.org.apache.bcel.internal.generic.INVOKEVIRTUAL;
import com.sun.org.apache.bcel.internal.generic.InstructionList;
import com.sun.org.apache.bcel.internal.generic.PUSH;
import com.sun.org.apache.bcel.internal.generic.PUTFIELD;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.ClassGenerator;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.ErrorMsg;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.MethodGenerator;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.Util;
import com.sun.org.apache.xml.internal.serializer.Encodings;
import com.sun.org.apache.xml.internal.utils.XML11Char;

/**
 * Compiler representation of an {@code <xsl:output>} stylesheet element.
 * {@link #parseContents} extracts the element's attributes into fields and
 * into the current stylesheet's output {@link Properties};
 * {@link #translate} then emits BCEL bytecode that copies those values into
 * the generated translet's public fields of the same names.
 *
 * @author Jacek Ambroziak
 * @author Santiago Pericas-Geertsen
 * @author Morten Jorgensen
 */
final class Output extends TopLevelElement {

    // TODO: use three-value variables for boolean values: true/false/default

    // These attributes are extracted from the xsl:output element. They also
    // appear as fields (with the same type, only public) in the translet.
    private String  _version;
    private String  _method;
    private String  _encoding;
    private boolean _omitHeader = false;
    private String  _standalone;
    private String  _doctypePublic;
    private String  _doctypeSystem;
    private String  _cdata;            // space-separated expanded QNames
    private boolean _indent = false;
    private String  _mediaType;
    private String  _indentamount;

    // Disables this output element (when other element has higher precedence)
    private boolean _disabled = false;

    // Some global constants
    private final static String STRING_SIG = "Ljava/lang/String;";
    private final static String XML_VERSION = "1.0";
    private final static String HTML_VERSION = "4.0";

    /**
     * Displays the contents of this element (for debugging)
     *
     * @param indent current indentation level of the debug dump
     */
    public void display(int indent) {
        indent(indent);
        Util.println("Output " + _method);
    }

    /**
     * Disables this &lt;xsl:output&gt; element in case where there are some other
     * &lt;xsl:output&gt; element (from a different imported/included stylesheet)
     * with higher precedence.
     */
    public void disable() {
        _disabled = true;
    }

    /** Returns true unless this element has been disabled by a
     *  higher-precedence &lt;xsl:output&gt;. */
    public boolean enabled() {
        return !_disabled;
    }

    /** Returns the expanded cdata-section-elements list, or null if unset. */
    public String getCdata() {
        return _cdata;
    }

    /** Returns the (lower-cased) value of the "method" attribute, or null. */
    public String getOutputMethod() {
        return _method;
    }

    // Copies a single attribute from a lower-precedence <xsl:output> element,
    // but only when this element does not set that attribute itself.
    private void transferAttribute(Output previous, String qname) {
        if (!hasAttribute(qname) && previous.hasAttribute(qname)) {
            addAttribute(qname, previous.getAttribute(qname));
        }
    }

    /**
     * Merges the attributes of a lower-precedence &lt;xsl:output&gt; element
     * into this one: unset attributes are inherited, and the
     * cdata-section-elements lists are concatenated (per XSLT merge rules).
     *
     * @param previous the lower-precedence element being merged in
     */
    public void mergeOutput(Output previous) {
        // Transfer attributes from previous xsl:output
        transferAttribute(previous, "version");
        transferAttribute(previous, "method");
        transferAttribute(previous, "encoding");
        transferAttribute(previous, "doctype-system");
        transferAttribute(previous, "doctype-public");
        transferAttribute(previous, "media-type");
        transferAttribute(previous, "indent");
        transferAttribute(previous, "omit-xml-declaration");
        transferAttribute(previous, "standalone");

        // Merge cdata-section-elements
        if (previous.hasAttribute("cdata-section-elements")) {
            // addAttribute works as a setter if it already exists
            addAttribute("cdata-section-elements",
                previous.getAttribute("cdata-section-elements") + ' '
                    + getAttribute("cdata-section-elements"));
        }

        // Transfer non-standard attributes as well
        String prefix = lookupPrefix("http://xml.apache.org/xalan");
        if (prefix != null) {
            transferAttribute(previous, prefix + ':' + "indent-amount");
        }
        prefix = lookupPrefix("http://xml.apache.org/xslt");
        if (prefix != null) {
            transferAttribute(previous, prefix + ':' + "indent-amount");
        }
    }

    /**
     * Scans the attribute list for the xsl:output instruction, validating each
     * attribute, caching its value in a field, and recording it in the output
     * {@link Properties} that are handed to the current stylesheet.
     *
     * @param parser the active stylesheet parser (also used for error reporting)
     */
    public void parseContents(Parser parser) {
        final Properties outputProperties = new Properties();

        // Ask the parser if it wants this <xsl:output> element
        parser.setOutput(this);

        // Do nothing if other <xsl:output> element has higher precedence
        if (_disabled) return;

        String attrib = null;

        // Get the output version
        _version = getAttribute("version");
        if (_version.equals(Constants.EMPTYSTRING)) {
            _version = null;
        }
        else {
            outputProperties.setProperty(OutputKeys.VERSION, _version);
        }

        // Get the output method - "xml", "html", "text" or <qname> (but not ncname)
        _method = getAttribute("method");
        if (_method.equals(Constants.EMPTYSTRING)) {
            _method = null;
        }
        if (_method != null) {
            _method = _method.toLowerCase();
            if ((_method.equals("xml"))||
                (_method.equals("html"))||
                (_method.equals("text"))||
                // a QName with a prefix selects a custom output handler
                ((XML11Char.isXML11ValidQName(_method)&&(_method.indexOf(":") > 0)))) {
               outputProperties.setProperty(OutputKeys.METHOD, _method);
            } else {
                reportError(this, parser, ErrorMsg.INVALID_METHOD_IN_OUTPUT, _method);
            }
        }

        // Get the output encoding - any value accepted here
        _encoding = getAttribute("encoding");
        if (_encoding.equals(Constants.EMPTYSTRING)) {
            _encoding = null;
        }
        else {
            try {
                // Create a writer to verify encoding support; the writer
                // itself is thrown away - only the exception matters.
                String canonicalEncoding;
                canonicalEncoding = Encodings.convertMime2JavaEncoding(_encoding);
                OutputStreamWriter writer =
                    new OutputStreamWriter(System.out, canonicalEncoding);
            }
            catch (java.io.UnsupportedEncodingException e) {
                // Unsupported encodings are a warning only - kept as typed
                ErrorMsg msg = new ErrorMsg(ErrorMsg.UNSUPPORTED_ENCODING, _encoding, this);
                parser.reportError(Constants.WARNING, msg);
            }
            outputProperties.setProperty(OutputKeys.ENCODING, _encoding);
        }

        // Should the XML header be omitted - translate to true/false
        attrib = getAttribute("omit-xml-declaration");
        if (!attrib.equals(Constants.EMPTYSTRING)) {
            if (attrib.equals("yes")) {
                _omitHeader = true;
            }
            outputProperties.setProperty(OutputKeys.OMIT_XML_DECLARATION, attrib);
        }

        // Add 'standalone' declaration to output - use text as is
        _standalone = getAttribute("standalone");
        if (_standalone.equals(Constants.EMPTYSTRING)) {
            _standalone = null;
        }
        else {
            outputProperties.setProperty(OutputKeys.STANDALONE, _standalone);
        }

        // Get system/public identifiers for output DOCTYPE declaration
        _doctypeSystem = getAttribute("doctype-system");
        if (_doctypeSystem.equals(Constants.EMPTYSTRING)) {
            _doctypeSystem = null;
        }
        else {
            outputProperties.setProperty(OutputKeys.DOCTYPE_SYSTEM, _doctypeSystem);
        }

        _doctypePublic = getAttribute("doctype-public");
        if (_doctypePublic.equals(Constants.EMPTYSTRING)) {
            _doctypePublic = null;
        }
        else {
            outputProperties.setProperty(OutputKeys.DOCTYPE_PUBLIC, _doctypePublic);
        }

        // Names the elements of whose text contents should be output as CDATA
        _cdata = getAttribute("cdata-section-elements");
        if (_cdata.equals(Constants.EMPTYSTRING)) {
            _cdata = null;
        }
        else {
            StringBuffer expandedNames = new StringBuffer();
            StringTokenizer tokens = new StringTokenizer(_cdata);

            // Make sure to store names in expanded form
            while (tokens.hasMoreTokens()) {
                String qname = tokens.nextToken();
                if (!XML11Char.isXML11ValidQName(qname)) {
                    ErrorMsg err = new ErrorMsg(ErrorMsg.INVALID_QNAME_ERR, qname, this);
                    parser.reportError(Constants.ERROR, err);
                }
                expandedNames.append(
                   parser.getQName(qname).toString()).append(' ');
            }
            _cdata = expandedNames.toString();
            outputProperties.setProperty(OutputKeys.CDATA_SECTION_ELEMENTS,
                _cdata);
        }

        // Get the indent setting - only has effect for xml and html output
        attrib = getAttribute("indent");
        if (!attrib.equals(EMPTYSTRING)) {
            if (attrib.equals("yes")) {
                _indent = true;
            }
            outputProperties.setProperty(OutputKeys.INDENT, attrib);
        }
        else if (_method != null && _method.equals("html")) {
            // html output defaults to indented when "indent" is absent
            _indent = true;
        }

        // indent-amount: extension attribute of xsl:output
        _indentamount = getAttribute(
            lookupPrefix("http://xml.apache.org/xalan"), "indent-amount");
        // Hack for supporting Old Namespace URI.
        if (_indentamount.equals(EMPTYSTRING)){
            _indentamount = getAttribute(
                lookupPrefix("http://xml.apache.org/xslt"), "indent-amount");
        }
        if (!_indentamount.equals(EMPTYSTRING)) {
            outputProperties.setProperty("indent_amount", _indentamount);
        }

        // Get the MIME type for the output file
        _mediaType = getAttribute("media-type");
        if (_mediaType.equals(Constants.EMPTYSTRING)) {
            _mediaType = null;
        }
        else {
            outputProperties.setProperty(OutputKeys.MEDIA_TYPE, _mediaType);
        }

        // Implied properties: fill in defaults implied by the chosen method
        if (_method != null) {
            if (_method.equals("html")) {
                if (_version == null) {
                    _version = HTML_VERSION;
                }
                if (_mediaType == null) {
                    _mediaType = "text/html";
                }
            }
            else if (_method.equals("text")) {
                if (_mediaType == null) {
                    _mediaType = "text/plain";
                }
            }
        }

        // Set output properties in current stylesheet
        parser.getCurrentStylesheet().setOutputProperties(outputProperties);
    }

    /**
     * Compile code that passes the information in this &lt;xsl:output&gt; element
     * to the appropriate fields in the translet.
     * The generated sequence is: load the translet reference once, then for
     * each value DUP the reference, PUSH the value and PUTFIELD it; the final
     * POP discards the last remaining translet reference.
     *
     * @param classGen  generator for the translet class (constant pool owner)
     * @param methodGen generator whose instruction list receives the code
     */
    public void translate(ClassGenerator classGen, MethodGenerator methodGen) {
        // Do nothing if other <xsl:output> element has higher precedence
        if (_disabled) return;

        ConstantPoolGen cpg = classGen.getConstantPool();
        InstructionList il = methodGen.getInstructionList();

        int field = 0;
        il.append(classGen.loadTranslet());

        // Only update _version field if set and different from default
        if ((_version != null) && (!_version.equals(XML_VERSION))) {
            field = cpg.addFieldref(TRANSLET_CLASS, "_version", STRING_SIG);
            il.append(DUP);
            il.append(new PUSH(cpg, _version));
            il.append(new PUTFIELD(field));
        }

        // Only update _method field if "method" attribute used
        if (_method != null) {
            field = cpg.addFieldref(TRANSLET_CLASS, "_method", STRING_SIG);
            il.append(DUP);
            il.append(new PUSH(cpg, _method));
            il.append(new PUTFIELD(field));
        }

        // Only update if _encoding field is "encoding" attribute used
        if (_encoding != null) {
            field = cpg.addFieldref(TRANSLET_CLASS, "_encoding", STRING_SIG);
            il.append(DUP);
            il.append(new PUSH(cpg, _encoding));
            il.append(new PUTFIELD(field));
        }

        // Only update if "omit-xml-declaration" used and set to 'yes'
        if (_omitHeader) {
            field = cpg.addFieldref(TRANSLET_CLASS, "_omitHeader", "Z");
            il.append(DUP);
            il.append(new PUSH(cpg, _omitHeader));
            il.append(new PUTFIELD(field));
        }

        // Add 'standalone' declaration to output - use text as is
        if (_standalone != null) {
            field = cpg.addFieldref(TRANSLET_CLASS, "_standalone", STRING_SIG);
            il.append(DUP);
            il.append(new PUSH(cpg, _standalone));
            il.append(new PUTFIELD(field));
        }

        // System/public doctype fields are always written, even when the
        // attribute was absent (value may be null).
        field = cpg.addFieldref(TRANSLET_CLASS,"_doctypeSystem",STRING_SIG);
        il.append(DUP);
        il.append(new PUSH(cpg, _doctypeSystem));
        il.append(new PUTFIELD(field));
        field = cpg.addFieldref(TRANSLET_CLASS,"_doctypePublic",STRING_SIG);
        il.append(DUP);
        il.append(new PUSH(cpg, _doctypePublic));
        il.append(new PUTFIELD(field));

        // Add 'media-type' declaration to output - if used
        if (_mediaType != null) {
            field = cpg.addFieldref(TRANSLET_CLASS, "_mediaType", STRING_SIG);
            il.append(DUP);
            il.append(new PUSH(cpg, _mediaType));
            il.append(new PUTFIELD(field));
        }

        // Compile code to set output indentation on/off
        if (_indent) {
            field = cpg.addFieldref(TRANSLET_CLASS, "_indent", "Z");
            il.append(DUP);
            il.append(new PUSH(cpg, _indent));
            il.append(new PUTFIELD(field));
        }

        // Compile code to set indent amount.
        if(_indentamount != null && !_indentamount.equals(EMPTYSTRING)){
            field = cpg.addFieldref(TRANSLET_CLASS, "_indentamount", "I");
            il.append(DUP);
            il.append(new PUSH(cpg, Integer.parseInt(_indentamount)));
            il.append(new PUTFIELD(field));
        }

        // Forward to the translet any elements that should be output as CDATA
        if (_cdata != null) {
            int index = cpg.addMethodref(TRANSLET_CLASS,
                                         "addCdataElement",
                                         "(Ljava/lang/String;)V");

            StringTokenizer tokens = new StringTokenizer(_cdata);
            while (tokens.hasMoreTokens()) {
                il.append(DUP);
                il.append(new PUSH(cpg, tokens.nextToken()));
                il.append(new INVOKEVIRTUAL(index));
            }
        }

        il.append(POP); // Cleanup - pop last translet reference off stack
    }
}
package com.dinosaurwithakatana.childcare;

import java.io.UnsupportedEncodingException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;

import org.json.JSONException;
import org.json.JSONObject;

import android.app.ActionBar;
import android.app.ActionBar.Tab;
import android.app.ActionBar.TabListener;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.text.InputType;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;

/**
 * Three-tab account-creation screen (login info, personal info, children).
 * Each tab is a {@link CreateAcctFragment}; the fragments publish their input
 * widgets through the public static fields below so the final "Create" click
 * handler can read every tab's values.
 *
 * @author vishnu
 */
public class CreateAccountActivity extends FragmentActivity implements TabListener {

	// Widgets are exposed statically so the last tab's submit handler can read
	// values entered on earlier tabs. NOTE(review): static view references leak
	// the activity context; consider a shared view-model instead.
	public static EditText username;
	public static EditText password;
	public static EditText confirmPassword;
	public static EditText txtFName;
	public static EditText txtMName;
	public static EditText txtLName;
	public static Spinner spnrChildren;
	public static ChildrenSelectedListener childrenListener;
	private static ArrayList<LinearLayout> dynamicLayouts;
	public static EditText txtEmailAddress;
	public static EditText txtPhoneNumer;
	public static EditText txtZipCode;

	/**
	 * Displays one section of the account-creation wizard, selected by the
	 * {@link #ARG_SECTION_NUMBER} argument (1 = login, 2 = personal, 3 = children).
	 */
	public static class CreateAcctFragment extends Fragment {

		private View rootView;
		// Dynamically created per-child inputs, rebuilt whenever the
		// "number of children" spinner changes.
		protected ArrayList<EditText> childFirstNames;
		protected ArrayList<EditText> childMiddleNames;
		protected ArrayList<EditText> childLastNames;
		protected int numOfChildren;
		// Selected age range per child, indexed by child position.
		// -1.0 means "not selected".
		protected ArrayList<Double> ageLow;
		protected ArrayList<Double> ageHigh;

		public CreateAcctFragment() {
		}

		public static final String ARG_SECTION_NUMBER = "section_number";

		@Override
		public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
			Bundle args = getArguments();
			int screen = args.getInt(ARG_SECTION_NUMBER);
			switch (screen) {

				/* Display the login screen */
				case 1: {
					rootView = inflater.inflate(R.layout.login_fragment, container, false);
					Log.d(TAG, "Loaded login creation screen");
					username = (EditText) rootView.findViewById(R.id.txtCreateUsername);
					password = (EditText) rootView.findViewById(R.id.txtCreatePassword);
					confirmPassword = (EditText) rootView.findViewById(R.id.txtConfirmCreatePassword);
					return rootView;
				}

				/* Display the personal information screen */
				case 2: {
					rootView = inflater.inflate(R.layout.personal_fragment, container, false);
					Log.d(TAG, "Loaded personal creation screen");
					txtFName = (EditText) rootView.findViewById(R.id.txtFirstName);
					txtMName = (EditText) rootView.findViewById(R.id.txtMiddleName);
					txtLName = (EditText) rootView.findViewById(R.id.txtLastName);
					txtEmailAddress = (EditText) rootView.findViewById(R.id.txtEmailAddress);
					txtPhoneNumer = (EditText) rootView.findViewById(R.id.txtPhoneNumber);
					txtZipCode = (EditText) rootView.findViewById(R.id.txt_zip_code);
					return rootView;
				}

				/* Display the children screen */
				case 3: {
					rootView = inflater.inflate(R.layout.children_fragment, container, false);
					Log.d(TAG, "Loaded experience creation screen");
					spnrChildren = (Spinner) rootView.findViewById(R.id.spnrNumOfChildren);

					childrenListener = new ChildrenSelectedListener();
					// Rebuild the per-child input rows whenever the count changes.
					spnrChildren.setOnItemSelectedListener(new OnItemSelectedListener() {

						@Override
						public void onNothingSelected(AdapterView<?> parent) {
							// Keep the previously built child views.
						}

						@Override
						public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
							// Spinner position 0 means one child.
							numOfChildren = position + 1;

							LinearLayout lytDynamicLayoutBase =
									(LinearLayout) rootView.findViewById(R.id.dynamic_children_linear);
							lytDynamicLayoutBase.removeAllViews();
							Log.v(TAG, "grabbed linear layouts inside children fragment scroll view");

							// New vertical layout to host all dynamically built rows.
							LinearLayout lytNewLayout = new LinearLayout(getActivity().getBaseContext());
							lytNewLayout.setId(R.id.dynamic_children_layout);
							lytNewLayout.setOrientation(LinearLayout.VERTICAL);

							dynamicLayouts = new ArrayList<LinearLayout>();
							childFirstNames = new ArrayList<EditText>();
							childMiddleNames = new ArrayList<EditText>();
							childLastNames = new ArrayList<EditText>();

							// Pre-size the age lists so each child's spinner can write
							// its own slot (previously the lists were re-created per
							// child and appended to on every selection event).
							ageLow = new ArrayList<Double>();
							ageHigh = new ArrayList<Double>();
							for (int i = 0; i < numOfChildren; i++) {
								ageLow.add(-1.0);
								ageHigh.add(-1.0);
							}

							// Pre-declared view ids for up to 7 children.
							final int[] firstNameIds = {
									R.id.txt_child_first_name_1, R.id.txt_child_first_name_2,
									R.id.txt_child_first_name_3, R.id.txt_child_first_name_4,
									R.id.txt_child_first_name_5, R.id.txt_child_first_name_6,
									R.id.txt_child_first_name_7 };
							final int[] middleNameIds = {
									R.id.txt_child_middle_name_1, R.id.txt_child_middle_name_2,
									R.id.txt_child_middle_name_3, R.id.txt_child_middle_name_4,
									R.id.txt_child_middle_name_5, R.id.txt_child_middle_name_6,
									R.id.txt_child_middle_name_7 };
							final int[] lastNameIds = {
									R.id.txt_child_last_name_1, R.id.txt_child_last_name_2,
									R.id.txt_child_last_name_3, R.id.txt_child_last_name_4,
									R.id.txt_child_last_name_5, R.id.txt_child_last_name_6,
									R.id.txt_child_last_name_7 };
							final int[] ageSpinnerIds = {
									R.id.spnr_child_age_1, R.id.spnr_child_age_2,
									R.id.spnr_child_age_3, R.id.spnr_child_age_4,
									R.id.spnr_child_age_5, R.id.spnr_child_age_6,
									R.id.spnr_child_age_7 };

							for (int i = 0; i < numOfChildren; i++) {
								final int childIndex = i;

								// Vertical layout for this child's inputs.
								LinearLayout lytTempLayout = new LinearLayout(getActivity().getBaseContext());
								lytTempLayout.setOrientation(LinearLayout.VERTICAL);

								TextView tvChildLabel = new TextView(getActivity().getBaseContext());
								tvChildLabel.setText("Child " + (i + 1));

								// Name block: prompt + first/middle/last fields.
								LinearLayout lytNameLayout = new LinearLayout(getActivity().getBaseContext());
								lytNameLayout.setOrientation(LinearLayout.VERTICAL);

								TextView tvNamePrompt = new TextView(getActivity().getBaseContext());
								tvNamePrompt.setText("Child's Name");

								EditText edtxtChildFirstName = new EditText(getActivity().getBaseContext());
								edtxtChildFirstName.setHint("First Name");
								edtxtChildFirstName.setInputType(InputType.TYPE_TEXT_VARIATION_PERSON_NAME);
								childFirstNames.add(edtxtChildFirstName);

								EditText edtxtChildMiddleName = new EditText(getActivity().getBaseContext());
								edtxtChildMiddleName.setHint("Middle Name");
								edtxtChildMiddleName.setInputType(InputType.TYPE_TEXT_VARIATION_PERSON_NAME);
								childMiddleNames.add(edtxtChildMiddleName);

								EditText edtxtChildLastName = new EditText(getActivity().getBaseContext());
								edtxtChildLastName.setHint("Last Name");
								edtxtChildLastName.setInputType(InputType.TYPE_TEXT_VARIATION_PERSON_NAME);
								childLastNames.add(edtxtChildLastName);

								lytNameLayout.addView(tvNamePrompt);
								lytNameLayout.addView(edtxtChildFirstName);
								lytNameLayout.addView(edtxtChildMiddleName);
								lytNameLayout.addView(edtxtChildLastName);

								// Age block: prompt + range spinner.
								LinearLayout lytAgeLayout = new LinearLayout(getActivity().getBaseContext());
								lytAgeLayout.setOrientation(LinearLayout.HORIZONTAL);

								TextView tvAgePrompt = new TextView(getActivity().getBaseContext());
								tvAgePrompt.setText("Child's Age");
								tvAgePrompt.setId(R.id.txtvw_dob_prompt);

								Spinner spnrAgeSelect = new Spinner(getActivity().getBaseContext());
								spnrAgeSelect.setOnItemSelectedListener(new OnItemSelectedListener() {
									// Age bounds per spinner position; position 0 = "not selected".
									// Must stay in sync with R.array.age_list.
									private final double[] lows = { -1.0, 0.0, 0.5, 1.0, 2.0, 3.0, 4.0, 5.0 };
									private final double[] highs = { -1.0, 0.5, 1.0, 2.0, 3.0, 4.0, 5.0, 12.0 };

									@Override
									public void onItemSelected(AdapterView<?> parent, View v, int position, long id) {
										// Write this child's slot instead of appending,
										// so reselecting never duplicates entries.
										if (position >= 0 && position < lows.length) {
											ageLow.set(childIndex, lows[position]);
											ageHigh.set(childIndex, highs[position]);
										}
									}

									@Override
									public void onNothingSelected(AdapterView<?> parent) {
										// Keep the current selection.
									}
								});

								ArrayAdapter<CharSequence> childrenAdapter = ArrayAdapter.createFromResource(
										getActivity().getBaseContext(), R.array.age_list,
										android.R.layout.simple_spinner_dropdown_item);
								childrenAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
								spnrAgeSelect.setAdapter(childrenAdapter);
								Log.v(TAG, "Spinner listener added and adapter added to children num");

								lytAgeLayout.addView(tvAgePrompt);
								lytAgeLayout.addView(spnrAgeSelect);

								// Assign the pre-declared ids. BUGFIX: the middle/last
								// name fields previously never received ids because
								// the first-name field was set three times.
								if (i < firstNameIds.length) {
									edtxtChildFirstName.setId(firstNameIds[i]);
									edtxtChildMiddleName.setId(middleNameIds[i]);
									edtxtChildLastName.setId(lastNameIds[i]);
									spnrAgeSelect.setId(ageSpinnerIds[i]);
								}

								// Assemble this child's row.
								lytTempLayout.addView(tvChildLabel);
								lytTempLayout.addView(lytNameLayout);
								lytTempLayout.addView(lytAgeLayout);
								dynamicLayouts.add(lytTempLayout);
							}

							for (int i = 0; i < dynamicLayouts.size(); i++) {
								lytNewLayout.addView(dynamicLayouts.get(i));
							}
							lytDynamicLayoutBase.addView(lytNewLayout);
						}
					});

					// Populate the "number of children" spinner.
					ArrayAdapter<CharSequence> ageAdapter = ArrayAdapter.createFromResource(
							getActivity().getBaseContext(), R.array.num_of_children,
							android.R.layout.simple_spinner_dropdown_item);
					ageAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
					spnrChildren.setAdapter(ageAdapter);
					Log.v(TAG, "Spinner listener added and adapter added to children num");

					// Submit button: gather all tabs' input and POST the new account.
					Button createAccount = (Button) rootView.findViewById(R.id.btnCreate);
					createAccount.setOnClickListener(new View.OnClickListener() {
						private String usernameInput, passwordInput, confirmPasswordInput,
								fNameInput, mNameInput, lNameInput, emailAddress, phoneNumber, zipCode;
						private ArrayList<String> childFName = new ArrayList<String>();
						private ArrayList<String> childMName = new ArrayList<String>();
						private ArrayList<String> childLName = new ArrayList<String>();

						public void onClick(View v) {
							usernameInput = username.getText().toString();
							try {
								passwordInput = AeSimpleSHA1.SHA1("salt" + password.getText().toString());
								confirmPasswordInput = AeSimpleSHA1.SHA1("salt" + confirmPassword.getText().toString());
							} catch (NoSuchAlgorithmException e) {
								// BUGFIX: previously continued with a null hash and
								// crashed on the equals() comparison below.
								e.printStackTrace();
								return;
							} catch (UnsupportedEncodingException e) {
								e.printStackTrace();
								return;
							}

							fNameInput = txtFName.getText().toString();
							mNameInput = txtMName.getText().toString();
							lNameInput = txtLName.getText().toString();
							emailAddress = txtEmailAddress.getText().toString();
							phoneNumber = txtPhoneNumer.getText().toString();
							zipCode = txtZipCode.getText().toString();

							// BUGFIX: clear before refilling — the lists are fields on
							// this listener and used to accumulate across clicks.
							childFName.clear();
							childMName.clear();
							childLName.clear();
							for (int i = 0; i < numOfChildren; i++) {
								childFName.add(childFirstNames.get(i).getText().toString());
								childMName.add(childMiddleNames.get(i).getText().toString());
								childLName.add(childLastNames.get(i).getText().toString());
							}

							if (DEBUG) {
								Log.v(TAG, "create clicked" + usernameInput + " " + fNameInput + " "
										+ mNameInput + " " + lNameInput + " " + emailAddress);
							}

							// Send the information to the DB only if passwords match.
							if (passwordInput.equals(confirmPasswordInput)) {
								JSONObject user = new JSONObject();
								try {
									user.put("username", usernameInput);
									user.put("password", passwordInput);
									user.put("f_name", fNameInput);
									user.put("m_name", mNameInput);
									user.put("l_name", lNameInput);
									user.put("email_addr", emailAddress);
									user.put("phone_number", phoneNumber);
									user.put("zip", zipCode);
									for (int i = 0; i < numOfChildren; i++) {
										user.put("child_" + (i + 1) + "_f_name", childFName.get(i));
										user.put("child_" + (i + 1) + "_m_name", childMName.get(i));
										user.put("child_" + (i + 1) + "_l_name", childLName.get(i));
										user.put("child_" + (i + 1) + "_age_range_low", ageLow.get(i));
										user.put("child_" + (i + 1) + "_age_range_high", ageHigh.get(i));
									}
								} catch (JSONException e) {
									e.printStackTrace();
								}
								new PostNewAcct().execute(user.toString());

								// BUGFIX: success toast and navigation used to run even
								// when the passwords did not match.
								Toast.makeText(getActivity().getBaseContext(),
										"New User " + usernameInput + " Created!",
										Toast.LENGTH_SHORT).show();
								Intent i = new Intent(getActivity().getBaseContext(), LoginActivity.class);
								startActivity(i);
							} else {
								// Tell the user the passwords don't match.
								Toast.makeText(getActivity().getBaseContext(),
										"Passwords don't match!", Toast.LENGTH_SHORT).show();
							}
						}
					});
					return rootView;
				}
			}
			return null;
		}
	}

	public static void setChildrenLayout(int num) {
	}

	/**
	 * Supplies one {@link CreateAcctFragment} per wizard tab.
	 */
	public class SectionsPagerAdapter extends FragmentPagerAdapter {

		public SectionsPagerAdapter(FragmentManager fm) {
			super(fm);
		}

		@Override
		public Fragment getItem(int i) {
			Fragment fragment = new CreateAcctFragment();
			Bundle args = new Bundle();
			// Sections are 1-based.
			args.putInt(CreateAcctFragment.ARG_SECTION_NUMBER, i + 1);
			fragment.setArguments(args);
			return fragment;
		}

		@Override
		public int getCount() {
			return 3;
		}

		@Override
		public CharSequence getPageTitle(int position) {
			switch (position) {
				case 0:
					return getString(R.string.tab_login_info).toUpperCase();
				case 1:
					return getString(R.string.tab_p_info).toUpperCase();
				case 2:
					return getString(R.string.tab_children).toUpperCase();
			}
			return null;
		}
	}

	private static final String TAG = "Child Care" + CreateAccountActivity.class.getSimpleName();
	private static Spinner spnrEducation;
	private static OnItemSelectedListener spnrListener;
	private static boolean DEBUG = true;

	/**
	 * The {@link android.support.v4.view.PagerAdapter} that will provide fragments for each of the
	 * sections. We use a {@link android.support.v4.app.FragmentPagerAdapter} derivative, which will
	 * keep every loaded fragment in memory. If this becomes too memory intensive, it may be best
	 * to switch to a {@link android.support.v4.app.FragmentStatePagerAdapter}.
	 */
	SectionsPagerAdapter mSectionsPagerAdapter;

	/**
	 * The {@link ViewPager} that will host the section contents.
	 */
	ViewPager mViewPager;

	/**
	 * Creates the activity: wires the pager, its adapter, and one action-bar tab per section.
	 */
	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_create_account);

		mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager());

		// Set up the action bar.
		final ActionBar actionBar = getActionBar();
		actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);

		// Set up the ViewPager with the sections adapter; keep all three pages alive
		// because the submit handler reads views from every tab.
		mViewPager = (ViewPager) findViewById(R.id.pager);
		mViewPager.setOffscreenPageLimit(3);
		mViewPager.setAdapter(mSectionsPagerAdapter);

		// When swiping between sections, select the corresponding tab.
		mViewPager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
			@Override
			public void onPageSelected(int position) {
				actionBar.setSelectedNavigationItem(position);
			}
		});

		// For each section, add a tab to the action bar.
		for (int i = 0; i < mSectionsPagerAdapter.getCount(); i++) {
			actionBar.addTab(
					actionBar.newTab()
							.setText(mSectionsPagerAdapter.getPageTitle(i))
							.setTabListener(this));
		}
	}

	public void onTabReselected(Tab arg0, android.app.FragmentTransaction arg1) {
		// Nothing to do.
	}

	public void onTabSelected(Tab arg0, android.app.FragmentTransaction arg1) {
		// Keep the pager in sync with the selected tab.
		mViewPager.setCurrentItem(arg0.getPosition());
	}

	public void onTabUnselected(Tab arg0, android.app.FragmentTransaction arg1) {
		// Nothing to do.
	}
}
package com.ctrip.hermes.metaservice.service;

import java.io.UnsupportedEncodingException;
import java.util.*;
import java.util.regex.Pattern;

import org.I0Itec.zkclient.ZkClient;
import org.I0Itec.zkclient.exception.ZkMarshallingError;
import org.I0Itec.zkclient.serialize.ZkSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.unidal.dal.jdbc.DalException;
import org.unidal.lookup.annotation.Inject;
import org.unidal.lookup.annotation.Named;

import com.ctrip.hermes.meta.entity.*;
import com.ctrip.hermes.metaservice.service.storage.TopicStorageService;
import com.ctrip.hermes.metaservice.service.storage.exception.StorageHandleErrorException;
import com.ctrip.hermes.metaservice.service.storage.pojo.StoragePartition;
import com.ctrip.hermes.metaservice.service.storage.pojo.StorageTable;

import kafka.admin.AdminUtils;

/**
 * CRUD operations for Hermes topics, covering the meta store, the MySQL
 * storage backend, and (for Kafka-backed topics) the Kafka cluster reached
 * through the topic's "zookeeper.connect" datasource property.
 */
@Named
public class TopicService {

	private static final Logger m_logger = LoggerFactory.getLogger(TopicService.class);

	@Inject
	private PortalMetaService m_metaService;

	@Inject
	private SchemaService m_schemaService;

	@Inject
	private TopicStorageService m_topicStorageService;

	@Inject
	private ZookeeperService m_zookeeperService;

	/**
	 * Registers a new topic in the meta store, assigning it the next free id and
	 * sequential partition ids. For MYSQL-backed topics the storage tables and
	 * lease ZK paths are created first.
	 *
	 * @param topic the topic to create (mutated in place: id, timestamps, partition ids)
	 * @return the created topic
	 * @throws RuntimeException if storage init or the meta update fails
	 */
	public Topic createTopic(Topic topic) throws Exception {
		Meta meta = m_metaService.getMeta();
		topic.setCreateTime(new Date(System.currentTimeMillis()));
		topic.setLastModifiedTime(topic.getCreateTime());

		// Assign max(existing id) + 1.
		long maxTopicId = 0;
		for (Topic topic2 : meta.getTopics().values()) {
			if (topic2.getId() != null && topic2.getId() > maxTopicId) {
				maxTopicId = topic2.getId();
			}
		}
		topic.setId(maxTopicId + 1);

		int partitionId = 0;
		for (Partition partition : topic.getPartitions()) {
			partition.setId(partitionId++);
		}
		meta.addTopic(topic);

		if (Storage.MYSQL.equals(topic.getStorageType())) {
			if (!m_topicStorageService.initTopicStorage(topic)) {
				m_logger.error("Init topic storage failed, please try later.");
				throw new RuntimeException("Init topic storage failed, please try later.");
			}
			m_zookeeperService.ensureConsumerLeaseZkPath(topic);
			m_zookeeperService.ensureBrokerLeaseZkPath(topic);
		}

		if (!m_metaService.updateMeta(meta)) {
			throw new RuntimeException("Update meta failed, please try later");
		}
		return topic;
	}

	/**
	 * Resolves the "zookeeper.connect" value of the read datasource backing the
	 * topic's first partition, or {@code null} when the topic has no partitions,
	 * no storage, or no such property. Shared by the *InKafka methods, which
	 * previously each carried a copy of this lookup.
	 */
	private String findZkConnect(Topic topic) {
		List<Partition> partitions = m_metaService.findPartitionsByTopic(topic.getName());
		if (partitions == null || partitions.size() < 1) {
			return null;
		}
		String consumerDatasource = partitions.get(0).getReadDatasource();
		Storage targetStorage = m_metaService.findStorageByTopic(topic.getName());
		if (targetStorage == null) {
			return null;
		}
		for (Datasource datasource : targetStorage.getDatasources()) {
			if (consumerDatasource.equals(datasource.getId())) {
				Map<String, Property> properties = datasource.getProperties();
				for (Map.Entry<String, Property> prop : properties.entrySet()) {
					if ("zookeeper.connect".equals(prop.getValue().getName())) {
						return prop.getValue().getValue();
					}
				}
			}
		}
		return null;
	}

	/**
	 * Creates the topic in the Kafka cluster. Partition count, replication
	 * factor and retention settings are read from the topic's properties.
	 * No-op when the ZK connect string cannot be resolved.
	 */
	public void createTopicInKafka(Topic topic) {
		String zkConnect = findZkConnect(topic);
		if (zkConnect == null) {
			m_logger.warn("No zookeeper.connect found for topic {}, skip create in kafka", topic.getName());
			return;
		}
		ZkClient zkClient = new ZkClient(zkConnect);
		try {
			zkClient.setZkSerializer(new ZKStringSerializer());
			int partition = 1;
			int replication = 1;
			Properties topicProp = new Properties();
			for (Property prop : topic.getProperties()) {
				if ("replication-factor".equals(prop.getName())) {
					replication = Integer.parseInt(prop.getValue());
				} else if ("partitions".equals(prop.getName())) {
					partition = Integer.parseInt(prop.getValue());
				} else if ("retention.ms".equals(prop.getName())) {
					topicProp.setProperty("retention.ms", prop.getValue());
				} else if ("retention.bytes".equals(prop.getName())) {
					topicProp.setProperty("retention.bytes", prop.getValue());
				}
			}
			m_logger.debug("create topic in kafka, topic {}, partition {}, replication {}, prop {}",
			      topic.getName(), partition, replication, topicProp);
			AdminUtils.createTopic(zkClient, topic.getName(), partition, replication, topicProp);
		} finally {
			// BUGFIX: the client was previously never closed, leaking a ZK session.
			zkClient.close();
		}
	}

	/**
	 * Deletes the topic from the Kafka cluster. No-op when the ZK connect
	 * string cannot be resolved.
	 */
	public void deleteTopicInKafka(Topic topic) {
		String zkConnect = findZkConnect(topic);
		if (zkConnect == null) {
			m_logger.warn("No zookeeper.connect found for topic {}, skip delete in kafka", topic.getName());
			return;
		}
		ZkClient zkClient = new ZkClient(zkConnect);
		try {
			zkClient.setZkSerializer(new ZKStringSerializer());
			m_logger.debug("delete topic in kafka, topic {}", topic.getName());
			AdminUtils.deleteTopic(zkClient, topic.getName());
		} finally {
			zkClient.close();
		}
	}

	/**
	 * Pushes the topic's retention settings to the Kafka cluster. No-op when
	 * the ZK connect string cannot be resolved.
	 */
	public void configTopicInKafka(Topic topic) {
		String zkConnect = findZkConnect(topic);
		if (zkConnect == null) {
			m_logger.warn("No zookeeper.connect found for topic {}, skip config in kafka", topic.getName());
			return;
		}
		ZkClient zkClient = new ZkClient(zkConnect);
		try {
			zkClient.setZkSerializer(new ZKStringSerializer());
			Properties topicProp = new Properties();
			for (Property prop : topic.getProperties()) {
				if ("retention.ms".equals(prop.getName())) {
					topicProp.setProperty("retention.ms", prop.getValue());
				} else if ("retention.bytes".equals(prop.getName())) {
					topicProp.setProperty("retention.bytes", prop.getValue());
				}
			}
			m_logger.debug("config topic in kafka, topic {}, prop {}", topic.getName(), topicProp);
			AdminUtils.changeTopicConfig(zkClient, topic.getName(), topicProp);
		} finally {
			zkClient.close();
		}
	}

	/**
	 * Removes the topic from the meta store, along with its schemas and — for
	 * MYSQL-backed topics — its storage tables and ZK lease paths. Schema and
	 * table deletion failures are logged but do not abort the meta update.
	 *
	 * @param name topic name; unknown names are silently ignored
	 */
	public void deleteTopic(String name) throws Exception {
		Meta meta = m_metaService.getMeta();
		Topic topic = meta.findTopic(name);
		if (topic == null)
			return;
		meta.removeTopic(name);

		// Remove related schemas; best-effort by design.
		if (topic.getId() != null && topic.getId() > 0) {
			try {
				m_schemaService.deleteSchemas(topic);
			} catch (Throwable e) {
				m_logger.error(String.format("delete schema failed for topic: %s", topic.getName()), e);
			}
		}

		if (Storage.MYSQL.equals(topic.getStorageType())) {
			try {
				m_topicStorageService.dropTopicStorage(topic);
				m_zookeeperService.deleteConsumerLeaseTopicParentZkPath(topic.getName());
				m_zookeeperService.deleteBrokerLeaseTopicParentZkPath(topic.getName());
				m_zookeeperService.deleteMetaServerAssignmentZkPath(topic.getName());
			} catch (Exception e) {
				// Table-drop failures are tolerated; anything else aborts.
				if (e instanceof StorageHandleErrorException) {
					m_logger.warn("Delete topic tables failed", e);
				} else {
					throw new RuntimeException("Delete topic failed.", e);
				}
			}
		}

		m_metaService.updateMeta(meta);
	}

	/**
	 * Returns all topics whose name matches the given regex, sorted by name.
	 */
	public List<Topic> findTopics(String pattern) {
		List<Topic> filtered = new ArrayList<Topic>();
		for (Topic topic : m_metaService.getTopics().values()) {
			if (Pattern.matches(pattern, topic.getName())) {
				filtered.add(topic);
			}
		}
		Collections.sort(filtered, new Comparator<Topic>() {
			@Override
			public int compare(Topic o1, Topic o2) {
				return o1.getName().compareTo(o2.getName());
			}
		});
		return filtered;
	}

	public Topic findTopicById(long topicId) {
		return m_metaService.findTopicById(topicId);
	}

	public Topic findTopicByName(String topicName) {
		return m_metaService.findTopicByName(topicName);
	}

	/**
	 * Replaces the topic in the meta store, refreshing its last-modified time
	 * and (for MYSQL-backed topics) ensuring the lease ZK paths exist.
	 *
	 * @param topic the topic to store
	 * @return the stored topic
	 */
	public Topic updateTopic(Topic topic) throws DalException {
		Meta meta = m_metaService.getMeta();
		meta.removeTopic(topic.getName());
		topic.setLastModifiedTime(new Date(System.currentTimeMillis()));
		meta.addTopic(topic);

		if (Storage.MYSQL.equals(topic.getStorageType())) {
			m_zookeeperService.ensureConsumerLeaseZkPath(topic);
			m_zookeeperService.ensureBrokerLeaseZkPath(topic);
		}

		m_metaService.updateMeta(meta);
		return topic;
	}

	public Integer queryStorageSize(String ds) throws StorageHandleErrorException {
		return m_topicStorageService.queryStorageSize(ds);
	}

	public Integer queryStorageSize(String ds, String table) throws StorageHandleErrorException {
		return m_topicStorageService.queryStorageSize(ds, table);
	}

	public List<StorageTable> queryStorageTables(String ds) throws StorageHandleErrorException {
		return m_topicStorageService.queryStorageTables(ds);
	}

	public List<StoragePartition> queryStorageTablePartitions(String ds, String table)
	      throws StorageHandleErrorException {
		return m_topicStorageService.queryTablePartitions(ds, table);
	}

	public void addPartition(String ds, String table, int span) throws StorageHandleErrorException {
		m_topicStorageService.addPartitionStorage(ds, table, span);
	}

	public void delPartition(String ds, String table) throws StorageHandleErrorException {
		m_topicStorageService.delPartitionStorage(ds, table);
	}
}

/**
 * UTF-8 String serializer for ZkClient, matching what Kafka's admin tools
 * expect to find in ZooKeeper nodes.
 */
class ZKStringSerializer implements ZkSerializer {

	@Override
	public Object deserialize(byte[] bytes) throws ZkMarshallingError {
		if (bytes == null)
			return null;
		else
			try {
				return new String(bytes, "UTF-8");
			} catch (UnsupportedEncodingException e) {
				throw new ZkMarshallingError(e);
			}
	}

	@Override
	public byte[] serialize(Object data) throws ZkMarshallingError {
		byte[] bytes = null;
		try {
			bytes = data.toString().getBytes("UTF-8");
		} catch (UnsupportedEncodingException e) {
			throw new ZkMarshallingError(e);
		}
		return bytes;
	}
}
/* * Copyright (c) 2019 Abex * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package net.runelite.client.ui;

import java.awt.Color;
import java.awt.Container;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.lang.reflect.InvocationTargetException;
import javax.annotation.Nullable;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JProgressBar;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.Timer;
import javax.swing.UIManager;
import javax.swing.border.EmptyBorder;
import javax.swing.plaf.basic.BasicProgressBarUI;
import lombok.extern.slf4j.Slf4j;
import net.runelite.client.ui.skin.SubstanceRuneLiteLookAndFeel;
import net.runelite.client.util.ImageUtil;
import org.pushingpixels.substance.internal.SubstanceSynapse;

/**
 * Undecorated startup splash window showing an action label, a progress bar
 * and a sub-action label. There is at most one instance, managed through the
 * static {@link #init()} / {@link #stop()} pair; progress is reported from any
 * thread via the {@code stage(...)} overloads, which only write the volatile
 * fields below — a Swing {@link Timer} copies them onto the widgets on the EDT
 * every 100 ms, so no cross-thread Swing calls occur.
 */
@Slf4j
public class SplashScreen extends JFrame implements ActionListener
{
	// Window is WIDTH x WIDTH logo plus the stacked labels/bar below it.
	private static final int WIDTH = 200;
	private static final int PAD = 10;

	// Singleton; non-null exactly while the splash is shown.
	private static SplashScreen INSTANCE;

	private final JLabel action = new JLabel("Loading");
	private final JProgressBar progress = new JProgressBar();
	private final JLabel subAction = new JLabel();
	private final Timer timer;

	// Written by stage(...) from arbitrary threads, read by the EDT timer tick;
	// volatile gives the needed visibility.
	private volatile double overallProgress = 0;
	private volatile String actionText = "Loading";
	private volatile String subActionText = "";
	private volatile String progressText = null;

	/**
	 * Builds and shows the splash window. Must run on the EDT (see {@link #init()}).
	 */
	private SplashScreen()
	{
		BufferedImage logo = ImageUtil.loadImageResource(SplashScreen.class, "runelite_transparent.png");

		setTitle("RuneLite Launcher");

		setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
		setUndecorated(true);
		setIconImage(logo);
		// Absolute positioning; components are placed with setBounds below.
		setLayout(null);
		Container pane = getContentPane();
		pane.setBackground(ColorScheme.DARKER_GRAY_COLOR);

		Font font = new Font(Font.DIALOG, Font.PLAIN, 12);

		JLabel logoLabel = new JLabel(new ImageIcon(logo));
		pane.add(logoLabel);
		logoLabel.setBounds(0, 0, WIDTH, WIDTH);

		// y tracks the next free row, starting just under the logo.
		int y = WIDTH;

		pane.add(action);
		action.setForeground(Color.WHITE);
		action.setBounds(0, y, WIDTH, 16);
		action.setHorizontalAlignment(SwingConstants.CENTER);
		action.setFont(font);
		y += action.getHeight() + PAD;

		pane.add(progress);
		progress.setForeground(ColorScheme.BRAND_ORANGE);
		progress.setBackground(ColorScheme.BRAND_ORANGE.darker().darker());
		progress.setBorder(new EmptyBorder(0, 0, 0, 0));
		progress.setBounds(0, y, WIDTH, 14);
		progress.setFont(font);
		// Keep the optional progress string black on both filled and unfilled
		// parts of the bar.
		progress.setUI(new BasicProgressBarUI()
		{
			@Override
			protected Color getSelectionBackground()
			{
				return Color.BLACK;
			}

			@Override
			protected Color getSelectionForeground()
			{
				return Color.BLACK;
			}
		});
		// NOTE(review): advances by 12 although the bar is 14 px tall — looks
		// intentional spacing, but confirm before "fixing".
		y += 12 + PAD;

		pane.add(subAction);
		subAction.setForeground(Color.LIGHT_GRAY);
		subAction.setBounds(0, y, WIDTH, 16);
		subAction.setHorizontalAlignment(SwingConstants.CENTER);
		subAction.setFont(font);
		y += subAction.getHeight() + PAD;

		setSize(WIDTH, y);
		setLocationRelativeTo(null);

		// Repeating 100 ms tick that refreshes the widgets from the volatile fields.
		timer = new Timer(100, this);
		timer.setRepeats(true);
		timer.start();

		setVisible(true);
	}

	/**
	 * Timer tick (runs on the EDT): copy the latest staged state onto the widgets.
	 */
	@Override
	public void actionPerformed(ActionEvent e)
	{
		action.setText(actionText);
		subAction.setText(subActionText);
		// Progress is mapped to 0..1000 for finer granularity than percent.
		progress.setMaximum(1000);
		progress.setValue((int) (overallProgress * 1000));

		// Snapshot the volatile once so the null check and the use agree.
		String progressText = this.progressText;
		if (progressText == null)
		{
			progress.setStringPainted(false);
		}
		else
		{
			progress.setStringPainted(true);
			progress.setString(progressText);
		}
	}

	/** @return whether the splash screen is currently shown */
	public static boolean isOpen()
	{
		return INSTANCE != null;
	}

	/**
	 * Creates and shows the singleton splash screen on the EDT; no-op if already
	 * open. Temporarily switches to the cross-platform LAF when Substance isn't
	 * active yet, so the splash doesn't depend on the full skin being loaded.
	 */
	public static void init()
	{
		try
		{
			SwingUtilities.invokeAndWait(() ->
			{
				if (INSTANCE != null)
				{
					return;
				}

				try
				{
					boolean hasLAF = UIManager.getLookAndFeel() instanceof SubstanceRuneLiteLookAndFeel;
					if (!hasLAF)
					{
						UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName());
					}
					INSTANCE = new SplashScreen();
					if (hasLAF)
					{
						INSTANCE.getRootPane().putClientProperty(SubstanceSynapse.COLORIZATION_FACTOR, 1.0);
					}
				}
				catch (Exception e)
				{
					// Splash is cosmetic; startup continues without it.
					log.warn("Unable to start splash screen", e);
				}
			});
		}
		catch (InterruptedException | InvocationTargetException bs)
		{
			throw new RuntimeException(bs);
		}
	}

	/**
	 * Tears down the splash screen on the EDT; no-op if not open.
	 */
	public static void stop()
	{
		SwingUtilities.invokeLater(() ->
		{
			if (INSTANCE == null)
			{
				return;
			}

			INSTANCE.timer.stop();
			// The CLOSE_ALL_WINDOWS quit strategy on MacOS dispatches WINDOW_CLOSING events to each frame
			// from Window.getWindows. However, getWindows uses weak refs and relies on gc to remove windows
			// from its list, causing events to get dispatched to disposed frames. The frames handle the events
			// regardless of being disposed and will run the configured close operation. Set the close operation
			// to DO_NOTHING_ON_CLOSE prior to disposing to prevent this.
			INSTANCE.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
			INSTANCE.dispose();
			INSTANCE = null;
		});
	}

	/**
	 * Stages progress with no numeric progress string.
	 *
	 * @param overallProgress fraction complete in [0, 1]
	 * @param actionText main label text, or null to keep the current one
	 * @param subActionText secondary label text
	 */
	public static void stage(double overallProgress, @Nullable String actionText, String subActionText)
	{
		stage(overallProgress, actionText, subActionText, null);
	}

	/**
	 * Stages progress for a sub-task spanning [startProgress, endProgress] of the
	 * overall bar, with a "done / total" progress string (optionally in MiB).
	 *
	 * @param done units completed so far
	 * @param total total units; NOTE(review): total == 0 would divide by zero — callers presumably guarantee total &gt; 0
	 * @param mib true to render the counts as mebibytes
	 */
	public static void stage(double startProgress, double endProgress,
		@Nullable String actionText, String subActionText,
		int done, int total, boolean mib)
	{
		String progress;
		if (mib)
		{
			final double MiB = 1024 * 1024;
			// CEIL nudges the displayed total up so "done" never appears to exceed it.
			final double CEIL = 1.d / 10.d;
			progress = String.format("%.1f / %.1f MiB", done / MiB, (total / MiB) + CEIL);
		}
		else
		{
			progress = done + " / " + total;
		}
		stage(startProgress + ((endProgress - startProgress) * done / total), actionText, subActionText, progress);
	}

	/**
	 * Stages progress; safe to call from any thread (only writes volatile fields).
	 *
	 * @param overallProgress fraction complete in [0, 1]
	 * @param actionText main label text, or null to keep the current one
	 * @param subActionText secondary label text
	 * @param progressText string painted on the bar, or null for none
	 */
	public static void stage(double overallProgress, @Nullable String actionText, String subActionText,
		@Nullable String progressText)
	{
		if (INSTANCE != null)
		{
			INSTANCE.overallProgress = overallProgress;
			if (actionText != null)
			{
				INSTANCE.actionText = actionText;
			}
			INSTANCE.subActionText = subActionText;
			INSTANCE.progressText = progressText;
		}
	}
}
/*
 * Copyright 2014-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.python;

import com.facebook.buck.cli.BuckConfig;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkStrategy;
import com.facebook.buck.io.ExecutableFinder;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuckVersion;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.CommandTool;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.rules.VersionedTool;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.PackagedResource;
import com.facebook.buck.util.ProcessExecutor;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
import javax.annotation.Nonnull;

/**
 * Typed view over the {@code [python]} (and {@code [python#flavor]}) sections
 * of .buckconfig: locates a python interpreter, resolves pex-related tools,
 * and exposes the assorted packaging knobs. All lookups delegate to the
 * wrapped {@link BuckConfig}.
 */
public class PythonBuckConfig {

  public static final Flavor DEFAULT_PYTHON_PLATFORM = InternalFlavor.of("py-default");

  private static final String SECTION = "python";
  // Per-flavor platforms live in sections named "python#<flavor>".
  private static final String PYTHON_PLATFORM_SECTION_PREFIX = "python#";

  // Prefer "python2" where available (Linux), but fall back to "python" (Mac).
  private static final ImmutableList<String> PYTHON_INTERPRETER_NAMES =
      ImmutableList.of("python2", "python");

  // Built-in pex-building script, overridable via the buck.path_to_pex property.
  private static final Path DEFAULT_PATH_TO_PEX =
      Paths.get(System.getProperty("buck.path_to_pex", "src/com/facebook/buck/python/make_pex.py"))
          .toAbsolutePath();

  // One extracted __test_main__.py per filesystem, cached for reuse.
  // NOTE(review): the second argument concatenates a Class with a String,
  // yielding "class com.facebook...PythonBuckConfig/__test_main__.py" — looks
  // like it is used only as an identifier, but confirm it is not a real path.
  private static final LoadingCache<ProjectFilesystem, PathSourcePath> PATH_TO_TEST_MAIN =
      CacheBuilder.newBuilder()
          .build(
              new CacheLoader<ProjectFilesystem, PathSourcePath>() {
                @Override
                public PathSourcePath load(@Nonnull ProjectFilesystem filesystem) {
                  return new PathSourcePath(
                      filesystem,
                      PythonBuckConfig.class + "/__test_main__.py",
                      new PackagedResource(filesystem, PythonBuckConfig.class, "__test_main__.py"));
                }
              });

  private final BuckConfig delegate;
  private final ExecutableFinder exeFinder;

  /**
   * @param config the parsed .buckconfig to read from
   * @param exeFinder used to locate interpreters on the environment PATH
   */
  public PythonBuckConfig(BuckConfig config, ExecutableFinder exeFinder) {
    this.delegate = config;
    this.exeFinder = exeFinder;
  }

  /** Platform built from the top-level [python] section, flavored py-default. */
  @VisibleForTesting
  protected PythonPlatform getDefaultPythonPlatform(ProcessExecutor executor)
      throws InterruptedException {
    return getPythonPlatform(executor, SECTION, DEFAULT_PYTHON_PLATFORM);
  }

  /**
   * Constructs set of Python platform flavors given in a .buckconfig file, as is specified by
   * section names of the form python#{flavor name}.
   */
  public ImmutableList<PythonPlatform> getPythonPlatforms(ProcessExecutor processExecutor)
      throws InterruptedException {
    ImmutableList.Builder<PythonPlatform> builder = ImmutableList.builder();

    // Add the python platform described in the top-level section first.
    builder.add(getDefaultPythonPlatform(processExecutor));

    // Then add all additional python platform described in the extended sections.
    for (String section : delegate.getSections()) {
      if (section.startsWith(PYTHON_PLATFORM_SECTION_PREFIX)) {
        builder.add(
            getPythonPlatform(
                processExecutor,
                section,
                InternalFlavor.of(section.substring(PYTHON_PLATFORM_SECTION_PREFIX.length()))));
      }
    }

    return builder.build();
  }

  /** Assembles one platform: interpreter environment plus optional std library target. */
  private PythonPlatform getPythonPlatform(
      ProcessExecutor processExecutor, String section, Flavor flavor) throws InterruptedException {
    return PythonPlatform.of(
        flavor,
        getPythonEnvironment(processExecutor, section),
        delegate.getBuildTarget(section, "library"));
  }

  /**
   * Searches the environment PATH for the first of the given interpreter names.
   *
   * @throws HumanReadableException if none of the names resolve to an executable
   */
  private Path findInterpreter(ImmutableList<String> interpreterNames) {
    Preconditions.checkArgument(!interpreterNames.isEmpty());
    for (String interpreterName : interpreterNames) {
      Optional<Path> python =
          exeFinder.getOptionalExecutable(Paths.get(interpreterName), delegate.getEnvironment());
      if (python.isPresent()) {
        return python.get().toAbsolutePath();
      }
    }
    throw new HumanReadableException(
        "No python interpreter found (searched %s).", Joiner.on(", ").join(interpreterNames));
  }

  /**
   * Returns the path to python interpreter. If python is specified in 'interpreter' key of the
   * 'python' section that is used and an error reported if invalid.
   *
   * @return The found python interpreter.
   */
  public Path getPythonInterpreter(Optional<String> config) {
    if (!config.isPresent()) {
      // No explicit config: fall back to the default name search.
      return findInterpreter(PYTHON_INTERPRETER_NAMES);
    }
    Path configPath = Paths.get(config.get());
    if (!configPath.isAbsolute()) {
      // Relative value is treated as an executable name to find on PATH.
      return findInterpreter(ImmutableList.of(config.get()));
    }
    return configPath;
  }

  /** Interpreter for a specific config section's 'interpreter' key. */
  private Path getPythonInterpreter(String section) {
    return getPythonInterpreter(delegate.getValue(section, "interpreter"));
  }

  /** @return the {@link Path} to the default python interpreter. */
  public Path getPythonInterpreter() {
    return getPythonInterpreter(SECTION);
  }

  /** Interpreter path plus its detected (or configured) version. */
  private PythonEnvironment getPythonEnvironment(ProcessExecutor processExecutor, String section)
      throws InterruptedException {
    Path pythonPath = getPythonInterpreter(section);
    PythonVersion pythonVersion = getVersion(processExecutor, section, pythonPath);
    return new PythonEnvironment(pythonPath, pythonVersion);
  }

  /** Environment for the top-level [python] section. */
  public PythonEnvironment getPythonEnvironment(ProcessExecutor processExecutor)
      throws InterruptedException {
    return getPythonEnvironment(processExecutor, SECTION);
  }

  /** Cached __test_main__.py bootstrap for python tests on this filesystem. */
  public SourcePath getPathToTestMain(ProjectFilesystem filesystem) {
    return PATH_TO_TEST_MAIN.getUnchecked(filesystem);
  }

  /** Optional build target providing the pex tool (python.path_to_pex). */
  public Optional<BuildTarget> getPexTarget() {
    return delegate.getMaybeBuildTarget(SECTION, "path_to_pex");
  }

  /** Pex tool with any whitespace-separated python.pex_flags appended. */
  public Tool getPexTool(BuildRuleResolver resolver) {
    CommandTool.Builder builder = new CommandTool.Builder(getRawPexTool(resolver));
    for (String flag :
        Splitter.on(' ')
            .omitEmptyStrings()
            .split(delegate.getValue(SECTION, "pex_flags").orElse(""))) {
      builder.addArg(flag);
    }
    return builder.build();
  }

  /** Configured pex tool, or the bundled make_pex.py run under the interpreter. */
  private Tool getRawPexTool(BuildRuleResolver resolver) {
    Optional<Tool> executable = delegate.getTool(SECTION, "path_to_pex", resolver);
    if (executable.isPresent()) {
      return executable.get();
    }
    return VersionedTool.builder()
        .setName("pex")
        .setVersion(BuckVersion.getVersion())
        .setPath(getPythonInterpreter(SECTION))
        .addExtraArgs(DEFAULT_PATH_TO_PEX.toString())
        .build();
  }

  // NOTE(review): "path_to_pex_executer" is misspelled but is the live config
  // key; it must not be "fixed" without a migration for existing configs.
  /** Optional build target providing the pex executor. */
  public Optional<BuildTarget> getPexExecutorTarget() {
    return delegate.getMaybeBuildTarget(SECTION, "path_to_pex_executer");
  }

  /** Optional tool used to run built pex files. */
  public Optional<Tool> getPexExecutor(BuildRuleResolver resolver) {
    return delegate.getTool(SECTION, "path_to_pex_executer", resolver);
  }

  /** Native linking strategy for python binaries; defaults to SEPARATE. */
  public NativeLinkStrategy getNativeLinkStrategy() {
    return delegate
        .getEnum(SECTION, "native_link_strategy", NativeLinkStrategy.class)
        .orElse(NativeLinkStrategy.SEPARATE);
  }

  /** File extension for built packages; defaults to ".pex". */
  public String getPexExtension() {
    return delegate.getValue(SECTION, "pex_extension").orElse(".pex");
  }

  /** Explicitly configured python version for a section, if any. */
  private Optional<PythonVersion> getConfiguredVersion(String section) {
    return delegate.getValue(section, "version").map(PythonVersion::fromString);
  }

  /** Configured version wins; otherwise interrogate the interpreter itself. */
  private PythonVersion getVersion(ProcessExecutor processExecutor, String section, Path path)
      throws InterruptedException {

    Optional<PythonVersion> configuredVersion = getConfiguredVersion(section);
    if (configuredVersion.isPresent()) {
      return configuredVersion.get();
    }

    return PythonVersion.fromInterpreter(processExecutor, path);
  }

  /** Whether built python binaries may be stored in the build cache (default true). */
  public boolean shouldCacheBinaries() {
    return delegate.getBooleanValue(SECTION, "cache_binaries", true);
  }

  /** Whether to use the legacy output path layout (default false). */
  public boolean legacyOutputPath() {
    return delegate.getBooleanValue(SECTION, "legacy_output_path", false);
  }

  /** Packaging style for python binaries; defaults to STANDALONE. */
  public PackageStyle getPackageStyle() {
    return delegate
        .getEnum(SECTION, "package_style", PackageStyle.class)
        .orElse(PackageStyle.STANDALONE);
  }

  /** STANDALONE builds a self-contained pex; INPLACE runs from the source tree. */
  public enum PackageStyle {
    STANDALONE,
    INPLACE,
  }
}
/*
 * Copyright 2018-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.rules;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.FakeTargetNodeBuilder.FakeDescription;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.facebook.buck.util.RichStream;
import com.google.common.collect.ImmutableSortedSet;
import java.util.SortedSet;
import java.util.stream.Collectors;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

/**
 * Unit tests for {@code ActionGraphNodeCache}: caching/eviction of build rules
 * across target-graph walks, invalidation of parent chains when children
 * change or are uncacheable, and re-pointing of cached rules at the current
 * {@link BuildRuleResolver}. Each test drives the cache through its
 * prepareForTargetGraphWalk / requireRule / finishTargetGraphWalk protocol.
 */
public class ActionGraphNodeCacheTest {

  @Rule public ExpectedException expectedException = ExpectedException.none();

  private ActionGraphNodeCache cache;
  private TargetGraph targetGraph;
  private BuildRuleResolver ruleResolver;

  @Before
  public void setUp() {
    // Default capacity is large enough that eviction tests override it.
    cache = new ActionGraphNodeCache(100);
  }

  /** A cacheable rule ends up both in the cache and in the resolver. */
  @Test
  public void cacheableRuleCached() {
    TargetNode<?, ?> node = createTargetNode("test1");
    setUpTargetGraphAndResolver(node);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(node);
    cache.finishTargetGraphWalk();

    assertTrue(cache.containsKey(node));
    assertTrue(ruleResolver.getRuleOptional(node.getBuildTarget()).isPresent());
  }

  /** An uncacheable rule is built into the resolver but never cached. */
  @Test
  public void uncacheableRuleNotCached() {
    TargetNode<?, ?> node = createUncacheableTargetNode("test1");
    setUpTargetGraphAndResolver(node);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(node);
    cache.finishTargetGraphWalk();

    assertFalse(cache.containsKey(node));
    assertTrue(ruleResolver.getRuleOptional(node.getBuildTarget()).isPresent());
  }

  /** Uncacheability propagates upward: the parent is excluded from the cache too. */
  @Test
  public void cacheableRuleWithUncacheableChildNotCached() {
    TargetNode<?, ?> childNode = createUncacheableTargetNode("child");
    TargetNode<?, ?> parentNode = createTargetNode("parent", childNode);
    setUpTargetGraphAndResolver(parentNode, childNode);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(childNode);
    cache.requireRule(parentNode);
    cache.finishTargetGraphWalk();

    assertFalse(cache.containsKey(childNode));
    assertTrue(ruleResolver.getRuleOptional(childNode.getBuildTarget()).isPresent());
    assertFalse(cache.containsKey(parentNode));
    assertTrue(ruleResolver.getRuleOptional(parentNode.getBuildTarget()).isPresent());
  }

  /** With capacity 2, the least recently used of 3 entries is evicted. */
  @Test
  public void cachedNodesLruEvicted() {
    cache = new ActionGraphNodeCache(2);
    TargetNode<?, ?> node1 = createTargetNode("test1");
    TargetNode<?, ?> node2 = createTargetNode("test2");
    TargetNode<?, ?> node3 = createTargetNode("test3");
    setUpTargetGraphAndResolver(node1, node2, node3);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(node1);
    cache.requireRule(node2);
    cache.requireRule(node3);
    cache.finishTargetGraphWalk();

    assertFalse(cache.containsKey(node1));
    assertTrue(cache.containsKey(node2));
    assertTrue(cache.containsKey(node3));
  }

  /** An equal node in a fresh walk reuses the previously built rule instance. */
  @Test
  public void buildRuleForUnchangedTargetLoadedFromCache() {
    TargetNode<?, ?> originalNode = createTargetNode("test1");
    setUpTargetGraphAndResolver(originalNode);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    BuildRule originalBuildRule = cache.requireRule(originalNode);
    cache.finishTargetGraphWalk();

    TargetNode<?, ?> newNode = createTargetNode("test1");
    assertEquals(originalNode, newNode);
    setUpTargetGraphAndResolver(newNode);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    BuildRule newBuildRule = cache.requireRule(newNode);
    cache.finishTargetGraphWalk();

    assertSame(originalBuildRule, newBuildRule);
    assertTrue(ruleResolver.getRuleOptional(newNode.getBuildTarget()).isPresent());
    assertSame(originalBuildRule, ruleResolver.getRule(newNode.getBuildTarget()));
  }

  /** A node whose deps changed must be rebuilt, not served from cache. */
  @Test
  public void buildRuleForChangedTargetNotLoadedFromCache() {
    TargetNode<?, ?> originalNode = createTargetNode("test1");
    setUpTargetGraphAndResolver(originalNode);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    BuildRule originalBuildRule = cache.requireRule(originalNode);
    cache.finishTargetGraphWalk();

    TargetNode<?, ?> depNode = createTargetNode("test2");
    TargetNode<?, ?> newNode = createTargetNode("test1", depNode);
    setUpTargetGraphAndResolver(newNode, depNode);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    BuildRule newBuildRule = cache.requireRule(newNode);
    cache.finishTargetGraphWalk();

    assertNotSame(originalBuildRule, newBuildRule);
    assertTrue(ruleResolver.getRuleOptional(newNode.getBuildTarget()).isPresent());
    assertNotSame(originalBuildRule, ruleResolver.getRule(newNode.getBuildTarget()));
  }

  /** Changing a shared child invalidates every parent chain that reaches it. */
  @Test
  public void allParentChainsForChangedTargetInvalidated() {
    TargetNode<?, ?> originalChildNode = createTargetNode("child");
    TargetNode<?, ?> originalParentNode1 = createTargetNode("parent1", originalChildNode);
    TargetNode<?, ?> originalParentNode2 = createTargetNode("parent2", originalChildNode);
    setUpTargetGraphAndResolver(originalParentNode1, originalParentNode2, originalChildNode);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(originalChildNode);
    BuildRule originalParentBuildRule1 = cache.requireRule(originalParentNode1);
    BuildRule originalParentBuildRule2 = cache.requireRule(originalParentNode2);
    cache.finishTargetGraphWalk();

    // Changing the label changes the node's equality, simulating an edit.
    TargetNode<?, ?> newChildNode = createTargetNode("child", "new_label");
    TargetNode<?, ?> newParentNode1 = createTargetNode("parent1", newChildNode);
    TargetNode<?, ?> newParentNode2 = createTargetNode("parent2", newChildNode);
    setUpTargetGraphAndResolver(newParentNode1, newParentNode2, newChildNode);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(newChildNode);
    cache.requireRule(newParentNode1);
    cache.requireRule(newParentNode2);
    cache.finishTargetGraphWalk();

    assertTrue(ruleResolver.getRuleOptional(newParentNode1.getBuildTarget()).isPresent());
    assertNotSame(originalParentBuildRule1, ruleResolver.getRule(newParentNode1.getBuildTarget()));
    assertTrue(ruleResolver.getRuleOptional(newParentNode2.getBuildTarget()).isPresent());
    assertNotSame(originalParentBuildRule2, ruleResolver.getRule(newParentNode2.getBuildTarget()));
  }

  /** Loading a cached node also re-registers its whole build-rule subtree. */
  @Test
  public void buildRuleSubtreeForCachedTargetAddedToResolver() {
    FakeCacheableBuildRule buildRuleDep1 = new FakeCacheableBuildRule("test1#flav1");
    FakeCacheableBuildRule buildRuleDep2 = new FakeCacheableBuildRule("test1#flav2");
    FakeCacheableBuildRule buildRule =
        new FakeCacheableBuildRule("test1", buildRuleDep1, buildRuleDep2);
    TargetNode<?, ?> node = createTargetNode(buildRule);
    setUpTargetGraphAndResolver(node);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(node);
    cache.finishTargetGraphWalk();

    BuildRuleResolver newRuleResolver = createBuildRuleResolver(targetGraph);
    cache.prepareForTargetGraphWalk(targetGraph, newRuleResolver);
    cache.requireRule(node);
    cache.finishTargetGraphWalk();

    assertTrue(newRuleResolver.getRuleOptional(buildRule.getBuildTarget()).isPresent());
    assertTrue(newRuleResolver.getRuleOptional(buildRuleDep1.getBuildTarget()).isPresent());
    assertTrue(newRuleResolver.getRuleOptional(buildRuleDep2.getBuildTarget()).isPresent());
  }

  /** A changed child invalidates the parent, but an unchanged sibling stays cached. */
  @Test
  public void changedCacheableNodeInvalidatesParentChain() {
    TargetNode<?, ?> originalChildNode1 = createTargetNode("child1");
    TargetNode<?, ?> originalChildNode2 = createTargetNode("child2");
    TargetNode<?, ?> originalParentNode =
        createTargetNode("parent", originalChildNode1, originalChildNode2);
    setUpTargetGraphAndResolver(originalParentNode, originalChildNode1, originalChildNode2);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    BuildRule originalChildRule1 = cache.requireRule(originalChildNode1);
    BuildRule originalChildRule2 = cache.requireRule(originalChildNode2);
    BuildRule originalParentRule = cache.requireRule(originalParentNode);
    cache.finishTargetGraphWalk();

    TargetNode<?, ?> newChildNode1 = createTargetNode("child1", "new_label");
    TargetNode<?, ?> newChildNode2 = createTargetNode("child2");
    TargetNode<?, ?> newParentNode = createTargetNode("parent", newChildNode1, newChildNode2);
    setUpTargetGraphAndResolver(newParentNode, newChildNode1, newChildNode2);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(newChildNode1);
    cache.requireRule(newChildNode2);
    cache.requireRule(newParentNode);
    cache.finishTargetGraphWalk();

    assertTrue(ruleResolver.getRuleOptional(newParentNode.getBuildTarget()).isPresent());
    assertNotSame(originalParentRule, ruleResolver.getRule(newParentNode.getBuildTarget()));
    assertTrue(ruleResolver.getRuleOptional(newChildNode1.getBuildTarget()).isPresent());
    assertNotSame(originalChildRule1, ruleResolver.getRule(newChildNode1.getBuildTarget()));
    assertTrue(ruleResolver.getRuleOptional(newChildNode2.getBuildTarget()).isPresent());
    assertSame(originalChildRule2, ruleResolver.getRule(newChildNode2.getBuildTarget()));
  }

  /** An uncacheable child forces the parent rule to be rebuilt every walk. */
  @Test
  public void uncacheableNodeInvalidatesParentChain() {
    TargetNode<?, ?> originalChildNode1 = createUncacheableTargetNode("child1");
    TargetNode<?, ?> originalChildNode2 = createTargetNode("child2");
    TargetNode<?, ?> originalParentNode =
        createTargetNode("parent", originalChildNode1, originalChildNode2);
    setUpTargetGraphAndResolver(originalParentNode, originalChildNode1, originalChildNode2);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    BuildRule originalChildRule1 = cache.requireRule(originalChildNode1);
    BuildRule originalChildRule2 = cache.requireRule(originalChildNode2);
    BuildRule originalParentRule = cache.requireRule(originalParentNode);
    cache.finishTargetGraphWalk();

    TargetNode<?, ?> newChildNode1 = createUncacheableTargetNode("child1");
    TargetNode<?, ?> newChildNode2 = createTargetNode("child2");
    TargetNode<?, ?> newParentNode = createTargetNode("parent", newChildNode1, newChildNode2);
    setUpTargetGraphAndResolver(newParentNode, newChildNode1, newChildNode2);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(newChildNode1);
    cache.requireRule(newChildNode2);
    cache.requireRule(newParentNode);
    cache.finishTargetGraphWalk();

    assertTrue(ruleResolver.getRuleOptional(newParentNode.getBuildTarget()).isPresent());
    assertNotSame(originalParentRule, ruleResolver.getRule(newParentNode.getBuildTarget()));
    assertTrue(ruleResolver.getRuleOptional(newChildNode1.getBuildTarget()).isPresent());
    assertNotSame(originalChildRule1, ruleResolver.getRule(newChildNode1.getBuildTarget()));
    assertTrue(ruleResolver.getRuleOptional(newChildNode2.getBuildTarget()).isPresent());
    assertSame(originalChildRule2, ruleResolver.getRule(newChildNode2.getBuildTarget()));
  }

  /** A parent whose cached child was LRU-evicted gets invalidated on the next walk. */
  @Test
  public void cachedParentInvalidatedIfPreviouslyCachedChildPushedOutOfCache() {
    cache = new ActionGraphNodeCache(2);
    TargetNode<?, ?> childNode1 = createTargetNode("child1");
    TargetNode<?, ?> childNode2 = createTargetNode("child2");
    TargetNode<?, ?> parentNode = createTargetNode("parent", childNode1, childNode2);
    setUpTargetGraphAndResolver(parentNode, childNode1, childNode2);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(childNode1);
    cache.requireRule(childNode2);
    cache.requireRule(parentNode);
    cache.finishTargetGraphWalk();

    // Capacity 2: child1 was evicted by child2 + parent.
    assertFalse(cache.containsKey(childNode1));
    assertTrue(cache.containsKey(childNode2));
    assertTrue(cache.containsKey(parentNode));

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.finishTargetGraphWalk();

    assertFalse(cache.containsKey(parentNode));
  }

  /** Declared, extra, and target-graph-only deps are all restored from a cached node. */
  @Test
  public void allTargetGraphDepTypesAddedToIndexForCachedNode() {
    TargetNode<?, ?> declaredChildNode = createTargetNodeBuilder("declared").build();
    TargetNode<?, ?> extraChildNode = createTargetNodeBuilder("extra").build();
    TargetNode<?, ?> targetGraphOnlyChildNode = createTargetNodeBuilder("target_graph_only").build();
    TargetNode<?, ?> parentNode =
        createTargetNodeBuilder("parent")
            .setDeps(declaredChildNode)
            .setExtraDeps(extraChildNode)
            .setTargetGraphOnlyDeps(targetGraphOnlyChildNode)
            .build();
    setUpTargetGraphAndResolver(
        parentNode, declaredChildNode, extraChildNode, targetGraphOnlyChildNode);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(declaredChildNode);
    cache.requireRule(extraChildNode);
    cache.requireRule(targetGraphOnlyChildNode);
    cache.requireRule(parentNode);
    cache.finishTargetGraphWalk();

    assertTrue(cache.containsKey(parentNode));

    setUpTargetGraphAndResolver(
        parentNode, declaredChildNode, extraChildNode, targetGraphOnlyChildNode);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(parentNode);
    cache.finishTargetGraphWalk();

    assertTrue(ruleResolver.getRuleOptional(declaredChildNode.getBuildTarget()).isPresent());
    assertTrue(ruleResolver.getRuleOptional(extraChildNode.getBuildTarget()).isPresent());
    assertTrue(ruleResolver.getRuleOptional(targetGraphOnlyChildNode.getBuildTarget()).isPresent());
  }

  /** Runtime deps registered in a prior walk are found via the last resolver. */
  @Test
  public void cachedNodeUsesLastRuleResolverForRuntimeDeps() {
    FakeCacheableBuildRule childBuildRule = new FakeCacheableBuildRule("test#child");
    SortedSet<BuildTarget> runtimeDeps = ImmutableSortedSet.of(childBuildRule.getBuildTarget());
    FakeCacheableBuildRule parentBuildRule = new FakeCacheableBuildRule("test", runtimeDeps);
    TargetNode<?, ?> originalNode = createTargetNode(parentBuildRule);
    setUpTargetGraphAndResolver(originalNode);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    ruleResolver.addToIndex(childBuildRule);
    cache.requireRule(originalNode);
    cache.finishTargetGraphWalk();

    FakeCacheableBuildRule newParentBuildRule = new FakeCacheableBuildRule("test", runtimeDeps);
    TargetNode<?, ?> newNode = createTargetNode(newParentBuildRule);
    setUpTargetGraphAndResolver(newNode);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(newNode);
    cache.finishTargetGraphWalk();

    assertTrue(ruleResolver.getRuleOptional(newNode.getBuildTarget()).isPresent());
    assertSame(parentBuildRule, ruleResolver.getRule(originalNode.getBuildTarget()));
    assertTrue(ruleResolver.getRuleOptional(childBuildRule.getBuildTarget()).isPresent());
    assertSame(childBuildRule, ruleResolver.getRule(childBuildRule.getBuildTarget()));
  }

  /** Cache-hit path repoints every rule in the cached subtree at the new resolver. */
  @Test
  public void ruleResolversUpdatedForCachedNodeSubtreeLoadedFromCache() {
    FakeCacheableBuildRule buildRuleDep1 = new FakeCacheableBuildRule("test1#flav1");
    FakeCacheableBuildRule buildRuleDep2 = new FakeCacheableBuildRule("test1#flav2");
    FakeCacheableBuildRule buildRule =
        new FakeCacheableBuildRule("test1", buildRuleDep1, buildRuleDep2);
    TargetNode<?, ?> node = createTargetNode(buildRule);
    setUpTargetGraphAndResolver(node);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(node);
    cache.finishTargetGraphWalk();

    BuildRuleResolver newRuleResolver = createBuildRuleResolver(targetGraph);
    cache.prepareForTargetGraphWalk(targetGraph, newRuleResolver);
    cache.requireRule(node);
    cache.finishTargetGraphWalk();

    assertSame(newRuleResolver, buildRule.getRuleResolver());
    assertSame(newRuleResolver, buildRuleDep1.getRuleResolver());
    assertSame(newRuleResolver, buildRuleDep2.getRuleResolver());
  }

  /** Even cached subtrees NOT required this walk get repointed at the new resolver. */
  @Test
  public void ruleResolversUpdatedForCachedNodeSubtreeNotLoadedFromCache() {
    FakeCacheableBuildRule buildRuleDep1 = new FakeCacheableBuildRule("test1#flav1");
    FakeCacheableBuildRule buildRuleDep2 = new FakeCacheableBuildRule("test1#flav2");
    FakeCacheableBuildRule buildRule =
        new FakeCacheableBuildRule("test1", buildRuleDep1, buildRuleDep2);
    TargetNode<?, ?> node1 = createTargetNode(buildRule);
    TargetNode<?, ?> node2 = createTargetNode("test2");
    setUpTargetGraphAndResolver(node1, node2);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(node1);
    cache.requireRule(node2);
    cache.finishTargetGraphWalk();

    BuildRuleResolver newRuleResolver = createBuildRuleResolver(targetGraph);
    cache.prepareForTargetGraphWalk(targetGraph, newRuleResolver);
    cache.requireRule(node2);
    cache.finishTargetGraphWalk();

    assertSame(newRuleResolver, buildRule.getRuleResolver());
    assertSame(newRuleResolver, buildRuleDep1.getRuleResolver());
    assertSame(newRuleResolver, buildRuleDep2.getRuleResolver());
  }

  /** Using a resolver from a finished walk must fail with IllegalStateException. */
  @Test
  public void lastRuleResolverInvalidatedAfterTargetGraphWalk() {
    expectedException.expect(IllegalStateException.class);

    TargetNode<?, ?> node = createTargetNode("node");
    setUpTargetGraphAndResolver(node);
    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(node);
    cache.finishTargetGraphWalk();

    BuildRuleResolver oldRuleResolver = ruleResolver;
    setUpTargetGraphAndResolver(node);
    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(node);
    cache.finishTargetGraphWalk();

    // Stale resolver access should throw.
    oldRuleResolver.getRuleOptional(node.getBuildTarget());
  }

  /** Runtime deps of a node cached two walks ago are restored on a later cache hit. */
  @Test
  public void runtimeDepsForOldCachedNodeLoadedFromCache() {
    FakeCacheableBuildRule childBuildRule = new FakeCacheableBuildRule("test#child");
    SortedSet<BuildTarget> runtimeDeps = ImmutableSortedSet.of(childBuildRule.getBuildTarget());
    FakeCacheableBuildRule parentBuildRule = new FakeCacheableBuildRule("test", runtimeDeps);
    TargetNode<?, ?> node1 = createTargetNode(parentBuildRule);
    setUpTargetGraphAndResolver(node1);

    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(node1);
    ruleResolver.addToIndex(childBuildRule);
    cache.finishTargetGraphWalk();

    // Intervening walk that does not touch node1.
    TargetNode<?, ?> node2 = createTargetNode("test2");
    setUpTargetGraphAndResolver(node2);
    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(node2);
    cache.finishTargetGraphWalk();

    setUpTargetGraphAndResolver(node1);
    cache.prepareForTargetGraphWalk(targetGraph, ruleResolver);
    cache.requireRule(node1);
    cache.finishTargetGraphWalk();

    assertTrue(ruleResolver.getRuleOptional(childBuildRule.getBuildTarget()).isPresent());
    assertSame(childBuildRule, ruleResolver.getRuleOptional(childBuildRule.getBuildTarget()).get());
  }

  /** Builder for //test:<name> nodes whose description creates cacheable rules. */
  private FakeTargetNodeBuilder createTargetNodeBuilder(String name) {
    BuildTarget buildTarget = BuildTargetFactory.newInstance("//test:" + name);
    return FakeTargetNodeBuilder.newBuilder(
        new FakeDescription() {
          @Override
          public BuildRule createBuildRule(
              BuildRuleCreationContext context,
              BuildTarget buildTarget,
              BuildRuleParams params,
              FakeTargetNodeArg args) {
            return new FakeCacheableBuildRule(buildTarget, context.getProjectFilesystem(), params);
          }
        },
        buildTarget);
  }

  /** Cacheable node named //test:<name> with the given dep nodes. */
  private TargetNode<?, ?> createTargetNode(String name, TargetNode<?, ?>... deps) {
    return createTargetNode(name, null, deps);
  }

  /** Cacheable node with an optional label (labels affect node equality). */
  private TargetNode<?, ?> createTargetNode(String name, String label, TargetNode<?, ?>... deps) {
    return createTargetNode(
        name,
        label,
        new FakeDescription() {
          @Override
          public BuildRule createBuildRule(
              BuildRuleCreationContext context,
              BuildTarget buildTarget,
              BuildRuleParams params,
              FakeTargetNodeArg args) {
            return new FakeCacheableBuildRule(buildTarget, context.getProjectFilesystem(), params);
          }
        },
        deps);
  }

  /** Shared node-construction helper wiring deps and an optional label. */
  private TargetNode<?, ?> createTargetNode(
      String name, String label, FakeDescription description, TargetNode<?, ?>... deps) {
    FakeTargetNodeBuilder targetNodeBuilder =
        FakeTargetNodeBuilder.newBuilder(
            description, BuildTargetFactory.newInstance("//test:" + name));
    for (TargetNode<?, ?> dep : deps) {
      targetNodeBuilder.getArgForPopulating().addDeps(dep.getBuildTarget());
    }
    if (label != null) {
      targetNodeBuilder.getArgForPopulating().addLabels(label);
    }
    return targetNodeBuilder.build();
  }

  /** Node wrapping a pre-constructed rule instance (for resolver-identity tests). */
  private TargetNode<?, ?> createTargetNode(BuildRule buildRule, TargetNode<?, ?>... deps) {
    FakeTargetNodeBuilder builder = FakeTargetNodeBuilder.newBuilder(buildRule);
    builder
        .getArgForPopulating()
        .setDeps(RichStream.from(deps).map(t -> t.getBuildTarget()).collect(Collectors.toList()));
    return builder.build();
  }

  /** Node whose rule is a plain FakeBuildRule, i.e. NOT a CacheableBuildRule. */
  private TargetNode<?, ?> createUncacheableTargetNode(String target) {
    return createTargetNode(
        target,
        null,
        new FakeDescription() {
          @Override
          public BuildRule createBuildRule(
              BuildRuleCreationContext context,
              BuildTarget buildTarget,
              BuildRuleParams params,
              FakeTargetNodeArg args) {
            BuildRule buildRule =
                new FakeBuildRule(buildTarget, context.getProjectFilesystem(), params);
            // Sanity check: the fixture must really be uncacheable.
            assertFalse(buildRule instanceof CacheableBuildRule);
            return buildRule;
          }
        });
  }

  /** Rebuilds targetGraph and a fresh resolver from the given nodes. */
  private void setUpTargetGraphAndResolver(TargetNode<?, ?>... nodes) {
    targetGraph = TargetGraphFactory.newInstance(nodes);
    ruleResolver = createBuildRuleResolver(targetGraph);
  }

  /** Single-threaded resolver over the given graph with default transformer. */
  private BuildRuleResolver createBuildRuleResolver(TargetGraph targetGraph) {
    return new SingleThreadedBuildRuleResolver(
        targetGraph,
        new DefaultTargetNodeToBuildRuleTransformer(),
        new TestCellBuilder().build().getCellProvider(),
        null);
  }
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.ml.process; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.LocalNodeMasterListener; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.TimeValue; import org.elasticsearch.persistent.PersistentTasksClusterService; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.action.StartDataFrameAnalyticsAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.inference.allocation.TrainedModelAllocation; import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskParams; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.dataframe.persistence.DataFrameAnalyticsConfigProvider; import org.elasticsearch.xpack.ml.inference.allocation.TrainedModelAllocationMetadata; import 
org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Phaser; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.Stream; /** * This class keeps track of the memory requirement of ML jobs. * It only functions on the master node - for this reason it should only be used by master node actions. * The memory requirement for ML jobs can be updated in 4 ways: * 1. For all open ML data frame analytics jobs, anomaly detector jobs and model snapshot upgrades (via {@link #asyncRefresh}) * 2. For all open/started ML jobs and model snapshot upgrades, plus one named ML anomaly detector job that may not be open * (via {@link #refreshAnomalyDetectorJobMemoryAndAllOthers}) * 3. For all open/started ML jobs and model snapshot upgrades, plus one named ML data frame analytics job that is not started * (via {@link #addDataFrameAnalyticsJobMemoryAndRefreshAllOthers}) * 4. For one named ML anomaly detector job or model snapshot upgrade (via {@link #refreshAnomalyDetectorJobMemory}) * In cases 2, 3 and 4 a listener informs the caller when the requested updates are complete. 
*/
public class MlMemoryTracker implements LocalNodeMasterListener {

    // A refresh completed within this threshold (plus the reassignment recheck
    // interval) is still considered "recent" for task assignment decisions.
    private static final Duration RECENT_UPDATE_THRESHOLD = Duration.ofMinutes(1);

    private final Logger logger = LogManager.getLogger(MlMemoryTracker.class);

    // Memory requirement (bytes) per job ID, one concurrent map per ML job type.
    private final Map<String, Long> memoryRequirementByAnomalyDetectorJob = new ConcurrentHashMap<>();
    private final Map<String, Long> memoryRequirementByDataFrameAnalyticsJob = new ConcurrentHashMap<>();
    // Persistent task name -> the corresponding per-job requirement map above (unmodifiable view).
    private final Map<String, Map<String, Long>> memoryRequirementByTaskName;
    // Listeners to notify when the in-flight full refresh completes. The list is used
    // as its own lock; a non-empty list means a full refresh is already in progress.
    private final List<ActionListener<Void>> fullRefreshCompletionListeners = new ArrayList<>();

    private final ThreadPool threadPool;
    private final ClusterService clusterService;
    private final JobManager jobManager;
    private final JobResultsProvider jobResultsProvider;
    private final DataFrameAnalyticsConfigProvider configProvider;
    // Tracks in-flight refresh searches so stop()/awaitAndClear() can wait for them.
    private final Phaser stopPhaser;
    // Incremented when the cache is cleared via awaitAndClear(); refreshes that
    // registered against an older phase are rejected (see refreshAnomalyDetectorJobMemory).
    private final AtomicInteger phase = new AtomicInteger(0);
    private volatile boolean isMaster;
    private volatile boolean stopped;
    // Instant of the last successful full refresh, or null if never refreshed/cleared.
    private volatile Instant lastUpdateTime;
    private volatile Duration reassignmentRecheckInterval;

    public MlMemoryTracker(
        Settings settings,
        ClusterService clusterService,
        ThreadPool threadPool,
        JobManager jobManager,
        JobResultsProvider jobResultsProvider,
        DataFrameAnalyticsConfigProvider configProvider
    ) {
        this.threadPool = threadPool;
        this.clusterService = clusterService;
        this.jobManager = jobManager;
        this.jobResultsProvider = jobResultsProvider;
        this.configProvider = configProvider;
        this.stopPhaser = new Phaser(1);
        Map<String, Map<String, Long>> memoryRequirementByTaskName = new TreeMap<>();
        memoryRequirementByTaskName.put(MlTasks.JOB_TASK_NAME, memoryRequirementByAnomalyDetectorJob);
        memoryRequirementByTaskName.put(MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME, memoryRequirementByDataFrameAnalyticsJob);
        // We don't add snapshot upgrade tasks here - instead, we assume they
        // have the same memory requirement as the job they correspond to.
        this.memoryRequirementByTaskName = Collections.unmodifiableMap(memoryRequirementByTaskName);

        setReassignmentRecheckInterval(PersistentTasksClusterService.CLUSTER_TASKS_ALLOCATION_RECHECK_INTERVAL_SETTING.get(settings));
        clusterService.addLocalNodeMasterListener(this);
        // Keep the recheck interval in sync with dynamic cluster-setting updates.
        clusterService.getClusterSettings()
            .addSettingsUpdateConsumer(
                PersistentTasksClusterService.CLUSTER_TASKS_ALLOCATION_RECHECK_INTERVAL_SETTING,
                this::setReassignmentRecheckInterval
            );
    }

    private void setReassignmentRecheckInterval(TimeValue recheckInterval) {
        reassignmentRecheckInterval = Duration.ofNanos(recheckInterval.getNanos());
    }

    @Override
    public void onMaster() {
        isMaster = true;
        try {
            // Kick off a background refresh so assignment decisions can be made soon.
            asyncRefresh();
        } catch (Exception ex) {
            logger.warn("unexpected failure while attempting asynchronous refresh on new master assignment", ex);
        }
        logger.trace("ML memory tracker on master");
    }

    @Override
    public void offMaster() {
        // Tracked requirements are only meaningful on the master, so drop them.
        isMaster = false;
        logger.trace("ML memory tracker off master");
        clear();
    }

    // Waits for in-flight refreshes to finish, then clears the cached requirements
    // and bumps the phase so stale refreshes cannot repopulate the cache.
    public void awaitAndClear(ActionListener<Void> listener) {
        // We never terminate the phaser
        logger.trace("awaiting and clearing memory tracker");
        assert stopPhaser.isTerminated() == false;
        // If there are no registered parties or no unarrived parties then there is a flaw
        // in the register/arrive/unregister logic in another method that uses the phaser
        assert stopPhaser.getRegisteredParties() > 0;
        assert stopPhaser.getUnarrivedParties() > 0;
        threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> {
            try {
                // We await all current refreshes to complete, this increments the "current phase" and prevents
                // further interaction while we clear contents
                int newPhase = stopPhaser.arriveAndAwaitAdvance();
                assert newPhase > 0;
                clear();
                phase.incrementAndGet();
                logger.trace("completed awaiting and clearing memory tracker");
                listener.onResponse(null);
            } catch (Exception e) {
                logger.warn("failed to wait for all refresh requests to complete", e);
                listener.onFailure(e);
            }
        });
    }

    // Drops all cached memory requirements and resets the "recently refreshed" marker.
    private void clear() {
        logger.trace("clearing ML Memory tracker contents");
        for (Map<String, Long> memoryRequirementByJob : memoryRequirementByTaskName.values()) {
            memoryRequirementByJob.clear();
        }
        lastUpdateTime = null;
    }

    /**
     * Wait for all outstanding searches to complete.
     * After returning, no new searches can be started.
     */
    public void stop() {
        stopped = true;
        logger.trace("ML memory tracker stop called");
        // We never terminate the phaser
        assert stopPhaser.isTerminated() == false;
        // If there are no registered parties or no unarrived parties then there is a flaw
        // in the register/arrive/unregister logic in another method that uses the phaser
        assert stopPhaser.getRegisteredParties() > 0;
        assert stopPhaser.getUnarrivedParties() > 0;
        stopPhaser.arriveAndAwaitAdvance();
        assert stopPhaser.getPhase() > 0;
        logger.debug("ML memory tracker stopped");
    }

    /**
     * Is the information in this object sufficiently up to date
     * for valid task assignment decisions to be made using it?
     */
    public boolean isRecentlyRefreshed() {
        return isRecentlyRefreshed(reassignmentRecheckInterval);
    }

    /**
     * Is the information in this object sufficiently up to date
     * for valid task assignment decisions to be made using it?
     */
    public boolean isRecentlyRefreshed(Duration customDuration) {
        // Snapshot the volatile once so the null check and use see the same value.
        Instant localLastUpdateTime = lastUpdateTime;
        return isMaster
            && localLastUpdateTime != null
            && localLastUpdateTime.plus(RECENT_UPDATE_THRESHOLD).plus(customDuration).isAfter(Instant.now());
    }

    /**
     * Get the memory requirement for an anomaly detector job.
     * This method only works on the master node.
     * @param jobId The job ID.
     * @return The memory requirement of the job specified by {@code jobId},
     *         or <code>null</code> if it cannot be calculated.
     */
    public Long getAnomalyDetectorJobMemoryRequirement(String jobId) {
        return getJobMemoryRequirement(MlTasks.JOB_TASK_NAME, jobId);
    }

    /**
     * Get the memory requirement for a data frame analytics job.
     * This method only works on the master node.
     * @param id The job ID.
     * @return The memory requirement of the job specified by {@code id},
     *         or <code>null</code> if it cannot be found.
     */
    public Long getDataFrameAnalyticsJobMemoryRequirement(String id) {
        return getJobMemoryRequirement(MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME, id);
    }

    /**
     * Get the memory requirement for a trained model task.
     * This method only works on the master node.
     * @param modelId The model ID.
     * @return The memory requirement of the trained model task specified by {@code modelId},
     *         or <code>null</code> if it cannot be found.
     */
    public Long getTrainedModelAllocationMemoryRequirement(String modelId) {
        if (isMaster == false) {
            return null;
        }
        // Trained model requirements come straight from cluster state rather than the cache.
        return Optional.ofNullable(TrainedModelAllocationMetadata.fromState(clusterService.state()).modelAllocations().get(modelId))
            .map(TrainedModelAllocation::getTaskParams)
            .map(StartTrainedModelDeploymentAction.TaskParams::estimateMemoryUsageBytes)
            .orElse(null);
    }

    /**
     * Get the memory requirement for the type of job corresponding to a specified persistent task name.
     * This method only works on the master node.
     * @param taskName The persistent task name.
     * @param id The job ID.
     * @return The memory requirement of the job specified by {@code id},
     *         or <code>null</code> if it cannot be found.
     */
    public Long getJobMemoryRequirement(String taskName, String id) {
        if (isMaster == false) {
            return null;
        }
        // Assume snapshot upgrade tasks have the same memory requirement as the job they correspond to.
        if (MlTasks.JOB_SNAPSHOT_UPGRADE_TASK_NAME.equals(taskName)) {
            taskName = MlTasks.JOB_TASK_NAME;
        }
        Map<String, Long> memoryRequirementByJob = memoryRequirementByTaskName.get(taskName);
        if (memoryRequirementByJob == null) {
            assert false : "Unknown taskName type [" + taskName + "]";
            return null;
        }
        return memoryRequirementByJob.get(id);
    }

    /**
     * Remove any memory requirement that is stored for the specified anomaly detector job.
     * It doesn't matter if this method is called for a job that doesn't have a
     * stored memory requirement.
     */
    public void removeAnomalyDetectorJob(String jobId) {
        memoryRequirementByAnomalyDetectorJob.remove(jobId);
    }

    /**
     * Remove any memory requirement that is stored for the specified data frame analytics
     * job. It doesn't matter if this method is called for a job that doesn't have a
     * stored memory requirement.
     */
    public void removeDataFrameAnalyticsJob(String id) {
        memoryRequirementByDataFrameAnalyticsJob.remove(id);
    }

    /**
     * Uses a separate thread to refresh the memory requirement for every ML anomaly detector job that has
     * a corresponding persistent task. This method only works on the master node.
     * @return <code>true</code> if the async refresh is scheduled, and <code>false</code>
     *         if this is not possible for some reason.
     */
    public boolean asyncRefresh() {
        if (isMaster) {
            try {
                ActionListener<Void> listener = ActionListener.wrap(
                    aVoid -> logger.trace("Job memory requirement refresh request completed successfully"),
                    e -> logIfNecessary(() -> logger.warn("Failed to refresh job memory requirements", e))
                );
                threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME)
                    .execute(() -> refresh(clusterService.state().getMetadata().custom(PersistentTasksCustomMetadata.TYPE), listener));
                return true;
            } catch (EsRejectedExecutionException e) {
                logger.warn("Couldn't schedule ML memory update - node might be shutting down", e);
            }
        }
        return false;
    }

    /**
     * This refreshes the memory requirement for every ML job that has a corresponding
     * persistent task and, in addition, one job that doesn't have a persistent task.
     * This method only works on the master node.
     * @param jobId The job ID of the job whose memory requirement is to be refreshed
     *              despite not having a corresponding persistent task.
     * @param listener Receives the memory requirement of the job specified by {@code jobId},
     *                 or <code>null</code> if it cannot be calculated.
     */
    public void refreshAnomalyDetectorJobMemoryAndAllOthers(String jobId, ActionListener<Long> listener) {
        if (isMaster == false) {
            listener.onFailure(new NotMasterException("Request to refresh anomaly detector memory requirements on non-master node"));
            return;
        }
        // Skip the provided job ID in the main refresh, as we unconditionally do it at the end.
        // Otherwise it might get refreshed twice, because it could have both a job task and a snapshot upgrade task.
        refresh(
            clusterService.state().getMetadata().custom(PersistentTasksCustomMetadata.TYPE),
            Collections.singleton(jobId),
            ActionListener.wrap(aVoid -> refreshAnomalyDetectorJobMemory(jobId, listener), listener::onFailure)
        );
    }

    /**
     * This refreshes the memory requirement for every ML job that has a corresponding
     * persistent task and, in addition, adds the memory requirement of one data frame analytics
     * job that doesn't have a persistent task. This method only works on the master node.
     * @param id The job ID of the job whose memory requirement is to be added.
     * @param mem The memory requirement (in bytes) of the job specified by {@code id}.
     * @param listener Called when the refresh is complete or fails.
     */
    public void addDataFrameAnalyticsJobMemoryAndRefreshAllOthers(String id, long mem, ActionListener<Void> listener) {
        if (isMaster == false) {
            listener.onFailure(new NotMasterException("Request to put data frame analytics memory requirement on non-master node"));
            return;
        }
        // Record the supplied requirement plus the fixed per-process overhead up front.
        memoryRequirementByDataFrameAnalyticsJob.put(id, mem + DataFrameAnalyticsConfig.PROCESS_MEMORY_OVERHEAD.getBytes());
        PersistentTasksCustomMetadata persistentTasks = clusterService.state().getMetadata().custom(PersistentTasksCustomMetadata.TYPE);
        refresh(persistentTasks, listener);
    }

    /**
     * This refreshes the memory requirement for every ML job that has a corresponding persistent task.
     * It does NOT remove entries for jobs that no longer have a persistent task, because that would lead
     * to a race where a job was opened part way through the refresh. (Instead, entries are removed when
     * jobs are deleted.)
     */
    public void refresh(PersistentTasksCustomMetadata persistentTasks, ActionListener<Void> onCompletion) {
        refresh(persistentTasks, Collections.emptySet(), onCompletion);
    }

    // Core refresh implementation. Only one full refresh runs at a time; concurrent
    // callers just queue their listeners onto the in-flight refresh.
    void refresh(PersistentTasksCustomMetadata persistentTasks, Set<String> jobIdsToSkip, ActionListener<Void> onCompletion) {
        synchronized (fullRefreshCompletionListeners) {
            fullRefreshCompletionListeners.add(onCompletion);
            if (fullRefreshCompletionListeners.size() > 1) {
                // A refresh is already in progress, so don't do another
                return;
            }
        }

        ActionListener<Void> refreshComplete = ActionListener.wrap(aVoid -> {
            synchronized (fullRefreshCompletionListeners) {
                assert fullRefreshCompletionListeners.isEmpty() == false;
                if (isMaster) {
                    lastUpdateTime = Instant.now();
                    for (ActionListener<Void> listener : fullRefreshCompletionListeners) {
                        listener.onResponse(null);
                    }
                    logger.trace("ML memory tracker last update time now [{}] and listeners called", lastUpdateTime);
                } else {
                    // Ceasing to be master invalidates everything gathered during the refresh.
                    Exception e = new NotMasterException("Node ceased to be master during ML memory tracker refresh");
                    for (ActionListener<Void> listener : fullRefreshCompletionListeners) {
                        listener.onFailure(e);
                    }
                }
                fullRefreshCompletionListeners.clear();
            }
        }, e -> {
            synchronized (fullRefreshCompletionListeners) {
                assert fullRefreshCompletionListeners.isEmpty() == false;
                for (ActionListener<Void> listener : fullRefreshCompletionListeners) {
                    listener.onFailure(e);
                }
                logIfNecessary(() -> logger.warn("ML memory tracker last update failed and listeners called", e));
                // It's critical that we empty out the current listener list on
                // error otherwise subsequent retries to refresh will be ignored
                fullRefreshCompletionListeners.clear();
            }
        });

        // persistentTasks will be null if there's never been a persistent task created in this cluster
        if (persistentTasks == null) {
            refreshComplete.onResponse(null);
        } else {
            List<PersistentTasksCustomMetadata.PersistentTask<?>> mlDataFrameAnalyticsJobTasks = persistentTasks.tasks()
                .stream()
                .filter(task -> MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME.equals(task.getTaskName()))
                .collect(Collectors.toList());
            ActionListener<Void> refreshDataFrameAnalyticsJobs = ActionListener.wrap(
                aVoid -> refreshAllDataFrameAnalyticsJobTasks(mlDataFrameAnalyticsJobTasks, refreshComplete),
                refreshComplete::onFailure
            );

            // Collect the job IDs of open jobs and in-progress snapshot upgrades (deduplicated
            // via the set, since one job may have both task types), minus any explicitly skipped.
            Set<String> mlAnomalyDetectorJobTasks = Stream.concat(
                persistentTasks.tasks()
                    .stream()
                    .filter(task -> MlTasks.JOB_TASK_NAME.equals(task.getTaskName()))
                    .map(task -> ((OpenJobAction.JobParams) task.getParams()).getJobId()),
                persistentTasks.tasks()
                    .stream()
                    .filter(task -> MlTasks.JOB_SNAPSHOT_UPGRADE_TASK_NAME.equals(task.getTaskName()))
                    .map(task -> ((SnapshotUpgradeTaskParams) task.getParams()).getJobId())
            ).filter(jobId -> jobIdsToSkip.contains(jobId) == false).collect(Collectors.toSet());
            iterateAnomalyDetectorJobs(mlAnomalyDetectorJobTasks.iterator(), refreshDataFrameAnalyticsJobs);
        }
    }

    // Refreshes anomaly detector jobs one at a time, chaining through the iterator.
    private void iterateAnomalyDetectorJobs(Iterator<String> iterator, ActionListener<Void> refreshComplete) {
        if (iterator.hasNext()) {
            refreshAnomalyDetectorJobMemory(
                iterator.next(),
                ActionListener.wrap(
                    // Do the next iteration in a different thread, otherwise stack overflow
                    // can occur if the searches happen to be on the local node, as the huge
                    // chain of listeners are all called in the same thread if only one node
                    // is involved
                    mem -> threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME)
                        .execute(() -> iterateAnomalyDetectorJobs(iterator, refreshComplete)),
                    refreshComplete::onFailure
                )
            );
        } else {
            refreshComplete.onResponse(null);
        }
    }

    // Bulk-refreshes requirements for all data frame analytics jobs that have tasks,
    // using a single (lenient) config lookup rather than one search per job.
    private void refreshAllDataFrameAnalyticsJobTasks(
        List<PersistentTasksCustomMetadata.PersistentTask<?>> mlDataFrameAnalyticsJobTasks,
        ActionListener<Void> listener
    ) {
        if (mlDataFrameAnalyticsJobTasks.isEmpty()) {
            listener.onResponse(null);
            return;
        }

        Set<String> jobsWithTasks = mlDataFrameAnalyticsJobTasks.stream()
            .map(task -> ((StartDataFrameAnalyticsAction.TaskParams) task.getParams()).getId())
            .collect(Collectors.toSet());

        configProvider.getConfigsForJobsWithTasksLeniently(jobsWithTasks, ActionListener.wrap(analyticsConfigs -> {
            for (DataFrameAnalyticsConfig analyticsConfig : analyticsConfigs) {
                // Requirement = configured model memory limit plus the fixed process overhead.
                memoryRequirementByDataFrameAnalyticsJob.put(
                    analyticsConfig.getId(),
                    analyticsConfig.getModelMemoryLimit().getBytes() + DataFrameAnalyticsConfig.PROCESS_MEMORY_OVERHEAD.getBytes()
                );
            }
            listener.onResponse(null);
        }, listener::onFailure));
    }

    /**
     * Refresh the memory requirement for a single anomaly detector job.
     * This method only works on the master node.
     * @param jobId The ID of the job to refresh the memory requirement for.
     * @param listener Receives the job's memory requirement, or <code>null</code>
     *                 if it cannot be calculated.
     */
    public void refreshAnomalyDetectorJobMemory(String jobId, ActionListener<Long> listener) {
        if (isMaster == false) {
            listener.onFailure(new NotMasterException("Request to refresh anomaly detector memory requirement on non-master node"));
            return;
        }

        // The phaser prevents searches being started after the memory tracker's stop() method has returned
        // Note: `phase` is incremented if cache is reset via the feature reset API
        if (stopPhaser.register() != phase.get()) {
            // Phases above not equal to `phase` mean we've been stopped, so don't do any operations that involve external interaction
            stopPhaser.arriveAndDeregister();
            logger.info(() -> new ParameterizedMessage("[{}] not refreshing anomaly detector memory as node is shutting down", jobId));
            listener.onFailure(new EsRejectedExecutionException("Couldn't run ML memory update - node is shutting down"));
            return;
        }
        // Deregister from the phaser on both success and failure so stop()/awaitAndClear() can advance.
        ActionListener<Long> phaserListener = ActionListener.wrap(r -> {
            stopPhaser.arriveAndDeregister();
            listener.onResponse(r);
        }, e -> {
            stopPhaser.arriveAndDeregister();
            listener.onFailure(e);
        });

        try {
            jobResultsProvider.getEstablishedMemoryUsage(jobId, null, null, establishedModelMemoryBytes -> {
                if (establishedModelMemoryBytes <= 0L) {
                    // No established usage yet - fall back to the configured model memory limit.
                    setAnomalyDetectorJobMemoryToLimit(jobId, phaserListener);
                } else {
                    Long memoryRequirementBytes = establishedModelMemoryBytes + Job.PROCESS_MEMORY_OVERHEAD.getBytes();
                    memoryRequirementByAnomalyDetectorJob.put(jobId, memoryRequirementBytes);
                    phaserListener.onResponse(memoryRequirementBytes);
                }
            }, e -> {
                logIfNecessary(
                    () -> logger.error(
                        () -> new ParameterizedMessage(
                            "[{}] failed to calculate anomaly detector job established model memory requirement",
                            jobId
                        ),
                        e
                    )
                );
                setAnomalyDetectorJobMemoryToLimit(jobId, phaserListener);
            });
        } catch (Exception e) {
            logIfNecessary(
                () -> logger.error(
                    () -> new ParameterizedMessage(
                        "[{}] failed to calculate anomaly detector job established model memory requirement",
                        jobId
                    ),
                    e
                )
            );
            setAnomalyDetectorJobMemoryToLimit(jobId, phaserListener);
        }
    }

    // Fallback used when established memory usage is unavailable: derive the
    // requirement from the job's configured model memory limit plus process overhead.
    private void setAnomalyDetectorJobMemoryToLimit(String jobId, ActionListener<Long> listener) {
        jobManager.getJob(jobId, ActionListener.wrap(job -> {
            Long memoryLimitMb = (job.getAnalysisLimits() != null) ? job.getAnalysisLimits().getModelMemoryLimit() : null;
            // Although recent versions of the code enforce a non-null model_memory_limit
            // when parsing, the job could have been streamed from an older version node in
            // a mixed version cluster
            if (memoryLimitMb == null) {
                memoryLimitMb = AnalysisLimits.PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB;
            }
            Long memoryRequirementBytes = ByteSizeValue.ofMb(memoryLimitMb).getBytes() + Job.PROCESS_MEMORY_OVERHEAD.getBytes();
            memoryRequirementByAnomalyDetectorJob.put(jobId, memoryRequirementBytes);
            listener.onResponse(memoryRequirementBytes);
        }, e -> {
            if (e instanceof ResourceNotFoundException) {
                // TODO: does this also happen if the .ml-config index exists but is unavailable?
                // However, note that we wait for the .ml-config index to be available earlier on in the
                // job assignment process, so that scenario should be very rare, i.e. somebody has closed
                // the .ml-config index (which would be unexpected and unsupported for an internal index)
                // during the memory refresh.
                logger.trace("[{}] anomaly detector job deleted during ML memory update", jobId);
            } else {
                logIfNecessary(
                    () -> logger.error(
                        () -> new ParameterizedMessage("[{}] failed to get anomaly detector job during ML memory update", jobId),
                        e
                    )
                );
            }
            memoryRequirementByAnomalyDetectorJob.remove(jobId);
            listener.onResponse(null);
        }));
    }

    /**
     * To reduce spamming the log in an unstable environment, this method will only call the runnable if:
     *  - The current node is the master node (and thus valid for tracking memory)
     *  - The current node is NOT stopped (and thus not shutting down)
     * @param log Runnable that writes the log message
     */
    private void logIfNecessary(Runnable log) {
        if (isMaster && (stopped == false)) {
            log.run();
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.metadata.entitytupletranslators; import java.io.DataOutput; import org.apache.asterix.builders.IARecordBuilder; import org.apache.asterix.builders.OrderedListBuilder; import org.apache.asterix.builders.RecordBuilder; import org.apache.asterix.common.metadata.DataverseName; import org.apache.asterix.common.transactions.TxnId; import org.apache.asterix.metadata.MetadataNode; import org.apache.asterix.metadata.api.IMetadataIndex; import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes; import org.apache.asterix.metadata.entities.Datatype; import org.apache.asterix.om.base.ABoolean; import org.apache.asterix.om.types.AOrderedListType; import org.apache.asterix.om.types.ARecordType; import org.apache.asterix.om.types.ATypeTag; import org.apache.asterix.om.types.AUnionType; import org.apache.asterix.om.types.AbstractCollectionType; import org.apache.asterix.om.types.AbstractComplexType; import org.apache.asterix.om.types.IAType; import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException; import org.apache.hyracks.api.exceptions.ErrorCode; import org.apache.hyracks.api.exceptions.HyracksDataException; import 
org.apache.hyracks.data.std.util.ArrayBackedValueStorage;

/**
 * Base class for translators that serialize metadata {@code Datatype} entities into
 * the tuples stored in the metadata dataset. Contains the shared logic for writing
 * derived (record / ordered-list / unordered-list) type descriptors, including
 * registering nested derived types as their own metadata entries.
 */
public abstract class AbstractDatatypeTupleTranslator<T> extends AbstractTupleTranslator<T> {

    // Tag written into the derived-type record identifying which variant follows.
    public enum DerivedTypeTag {
        RECORD,
        UNORDEREDLIST,
        ORDEREDLIST
    }

    protected final MetadataNode metadataNode;
    protected final TxnId txnId;

    public AbstractDatatypeTupleTranslator(TxnId txnId, MetadataNode metadataNode, boolean getTuple,
            IMetadataIndex metadataIndex, int payloadTupleFieldIndex) {
        super(getTuple, metadataIndex, payloadTupleFieldIndex);
        this.txnId = txnId;
        this.metadataNode = metadataNode;
    }

    /**
     * Serializes {@code derivedDatatype} as a DERIVEDTYPE metadata record into {@code out}.
     * Writes the variant tag, the anonymity flag, and then exactly one of the
     * record / unordered-list / ordered-list payload fields depending on the type tag.
     *
     * @throws UnsupportedOperationException if the datatype is not ARRAY, MULTISET or OBJECT
     */
    protected void writeDerivedTypeRecord(DataverseName dataverseName, AbstractComplexType derivedDatatype,
            DataOutput out, boolean isAnonymous) throws HyracksDataException {
        DerivedTypeTag tag;
        IARecordBuilder derivedRecordBuilder = new RecordBuilder();
        ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
        switch (derivedDatatype.getTypeTag()) {
            case ARRAY:
                tag = DerivedTypeTag.ORDEREDLIST;
                break;
            case MULTISET:
                tag = DerivedTypeTag.UNORDEREDLIST;
                break;
            case OBJECT:
                tag = DerivedTypeTag.RECORD;
                break;
            default:
                throw new UnsupportedOperationException(
                        "No metadata record Type for " + derivedDatatype.getDisplayName());
        }

        derivedRecordBuilder.reset(MetadataRecordTypes.DERIVEDTYPE_RECORDTYPE);

        // write field 0
        fieldValue.reset();
        aString.setValue(tag.toString());
        stringSerde.serialize(aString, fieldValue.getDataOutput());
        derivedRecordBuilder.addField(MetadataRecordTypes.DERIVEDTYPE_ARECORD_TAG_FIELD_INDEX, fieldValue);

        // write field 1
        fieldValue.reset();
        booleanSerde.serialize(ABoolean.valueOf(isAnonymous), fieldValue.getDataOutput());
        derivedRecordBuilder.addField(MetadataRecordTypes.DERIVEDTYPE_ARECORD_ISANONYMOUS_FIELD_INDEX, fieldValue);

        // write the variant payload into the field slot matching the tag
        switch (tag) {
            case RECORD:
                fieldValue.reset();
                writeRecordType(dataverseName, derivedDatatype, fieldValue.getDataOutput());
                derivedRecordBuilder.addField(MetadataRecordTypes.DERIVEDTYPE_ARECORD_RECORD_FIELD_INDEX, fieldValue);
                break;
            case UNORDEREDLIST:
                fieldValue.reset();
                writeCollectionType(dataverseName, derivedDatatype, fieldValue.getDataOutput());
                derivedRecordBuilder.addField(MetadataRecordTypes.DERIVEDTYPE_ARECORD_UNORDEREDLIST_FIELD_INDEX,
                        fieldValue);
                break;
            case ORDEREDLIST:
                fieldValue.reset();
                writeCollectionType(dataverseName, derivedDatatype, fieldValue.getDataOutput());
                derivedRecordBuilder.addField(MetadataRecordTypes.DERIVEDTYPE_ARECORD_ORDEREDLIST_FIELD_INDEX,
                        fieldValue);
                break;
        }
        derivedRecordBuilder.write(out, true);
    }

    // Writes a collection (list) type as its item type's name; a nested derived
    // item type is first registered as its own (anonymous) metadata datatype.
    private void writeCollectionType(DataverseName dataverseName, AbstractComplexType type, DataOutput out)
            throws HyracksDataException {
        AbstractCollectionType listType = (AbstractCollectionType) type;
        IAType itemType = listType.getItemType();
        if (itemType.getTypeTag().isDerivedType()) {
            handleNestedDerivedType(dataverseName, itemType.getTypeName(), (AbstractComplexType) itemType);
        }
        aString.setValue(listType.getItemType().getTypeName());
        stringSerde.serialize(aString, out);
    }

    // Writes a record type as a RECORD metadata record: the is-open flag plus an
    // ordered list of field records (name, type name, nullable, and the open
    // "IsMissable" field). UNION-wrapped field types are unwrapped first, with
    // nullability/missability captured from the union.
    private void writeRecordType(DataverseName dataverseName, AbstractComplexType type, DataOutput out)
            throws HyracksDataException {
        ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
        ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
        IARecordBuilder recordRecordBuilder = new RecordBuilder();
        IARecordBuilder fieldRecordBuilder = new RecordBuilder();

        ARecordType recType = (ARecordType) type;
        OrderedListBuilder listBuilder = new OrderedListBuilder();
        listBuilder.reset(new AOrderedListType(MetadataRecordTypes.FIELD_RECORDTYPE, null));

        for (int i = 0, n = recType.getFieldNames().length; i < n; i++) {
            IAType fieldType = recType.getFieldTypes()[i];
            boolean fieldIsNullable = false;
            boolean fieldIsMissable = false;
            if (fieldType.getTypeTag() == ATypeTag.UNION) {
                // Unwrap the union to the actual type, remembering its optionality.
                AUnionType fieldUnionType = (AUnionType) fieldType;
                fieldIsNullable = fieldUnionType.isNullableType();
                fieldIsMissable = fieldUnionType.isMissableType();
                fieldType = fieldUnionType.getActualType();
            }
            if (fieldType.getTypeTag().isDerivedType()) {
                handleNestedDerivedType(dataverseName, fieldType.getTypeName(), (AbstractComplexType) fieldType);
            }
            itemValue.reset();
            fieldRecordBuilder.reset(MetadataRecordTypes.FIELD_RECORDTYPE);

            // write field 0
            fieldValue.reset();
            aString.setValue(recType.getFieldNames()[i]);
            stringSerde.serialize(aString, fieldValue.getDataOutput());
            fieldRecordBuilder.addField(MetadataRecordTypes.FIELD_ARECORD_FIELDNAME_FIELD_INDEX, fieldValue);

            // write field 1
            fieldValue.reset();
            aString.setValue(fieldType.getTypeName());
            stringSerde.serialize(aString, fieldValue.getDataOutput());
            fieldRecordBuilder.addField(MetadataRecordTypes.FIELD_ARECORD_FIELDTYPE_FIELD_INDEX, fieldValue);

            // write field 2
            fieldValue.reset();
            booleanSerde.serialize(ABoolean.valueOf(fieldIsNullable), fieldValue.getDataOutput());
            fieldRecordBuilder.addField(MetadataRecordTypes.FIELD_ARECORD_ISNULLABLE_FIELD_INDEX, fieldValue);

            // write open fields
            fieldName.reset();
            aString.setValue(MetadataRecordTypes.FIELD_NAME_IS_MISSABLE);
            stringSerde.serialize(aString, fieldName.getDataOutput());
            fieldValue.reset();
            booleanSerde.serialize(ABoolean.valueOf(fieldIsMissable), fieldValue.getDataOutput());
            fieldRecordBuilder.addField(fieldName, fieldValue);

            // write record
            fieldRecordBuilder.write(itemValue.getDataOutput(), true);

            // add item to the list of fields
            listBuilder.addItem(itemValue);
        }

        recordRecordBuilder.reset(MetadataRecordTypes.RECORD_RECORDTYPE);
        // write field 0
        fieldValue.reset();
        booleanSerde.serialize(ABoolean.valueOf(recType.isOpen()), fieldValue.getDataOutput());
        recordRecordBuilder.addField(MetadataRecordTypes.RECORDTYPE_ARECORD_ISOPEN_FIELD_INDEX, fieldValue);
        // write field 1
        fieldValue.reset();
        listBuilder.write(fieldValue.getDataOutput(), true);
        recordRecordBuilder.addField(MetadataRecordTypes.RECORDTYPE_ARECORD_FIELDS_FIELD_INDEX, fieldValue);
        // write record
        recordRecordBuilder.write(out, true);
    }

    // Registers a nested derived type as its own (anonymous) metadata datatype.
    // Duplicate-key failures are expected and ignored, since the type may already
    // have been inserted by a previous DDL statement or an earlier nested type.
    protected void handleNestedDerivedType(DataverseName dataverseName, String typeName,
            AbstractComplexType nestedType) throws HyracksDataException {
        try {
            metadataNode.addDatatype(txnId, new Datatype(dataverseName, typeName, nestedType, true));
        } catch (AlgebricksException e) {
            // The nested record type may have been inserted by a previous DDL statement or
            // by a previous nested type.
            if (!(e.getCause() instanceof HyracksDataException)) {
                throw HyracksDataException.create(e);
            } else {
                HyracksDataException hde = (HyracksDataException) e.getCause();
                if (!hde.getComponent().equals(ErrorCode.HYRACKS) || hde.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                    throw hde;
                }
            }
        }
    }
}
// DO NOT EDIT. Make changes to PDBPresentation.java instead.
//
// Generated EOF "gap class" (EOGenerator pattern): holds the machine-written half of the
// PDBPresentation entity. Hand-written customizations belong in the PDBPresentation subclass;
// this file is regenerated from the EOModel and any manual edit here will be lost.
package org.pachyderm.foundation.eof;

import com.webobjects.eoaccess.*;
import com.webobjects.eocontrol.*;
import com.webobjects.foundation.*;
import java.math.*;
import java.util.*;
import er.extensions.eof.*;
import er.extensions.foundation.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@SuppressWarnings("all")
public abstract class _PDBPresentation extends ERXGenericRecord {
  // EOModel entity name backing this record.
  public static final String ENTITY_NAME = "PDBPresentation";

  // Attribute Keys — type-safe ERXKey handles for building qualifiers/sort orderings.
  public static final ERXKey<String> AUTHOR = new ERXKey<String>("author");
  public static final ERXKey<NSTimestamp> DATE_CREATED = new ERXKey<NSTimestamp>("dateCreated");
  public static final ERXKey<NSTimestamp> DATE_MODIFIED = new ERXKey<NSTimestamp>("dateModified");
  public static final ERXKey<String> IDENTIFIER = new ERXKey<String>("identifier");
  public static final ERXKey<org.pachyderm.apollo.core.CXLocalizedValue> LOCALIZED_DESCRIPTION = new ERXKey<org.pachyderm.apollo.core.CXLocalizedValue>("localizedDescription");
  public static final ERXKey<String> METADATA = new ERXKey<String>("metadata");
  public static final ERXKey<Integer> PK = new ERXKey<Integer>("pk");
  public static final ERXKey<String> TITLE = new ERXKey<String>("title");

  // Relationship Keys — everyScreen is to-many, primeScreen is to-one (see accessors below).
  public static final ERXKey<org.pachyderm.foundation.eof.PDBScreen> EVERY_SCREEN = new ERXKey<org.pachyderm.foundation.eof.PDBScreen>("everyScreen");
  public static final ERXKey<org.pachyderm.foundation.eof.PDBScreen> PRIME_SCREEN = new ERXKey<org.pachyderm.foundation.eof.PDBScreen>("primeScreen");

  // Attribute key strings (the same names as the ERXKeys above, as plain Strings).
  public static final String AUTHOR_KEY = AUTHOR.key();
  public static final String DATE_CREATED_KEY = DATE_CREATED.key();
  public static final String DATE_MODIFIED_KEY = DATE_MODIFIED.key();
  public static final String IDENTIFIER_KEY = IDENTIFIER.key();
  public static final String LOCALIZED_DESCRIPTION_KEY = LOCALIZED_DESCRIPTION.key();
  public static final String METADATA_KEY = METADATA.key();
  public static final String PK_KEY = PK.key();
  public static final String TITLE_KEY = TITLE.key();

  // Relationship key strings.
  public static final String EVERY_SCREEN_KEY = EVERY_SCREEN.key();
  public static final String PRIME_SCREEN_KEY = PRIME_SCREEN.key();

  private static final Logger log = LoggerFactory.getLogger(_PDBPresentation.class);

  /**
   * Returns this object registered in {@code editingContext}.
   * Throws IllegalStateException when the object has no counterpart there
   * (i.e. it was never committed).
   */
  public PDBPresentation localInstanceIn(EOEditingContext editingContext) {
    PDBPresentation localInstance = (PDBPresentation)EOUtilities.localInstanceOfObject(editingContext, this);
    if (localInstance == null) {
      throw new IllegalStateException("You attempted to localInstance " + this + ", which has not yet committed.");
    }
    return localInstance;
  }

  // --- Attribute accessors: each getter reads the stored snapshot value via
  // storedValueForKey; each setter logs the transition then writes via takeStoredValueForKey. ---

  public String author() {
    return (String) storedValueForKey(_PDBPresentation.AUTHOR_KEY);
  }

  public void setAuthor(String value) {
    log.debug( "updating author from {} to {}", author(), value);
    takeStoredValueForKey(value, _PDBPresentation.AUTHOR_KEY);
  }

  public NSTimestamp dateCreated() {
    return (NSTimestamp) storedValueForKey(_PDBPresentation.DATE_CREATED_KEY);
  }

  public void setDateCreated(NSTimestamp value) {
    log.debug( "updating dateCreated from {} to {}", dateCreated(), value);
    takeStoredValueForKey(value, _PDBPresentation.DATE_CREATED_KEY);
  }

  public NSTimestamp dateModified() {
    return (NSTimestamp) storedValueForKey(_PDBPresentation.DATE_MODIFIED_KEY);
  }

  public void setDateModified(NSTimestamp value) {
    log.debug( "updating dateModified from {} to {}", dateModified(), value);
    takeStoredValueForKey(value, _PDBPresentation.DATE_MODIFIED_KEY);
  }

  public String identifier() {
    return (String) storedValueForKey(_PDBPresentation.IDENTIFIER_KEY);
  }

  public void setIdentifier(String value) {
    log.debug( "updating identifier from {} to {}", identifier(), value);
    takeStoredValueForKey(value, _PDBPresentation.IDENTIFIER_KEY);
  }

  public org.pachyderm.apollo.core.CXLocalizedValue localizedDescription() {
    return (org.pachyderm.apollo.core.CXLocalizedValue) storedValueForKey(_PDBPresentation.LOCALIZED_DESCRIPTION_KEY);
  }

  public void setLocalizedDescription(org.pachyderm.apollo.core.CXLocalizedValue value) {
    log.debug( "updating localizedDescription from {} to {}", localizedDescription(), value);
    takeStoredValueForKey(value, _PDBPresentation.LOCALIZED_DESCRIPTION_KEY);
  }

  public String metadata() {
    return (String) storedValueForKey(_PDBPresentation.METADATA_KEY);
  }

  public void setMetadata(String value) {
    log.debug( "updating metadata from {} to {}", metadata(), value);
    takeStoredValueForKey(value, _PDBPresentation.METADATA_KEY);
  }

  public Integer pk() {
    return (Integer) storedValueForKey(_PDBPresentation.PK_KEY);
  }

  public void setPk(Integer value) {
    log.debug( "updating pk from {} to {}", pk(), value);
    takeStoredValueForKey(value, _PDBPresentation.PK_KEY);
  }

  public String title() {
    return (String) storedValueForKey(_PDBPresentation.TITLE_KEY);
  }

  public void setTitle(String value) {
    log.debug( "updating title from {} to {}", title(), value);
    takeStoredValueForKey(value, _PDBPresentation.TITLE_KEY);
  }

  // --- To-one relationship: primeScreen. ---

  public org.pachyderm.foundation.eof.PDBScreen primeScreen() {
    return (org.pachyderm.foundation.eof.PDBScreen)storedValueForKey(_PDBPresentation.PRIME_SCREEN_KEY);
  }

  /** Raw setter; does not maintain the inverse relationship. Prefer setPrimeScreenRelationship. */
  public void setPrimeScreen(org.pachyderm.foundation.eof.PDBScreen value) {
    takeStoredValueForKey(value, _PDBPresentation.PRIME_SCREEN_KEY);
  }

  /**
   * Sets primeScreen, keeping both sides of the relationship consistent.
   * When Wonder's automatic inverse-relationship updating is enabled the raw setter
   * suffices; otherwise both sides are updated explicitly.
   */
  public void setPrimeScreenRelationship(org.pachyderm.foundation.eof.PDBScreen value) {
    log.debug("updating primeScreen from {} to {}", primeScreen(), value);
    if (er.extensions.eof.ERXGenericRecord.InverseRelationshipUpdater.updateInverseRelationships()) {
      setPrimeScreen(value);
    }
    else if (value == null) {
      org.pachyderm.foundation.eof.PDBScreen oldValue = primeScreen();
      if (oldValue != null) {
        removeObjectFromBothSidesOfRelationshipWithKey(oldValue, _PDBPresentation.PRIME_SCREEN_KEY);
      }
    } else {
      addObjectToBothSidesOfRelationshipWithKey(value, _PDBPresentation.PRIME_SCREEN_KEY);
    }
  }

  // --- To-many relationship: everyScreen. ---

  public NSArray<org.pachyderm.foundation.eof.PDBScreen> everyScreen() {
    return (NSArray<org.pachyderm.foundation.eof.PDBScreen>)storedValueForKey(_PDBPresentation.EVERY_SCREEN_KEY);
  }

  public NSArray<org.pachyderm.foundation.eof.PDBScreen> everyScreen(EOQualifier qualifier) {
    return everyScreen(qualifier, null, false);
  }

  public NSArray<org.pachyderm.foundation.eof.PDBScreen> everyScreen(EOQualifier qualifier, boolean fetch) {
    return everyScreen(qualifier, null, fetch);
  }

  /**
   * Returns related screens, optionally filtered and sorted. With {@code fetch},
   * goes to the database (qualifier ANDed with the inverse-relationship qualifier);
   * otherwise filters/sorts the in-memory snapshot.
   */
  public NSArray<org.pachyderm.foundation.eof.PDBScreen> everyScreen(EOQualifier qualifier, NSArray<EOSortOrdering> sortOrderings, boolean fetch) {
    NSArray<org.pachyderm.foundation.eof.PDBScreen> results;
    if (fetch) {
      EOQualifier fullQualifier;
      EOQualifier inverseQualifier = ERXQ.equals(org.pachyderm.foundation.eof.PDBScreen.PRESENTATION_KEY, this);

      if (qualifier == null) {
        fullQualifier = inverseQualifier;
      }
      else {
        fullQualifier = ERXQ.and(qualifier, inverseQualifier);
      }

      results = org.pachyderm.foundation.eof.PDBScreen.fetchPDBScreens(editingContext(), fullQualifier, sortOrderings);
    }
    else {
      results = everyScreen();
      if (qualifier != null) {
        results = (NSArray<org.pachyderm.foundation.eof.PDBScreen>)EOQualifier.filteredArrayWithQualifier(results, qualifier);
      }
      if (sortOrderings != null) {
        results = (NSArray<org.pachyderm.foundation.eof.PDBScreen>)EOSortOrdering.sortedArrayUsingKeyOrderArray(results, sortOrderings);
      }
    }
    return results;
  }

  /** Raw add; does not maintain the inverse relationship. Prefer addToEveryScreenRelationship. */
  public void addToEveryScreen(org.pachyderm.foundation.eof.PDBScreen object) {
    includeObjectIntoPropertyWithKey(object, _PDBPresentation.EVERY_SCREEN_KEY);
  }

  /** Raw remove; does not maintain the inverse relationship. Prefer removeFromEveryScreenRelationship. */
  public void removeFromEveryScreen(org.pachyderm.foundation.eof.PDBScreen object) {
    excludeObjectFromPropertyWithKey(object, _PDBPresentation.EVERY_SCREEN_KEY);
  }

  /** Adds {@code object} to everyScreen, keeping both sides of the relationship consistent. */
  public void addToEveryScreenRelationship(org.pachyderm.foundation.eof.PDBScreen object) {
    log.debug("adding {} to everyScreen relationship", object);
    if (er.extensions.eof.ERXGenericRecord.InverseRelationshipUpdater.updateInverseRelationships()) {
      addToEveryScreen(object);
    }
    else {
      addObjectToBothSidesOfRelationshipWithKey(object, _PDBPresentation.EVERY_SCREEN_KEY);
    }
  }

  /** Removes {@code object} from everyScreen, keeping both sides of the relationship consistent. */
  public void removeFromEveryScreenRelationship(org.pachyderm.foundation.eof.PDBScreen object) {
    log.debug("removing {} from everyScreen relationship", object);
    if (er.extensions.eof.ERXGenericRecord.InverseRelationshipUpdater.updateInverseRelationships()) {
      removeFromEveryScreen(object);
    }
    else {
      removeObjectFromBothSidesOfRelationshipWithKey(object, _PDBPresentation.EVERY_SCREEN_KEY);
    }
  }

  /** Creates a new PDBScreen in this object's editing context and links it into everyScreen. */
  public org.pachyderm.foundation.eof.PDBScreen createEveryScreenRelationship() {
    EOEnterpriseObject eo = EOUtilities.createAndInsertInstance(editingContext(), org.pachyderm.foundation.eof.PDBScreen.ENTITY_NAME );
    addObjectToBothSidesOfRelationshipWithKey(eo, _PDBPresentation.EVERY_SCREEN_KEY);
    return (org.pachyderm.foundation.eof.PDBScreen) eo;
  }

  /** Unlinks {@code object} from everyScreen and deletes it from the editing context. */
  public void deleteEveryScreenRelationship(org.pachyderm.foundation.eof.PDBScreen object) {
    removeObjectFromBothSidesOfRelationshipWithKey(object, _PDBPresentation.EVERY_SCREEN_KEY);
    editingContext().deleteObject(object);
  }

  /** Deletes every related screen (iterates over an immutable clone to avoid concurrent mutation). */
  public void deleteAllEveryScreenRelationships() {
    Enumeration<org.pachyderm.foundation.eof.PDBScreen> objects = everyScreen().immutableClone().objectEnumerator();
    while (objects.hasMoreElements()) {
      deleteEveryScreenRelationship(objects.nextElement());
    }
  }

  // --- Static factory and fetch helpers. ---

  /** Inserts a new PDBPresentation with the required attributes/relationships set. */
  public static PDBPresentation createPDBPresentation(EOEditingContext editingContext, String identifier
, Integer pk
, org.pachyderm.foundation.eof.PDBScreen primeScreen) {
    PDBPresentation eo = (PDBPresentation) EOUtilities.createAndInsertInstance(editingContext, _PDBPresentation.ENTITY_NAME);
    eo.setIdentifier(identifier);
    eo.setPk(pk);
    eo.setPrimeScreenRelationship(primeScreen);
    return eo;
  }

  public static ERXFetchSpecification<PDBPresentation> fetchSpec() {
    return new ERXFetchSpecification<PDBPresentation>(_PDBPresentation.ENTITY_NAME, null, null, false, true, null);
  }

  public static NSArray<PDBPresentation> fetchAllPDBPresentations(EOEditingContext editingContext) {
    return _PDBPresentation.fetchAllPDBPresentations(editingContext, null);
  }

  public static NSArray<PDBPresentation> fetchAllPDBPresentations(EOEditingContext editingContext, NSArray<EOSortOrdering> sortOrderings) {
    return _PDBPresentation.fetchPDBPresentations(editingContext, null, sortOrderings);
  }

  public static NSArray<PDBPresentation> fetchPDBPresentations(EOEditingContext editingContext, EOQualifier qualifier, NSArray<EOSortOrdering> sortOrderings) {
    ERXFetchSpecification<PDBPresentation> fetchSpec = new ERXFetchSpecification<PDBPresentation>(_PDBPresentation.ENTITY_NAME, qualifier, sortOrderings);
    NSArray<PDBPresentation> eoObjects = fetchSpec.fetchObjects(editingContext);
    return eoObjects;
  }

  public static PDBPresentation fetchPDBPresentation(EOEditingContext editingContext, String keyName, Object value) {
    return _PDBPresentation.fetchPDBPresentation(editingContext, ERXQ.equals(keyName, value));
  }

  /** Fetches at most one match; null when none, IllegalStateException when more than one. */
  public static PDBPresentation fetchPDBPresentation(EOEditingContext editingContext, EOQualifier qualifier) {
    NSArray<PDBPresentation> eoObjects = _PDBPresentation.fetchPDBPresentations(editingContext, qualifier, null);
    PDBPresentation eoObject;
    int count = eoObjects.count();
    if (count == 0) {
      eoObject = null;
    }
    else if (count == 1) {
      eoObject = eoObjects.objectAtIndex(0);
    }
    else {
      throw new IllegalStateException("There was more than one PDBPresentation that matched the qualifier '" + qualifier + "'.");
    }
    return eoObject;
  }

  public static PDBPresentation fetchRequiredPDBPresentation(EOEditingContext editingContext, String keyName, Object value) {
    return _PDBPresentation.fetchRequiredPDBPresentation(editingContext, ERXQ.equals(keyName, value));
  }

  /** Like fetchPDBPresentation but throws NoSuchElementException instead of returning null. */
  public static PDBPresentation fetchRequiredPDBPresentation(EOEditingContext editingContext, EOQualifier qualifier) {
    PDBPresentation eoObject = _PDBPresentation.fetchPDBPresentation(editingContext, qualifier);
    if (eoObject == null) {
      throw new NoSuchElementException("There was no PDBPresentation that matched the qualifier '" + qualifier + "'.");
    }
    return eoObject;
  }

  /** Static variant of localInstanceIn; null-safe on {@code eo}. */
  public static PDBPresentation localInstanceIn(EOEditingContext editingContext, PDBPresentation eo) {
    PDBPresentation localInstance = (eo == null) ? null : ERXEOControlUtilities.localInstanceOfObject(editingContext, eo);
    if (localInstance == null && eo != null) {
      throw new IllegalStateException("You attempted to localInstance " + eo + ", which has not yet committed.");
    }
    return localInstance;
  }

  // --- Named fetch specifications (defined in the EOModel). ---

  public static NSArray<org.pachyderm.foundation.eof.PDBPresentation> fetchAllPresentations(EOEditingContext editingContext, NSDictionary<String, Object> bindings) {
    EOFetchSpecification fetchSpec = EOFetchSpecification.fetchSpecificationNamed("allPresentations", _PDBPresentation.ENTITY_NAME);
    fetchSpec = fetchSpec.fetchSpecificationWithQualifierBindings(bindings);
    return (NSArray<org.pachyderm.foundation.eof.PDBPresentation>)editingContext.objectsWithFetchSpecification(fetchSpec);
  }

  public static NSArray<org.pachyderm.foundation.eof.PDBPresentation> fetchAllPresentations(EOEditingContext editingContext) {
    EOFetchSpecification fetchSpec = EOFetchSpecification.fetchSpecificationNamed("allPresentations", _PDBPresentation.ENTITY_NAME);
    return (NSArray<org.pachyderm.foundation.eof.PDBPresentation>)editingContext.objectsWithFetchSpecification(fetchSpec);
  }

  public static NSArray<org.pachyderm.foundation.eof.PDBPresentation> fetchPresentationNamesAndIdentities(EOEditingContext editingContext, NSDictionary<String, Object> bindings) {
    EOFetchSpecification fetchSpec = EOFetchSpecification.fetchSpecificationNamed("presentationNamesAndIdentities", _PDBPresentation.ENTITY_NAME);
    fetchSpec = fetchSpec.fetchSpecificationWithQualifierBindings(bindings);
    return (NSArray<org.pachyderm.foundation.eof.PDBPresentation>)editingContext.objectsWithFetchSpecification(fetchSpec);
  }

  /** Convenience overload binding only the {@code author} qualifier variable. */
  public static NSArray<org.pachyderm.foundation.eof.PDBPresentation> fetchPresentationNamesAndIdentities(EOEditingContext editingContext, String authorBinding) {
    EOFetchSpecification fetchSpec = EOFetchSpecification.fetchSpecificationNamed("presentationNamesAndIdentities", _PDBPresentation.ENTITY_NAME);
    NSMutableDictionary<String, Object> bindings = new NSMutableDictionary<String, Object>();
    bindings.takeValueForKey(authorBinding, "author");
    fetchSpec = fetchSpec.fetchSpecificationWithQualifierBindings(bindings);
    return (NSArray<org.pachyderm.foundation.eof.PDBPresentation>)editingContext.objectsWithFetchSpecification(fetchSpec);
  }
}
/*
 * All rights reserved. (C) Copyright 2009, Trinity College Dublin
 */
package com.mind_era.knime.util.merge;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Set;

import org.knime.base.node.util.DefaultDataArray;
import org.knime.core.data.DataCell;
import org.knime.core.data.DataRow;
import org.knime.core.data.DataTableSpec;
import org.knime.core.data.RowKey;
import org.knime.core.data.container.WrappedTable;
import org.knime.core.node.BufferedDataContainer;
import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.ExecutionMonitor;
import org.knime.core.node.InvalidSettingsException;
import org.knime.core.node.NodeLogger;
import org.knime.core.node.NodeModel;
import org.knime.core.node.NodeSettingsRO;
import org.knime.core.node.NodeSettingsWO;
import org.knime.core.node.defaultnodesettings.SettingsModelBoolean;
import org.knime.core.node.defaultnodesettings.SettingsModelFilterString;
import org.knime.core.util.Pair;

import com.mind_era.knime.common.util.Misc;

/**
 * This is the model implementation of Merge. Resorts the rows. It is mostly
 * like an "anti-sort": rows with identical values in the selected columns are
 * treated as a series, and the output interleaves one row from each series per
 * output group.
 *
 * @author <a href="mailto:bakosg@tcd.ie">Gabor Bakos</a>
 */
public class MergeNodeModel extends NodeModel {
	// the logger instance
	private static final NodeLogger logger = NodeLogger
			.getLogger(MergeNodeModel.class);

	/** Configuration key for stop if there were missing row from a series */
	static final String CFGKEY_HALT_ON_ERROR = "haltOnError";
	/** Stop on errors. */
	static final boolean DEFAULT_HALT_ON_ERROR = true;

	/** Configuration key for columns to work with. */
	static final String CFGKEY_MERGE_COLUMNS = "mergeColumns";
	/** No selection */
	static final String[] DEFAULT_MERGE_COLUMNS = new String[0];

	/** Configuration key for work in memory. */
	static final String CFGKEY_SORT_IN_MEMORY = "sortInMemory";
	/** Sort in memory */
	static final boolean DEFAULT_SORT_IN_MEMORY = true;

	/** Configuration key to reverse the order within a group. */
	static final String CFGKEY_SORT_ORDER_REVERSED = "sortOrderReversed";
	/** Do not reverse, leave original order. */
	static final boolean DEFAULT_SORT_ORDER_REVERSED = false;

	private final SettingsModelFilterString mergeColumns = new SettingsModelFilterString(
			CFGKEY_MERGE_COLUMNS);

	private final SettingsModelBoolean sortInMemory = new SettingsModelBoolean(
			CFGKEY_SORT_IN_MEMORY, DEFAULT_SORT_IN_MEMORY);

	private final SettingsModelBoolean haltOnError = new SettingsModelBoolean(
			MergeNodeModel.CFGKEY_HALT_ON_ERROR,
			MergeNodeModel.DEFAULT_HALT_ON_ERROR);

	private final SettingsModelBoolean sortOrderReversed = new SettingsModelBoolean(
			MergeNodeModel.CFGKEY_SORT_ORDER_REVERSED,
			MergeNodeModel.DEFAULT_SORT_ORDER_REVERSED);

	/**
	 * Constructor for the node model. One input table, one output table.
	 */
	protected MergeNodeModel() {
		super(1, 1);
	}

	/**
	 * {@inheritDoc}
	 *
	 * Groups rows by the content of the selected columns, then emits one row
	 * per group in round-robin order (see {@link #generateBlocks}).
	 */
	@Override
	protected BufferedDataTable[] execute(final BufferedDataTable[] inData,
			final ExecutionContext exec) throws Exception {
		final List<String> columns = mergeColumns.getIncludeList();
		final BufferedDataTable inputTable = inData[0];
		final DataTableSpec dataTableSpec = inputTable.getDataTableSpec();
		final int[] colIndices = computeColIndices(columns, dataTableSpec);
		final Map<Map<Integer, DataCell>, List<Pair<RowKey, Integer>>> patternToKeys = computePatternToKeys(
				inputTable, colIndices);
		final BufferedDataContainer container = exec
				.createDataContainer(inputTable.getDataTableSpec());
		if (sortInMemory.getBooleanValue()) {
			// Fail fast instead of silently truncating when the table has
			// more than Integer.MAX_VALUE rows (the data array is int-indexed).
			final DefaultDataArray dataArray = new DefaultDataArray(
					new WrappedTable(inputTable), 1,
					Math.toIntExact(inputTable.size()), exec);
			for (final List<Integer> rowIndices : generateBlocks(patternToKeys,
					inputTable.size())) {
				for (final Integer rowInteger : rowIndices) {
					container.addRowToTable(dataArray.getRow(rowInteger
							.intValue()));
				}
			}
		} else {// FIXME do something good here

		}
		container.close();
		final BufferedDataTable out = container.getTable();
		return new BufferedDataTable[] { out };
	}

	/**
	 * Produces the output ordering: each returned list contains (at most) one
	 * row index from every distinct pattern, taken in the order the rows were
	 * read. Missing rows from a series are either reported via the logger or
	 * cause an {@link IllegalStateException}, depending on the halt-on-error
	 * setting.
	 *
	 * @param patternToKeys
	 *            The mapping from row content to row indices.
	 * @param rowCount
	 *            The actual row number.
	 * @return Ordered list of row indices (starting from {@code 0}).
	 */
	private Iterable<List<Integer>> generateBlocks(
			final Map<Map<Integer, DataCell>, List<Pair<RowKey, Integer>>> patternToKeys,
			final long rowCount) {
		return new Iterable<List<Integer>>() {
			@Override
			public Iterator<List<Integer>> iterator() {
				return new Iterator<List<Integer>>() {
					private static final String HEADER = "Problem with value | column\n";
					private boolean hasNext = rowCount > 0;
					@SuppressWarnings("synthetic-access")
					private final boolean reverse = sortOrderReversed
							.getBooleanValue();
					@SuppressWarnings("synthetic-access")
					private final boolean haltIfError = haltOnError
							.getBooleanValue();
					// One live iterator per distinct pattern; each next() call
					// advances every one of them by at most one element.
					private final Map<Map<Integer, DataCell>, Iterator<Pair<RowKey, Integer>>> iterators = new HashMap<Map<Integer, DataCell>, Iterator<Pair<RowKey, Integer>>>();
					{
						for (final Entry<Map<Integer, DataCell>, List<Pair<RowKey, Integer>>> entry : patternToKeys
								.entrySet()) {
							iterators.put(entry.getKey(), entry.getValue()
									.iterator());
						}
					}
					// Collects "missing row" reports for deferred logging.
					private final StringBuilder sb = new StringBuilder(HEADER);

					@Override
					public List<Integer> next() {
						if (!hasNext()) {
							// Iterator contract: next() past the end throws
							// NoSuchElementException (was IllegalStateException).
							throw new NoSuchElementException(
									"Already collected all of the groups.");
						}
						final Set<Integer> ret = new HashSet<Integer>();
						boolean allHasNext = true;
						hasNext = false;
						for (final Entry<Map<Integer, DataCell>, Iterator<Pair<RowKey, Integer>>> iterEntry : iterators
								.entrySet()) {
							final Iterator<Pair<RowKey, Integer>> iter = iterEntry
									.getValue();
							if (!iter.hasNext()) {
								allHasNext = false;
							} else {
								hasNext = true;
							}
						}
						if (allHasNext || !haltIfError) {
							for (final Entry<Map<Integer, DataCell>, Iterator<Pair<RowKey, Integer>>> iterEntry : iterators
									.entrySet()) {
								final Iterator<Pair<RowKey, Integer>> iter = iterEntry
										.getValue();
								if (iter.hasNext()) {
									final Pair<RowKey, Integer> next = iter
											.next();
									final boolean added = ret.add(next
											.getSecond());
									// Row indices are globally unique, so the
									// add must always succeed.
									assert added : "duplicate row index: "
											+ next.getSecond();
								} else {
									// Series exhausted early: record the
									// offending values for later logging.
									for (final Entry<Integer, DataCell> entry : iterEntry
											.getKey().entrySet()) {
										sb
												.append(entry.getValue())
												.append("|")
												.append(
														entry.getKey()
																.intValue() + 1)
												.append("\n");
									}
								}
							}
						} else {// !allHasNext && haltIfError
							boolean anyHasNext = false;
							Map<Integer, DataCell> sample = new HashMap<Integer, DataCell>();
							for (final Entry<Map<Integer, DataCell>, Iterator<Pair<RowKey, Integer>>> iterEntry : iterators
									.entrySet()) {
								final Iterator<Pair<RowKey, Integer>> iter = iterEntry
										.getValue();
								if (iter.hasNext()) {
									anyHasNext = true;
								} else {
									sample = iterEntry.getKey();
								}
							}
							if (anyHasNext) {
								final StringBuilder errorMessage = new StringBuilder(
										HEADER);
								for (final Entry<Integer, DataCell> entry : sample
										.entrySet()) {
									errorMessage
											.append(entry.getValue())
											.append("|")
											.append(
													entry.getKey().intValue() + 1)
											.append("\n");
								}
								throw new IllegalStateException(errorMessage
										.toString());
							}
						}
						final ArrayList<Integer> list = new ArrayList<Integer>(
								ret);
						Collections.sort(list);
						if (reverse) {
							Collections.reverse(list);
						}
						return list;
					}

					@SuppressWarnings("synthetic-access")
					@Override
					public boolean hasNext() {
						final boolean ret = hasNext;
						// Flush any collected problem reports once exhausted.
						if (!ret && sb.length() > HEADER.length()) {
							logger.debug(sb);
						}
						return ret;
					}

					@Override
					public void remove() {
						throw new UnsupportedOperationException();
					}
				};
			}
		};
	}

	/**
	 * Compute the row content to row indices/keys list map.
	 *
	 * @param inputTable
	 *            The input {@link BufferedDataTable}.
	 * @param colIndices
	 *            The interesting column indices ({@code -1} entries skipped).
	 * @return A mapping from the different row contents to the row identifiers
	 *         lists.
	 */
	private Map<Map<Integer, DataCell>, List<Pair<RowKey, Integer>>> computePatternToKeys(
			final BufferedDataTable inputTable, final int[] colIndices) {
		final Map<Map<Integer, DataCell>, List<Pair<RowKey, Integer>>> patternToKeys = new HashMap<Map<Integer, DataCell>, List<Pair<RowKey, Integer>>>();
		{
			int rowIndex = 0;
			for (final DataRow row : inputTable) {
				// Pattern: column index -> cell value for the selected columns.
				final HashMap<Integer, DataCell> key = new HashMap<Integer, DataCell>();
				for (int i = colIndices.length; i-- > 0;) {
					if (colIndices[i] != -1) {
						final DataCell cell = row.getCell(colIndices[i]);
						key.put(Integer.valueOf(colIndices[i]), cell);
					}
				}
				if (!patternToKeys.containsKey(key)) {
					patternToKeys.put(key,
							new ArrayList<Pair<RowKey, Integer>>());
				}
				final List<Pair<RowKey, Integer>> list = patternToKeys.get(key);
				list.add(new Pair<RowKey, Integer>(row.getKey(), Integer
						.valueOf(rowIndex)));
				++rowIndex;
			}
		}
		return patternToKeys;
	}

	/**
	 * Selects the interesting column indices.
	 *
	 * @param columns
	 *            The name of interesting columns.
	 * @param dataTableSpec
	 *            The {@link DataTableSpec}.
	 * @return The indices of columns. Might contain {@code -1} values where not
	 *         found.
	 * @see DataTableSpec#findColumnIndex(String)
	 */
	private int[] computeColIndices(final List<String> columns,
			final DataTableSpec dataTableSpec) {
		final int[] colIndices = new int[columns.size()];
		{
			int index = 0;
			for (final String column : columns) {
				colIndices[index++] = dataTableSpec.findColumnIndex(column);
			}
		}
		return colIndices;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected void reset() {
		// No internal state
	}

	/**
	 * {@inheritDoc}
	 *
	 * @throws InvalidSettingsException
	 *             If in-memory sorting is disabled (not implemented) or no
	 *             column is selected.
	 */
	@Override
	protected DataTableSpec[] configure(final DataTableSpec[] inSpecs)
			throws InvalidSettingsException {
		if (!sortInMemory.getBooleanValue()) {
			throw new InvalidSettingsException(
					"Only in-memory sorting is implemented yet.");
		}
		if (mergeColumns.getIncludeList().isEmpty()) {
			// A configuration problem must surface as InvalidSettingsException
			// (the declared type), not as an unchecked IllegalArgumentException.
			throw new InvalidSettingsException(
					"At least one column has to be selected.");
		}
		Misc.checkList(mergeColumns.getIncludeList(), inSpecs[0]);
		Misc.checkList(mergeColumns.getExcludeList(), inSpecs[0]);
		return inSpecs;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected void saveSettingsTo(final NodeSettingsWO settings) {
		mergeColumns.saveSettingsTo(settings);
		sortInMemory.saveSettingsTo(settings);
		haltOnError.saveSettingsTo(settings);
		sortOrderReversed.saveSettingsTo(settings);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected void loadValidatedSettingsFrom(final NodeSettingsRO settings)
			throws InvalidSettingsException {
		mergeColumns.loadSettingsFrom(settings);
		sortInMemory.loadSettingsFrom(settings);
		haltOnError.loadSettingsFrom(settings);
		sortOrderReversed.loadSettingsFrom(settings);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected void validateSettings(final NodeSettingsRO settings)
			throws InvalidSettingsException {
		mergeColumns.validateSettings(settings);
		sortInMemory.validateSettings(settings);
		haltOnError.validateSettings(settings);
		sortOrderReversed.validateSettings(settings);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected void loadInternals(final File internDir,
			final ExecutionMonitor exec) throws IOException,
			CanceledExecutionException {
		// No internal state
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected void saveInternals(final File internDir,
			final ExecutionMonitor exec) throws IOException,
			CanceledExecutionException {
		// No internal state
	}
}
/*
 * #%L
 * ACS AEM Commons Bundle
 * %%
 * Copyright (C) 2015 Adobe
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package com.adobe.acs.commons.workflow.bulk.removal.impl;

import com.adobe.acs.commons.util.InfoWriter;
import com.adobe.acs.commons.workflow.bulk.removal.WorkflowInstanceRemover;
import com.adobe.acs.commons.workflow.bulk.removal.WorkflowRemovalConfig;
import com.adobe.acs.commons.workflow.bulk.removal.WorkflowRemovalException;
import com.adobe.acs.commons.workflow.bulk.removal.WorkflowRemovalForceQuitException;
import org.apache.commons.lang.StringUtils;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.ConfigurationPolicy;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.sling.api.resource.LoginException;
import org.apache.sling.api.resource.PersistenceException;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ResourceResolverFactory;
import org.apache.sling.commons.osgi.PropertiesUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;

/**
 * Scheduled OSGi service (configuration factory) that periodically removes
 * workflow instances matching the configured statuses, models, payload
 * patterns, and age constraints.
 */
@Component(
        label = "ACS AEM Commons - Workflow Instance Remover - Scheduled Service",
        metatype = true,
        configurationFactory = true,
        policy = ConfigurationPolicy.REQUIRE
)
@Properties({
        @Property(
                label = "Cron expression defining when this Scheduled Service will run",
                description = "[12:01am daily = 0 1 0 ? * *]; see www.cronmaker.com",
                name = "scheduler.expression",
                value = "0 1 0 ? * *"
        ),
        @Property(
                label = "Allow concurrent executions",
                description = "Allow concurrent executions of this Scheduled Service",
                name = "scheduler.concurrent",
                boolValue = false,
                propertyPrivate = true
        ),
        @Property(
                name = "webconsole.configurationFactory.nameHint",
                propertyPrivate = true,
                value = "Runs at '{scheduler.expression}' on models [{workflow.models}] with status [{workflow.statuses}]"
        )
})
@Service
public class WorkflowInstanceRemoverScheduler implements Runnable {
    private static final Logger log = LoggerFactory.getLogger(WorkflowInstanceRemoverScheduler.class);

    @Reference
    private ResourceResolverFactory resourceResolverFactory;

    @Reference
    private WorkflowInstanceRemover workflowInstanceRemover;

    private static final String SERVICE_NAME = "workflow-remover";
    private static final Map<String, Object> AUTH_INFO;

    static {
        AUTH_INFO = Collections.singletonMap(ResourceResolverFactory.SUBSERVICE, (Object) SERVICE_NAME);
    }

    private static final String[] DEFAULT_WORKFLOW_STATUSES = {"COMPLETED", "ABORTED"};

    private List<String> statuses = new ArrayList<String>();

    @Property(label = "Workflow Status",
            description = "Only remove Workflow Instances that have one of these statuses.",
            value = { "COMPLETED", "ABORTED" })
    public static final String PROP_WORKFLOW_STATUSES = "workflow.statuses";

    private static final String[] DEFAULT_WORKFLOW_MODELS = {};

    private List<String> models = new ArrayList<String>();

    @Property(label = "Workflow Models",
            description = "Only remove Workflow Instances that belong to one of these WF Models.",
            cardinality = Integer.MAX_VALUE,
            value = { })
    public static final String PROP_WORKFLOW_MODELS = "workflow.models";

    private static final String[] DEFAULT_WORKFLOW_PAYLOADS = {};

    private List<Pattern> payloads = new ArrayList<Pattern>();

    @Property(label = "Payload Patterns",
            description = "Only remove Workflow Instances whose payloads match one of these regex patterns",
            cardinality = Integer.MAX_VALUE,
            value = { })
    public static final String PROP_WORKFLOW_PAYLOADS = "workflow.payloads";

    private Calendar olderThan = null;

    @Property(label = "Older Than UTC TS",
            description = "Only remove Workflow Instances whose payloads are older than this UTC Time in Millis",
            longValue = 0)
    public static final String PROP_WORKFLOWS_OLDER_THAN = "workflow.older-than";

    private static final long DEFAULT_OLDER_THAN_MILLIS = -1L;

    private long olderThanMillis = DEFAULT_OLDER_THAN_MILLIS;

    @Property(label = "Older Than Milliseconds",
            description = "Only remove Workflow Instances whose payloads start date was at least desired Milliseconds ago",
            longValue = DEFAULT_OLDER_THAN_MILLIS)
    public static final String PROP_WORKFLOWS_OLDER_THAN_MILLIS = "workflow.older-than-millis";

    private static final int DEFAULT_BATCH_SIZE = 1000;

    private int batchSize = DEFAULT_BATCH_SIZE;

    @Property(label = "Batch Size",
            description = "Save removals to JCR in batches of this defined size.",
            intValue = DEFAULT_BATCH_SIZE)
    public static final String PROP_BATCH_SIZE = "batch-size";

    private static final int DEFAULT_MAX_DURATION = 0;

    private int maxDuration = DEFAULT_MAX_DURATION;

    @Property(label = "Max duration (in minutes)",
            description = "Max number of minutes this workflow removal process can execute. 0 for no limit. "
                    + "[ Default: 0 ]",
            intValue = DEFAULT_MAX_DURATION)
    public static final String PROP_MAX_DURATION = "max-duration";

    /**
     * Scheduler entry point: opens a service resource resolver, builds the
     * removal configuration from the activated properties, and delegates to
     * {@link WorkflowInstanceRemover}. Each failure mode is logged; on
     * interruption the thread's interrupt status is restored.
     */
    @Override
    public final void run() {
        try (ResourceResolver adminResourceResolver = resourceResolverFactory.getServiceResourceResolver(AUTH_INFO)) {
            final long start = System.currentTimeMillis();

            WorkflowRemovalConfig workflowRemovalConfig =
                    new WorkflowRemovalConfig(models, statuses, payloads, olderThan, olderThanMillis);
            workflowRemovalConfig.setBatchSize(batchSize);
            workflowRemovalConfig.setMaxDurationInMins(maxDuration);

            int count = workflowInstanceRemover.removeWorkflowInstances(adminResourceResolver, workflowRemovalConfig);

            if (log.isInfoEnabled()) {
                log.info("Removed [ {} ] Workflow instances in {} ms", count, System.currentTimeMillis() - start);
            }
        } catch (LoginException e) {
            log.error("Login Exception when getting admin resource resolver", e);
        } catch (PersistenceException e) {
            log.error("Persistence Exception when saving Workflow Instances removal", e);
        } catch (WorkflowRemovalException e) {
            log.error("Workflow Removal exception during Workflow Removal", e);
        } catch (InterruptedException e) {
            log.error("Interrupted Exception during Workflow Removal", e);
            // Restore the interrupt flag so the scheduler/thread pool can
            // observe the interruption (previously swallowed; see Sonar S2142).
            Thread.currentThread().interrupt();
        } catch (WorkflowRemovalForceQuitException e) {
            log.info("Workflow Removal force quit", e);
        }
    }

    /** Copies the non-blank entries of {@code array} into a new list. */
    private List<String> arrayToList(String[] array) {
        List<String> list = new ArrayList<String>();

        for (String element : array) {
            if (StringUtils.isNotBlank(element)) {
                list.add(element);
            }
        }

        return list;
    }

    /**
     * Reads and normalizes the component configuration, then logs a summary
     * of the effective settings.
     */
    @Activate
    protected final void activate(final Map<String, String> config) {
        statuses = arrayToList(PropertiesUtil.toStringArray(config.get(PROP_WORKFLOW_STATUSES), DEFAULT_WORKFLOW_STATUSES));

        models = arrayToList(PropertiesUtil.toStringArray(config.get(PROP_WORKFLOW_MODELS), DEFAULT_WORKFLOW_MODELS));

        final String[] payloadsArray =
                PropertiesUtil.toStringArray(config.get(PROP_WORKFLOW_PAYLOADS), DEFAULT_WORKFLOW_PAYLOADS);
        for (final String payload : payloadsArray) {
            if (StringUtils.isNotBlank(payload)) {
                // Pattern.compile never returns null (it throws on bad syntax),
                // so no null check is needed here.
                payloads.add(Pattern.compile(payload));
            }
        }

        // Primitive long avoids pointless boxing of the config value.
        final long olderThanTs = PropertiesUtil.toLong(config.get(PROP_WORKFLOWS_OLDER_THAN), 0);

        if (olderThanTs > 0) {
            olderThan = Calendar.getInstance();
            olderThan.setTimeInMillis(olderThanTs);
        }

        olderThanMillis = PropertiesUtil.toLong(config.get(PROP_WORKFLOWS_OLDER_THAN_MILLIS), 0);

        batchSize = PropertiesUtil.toInteger(config.get(PROP_BATCH_SIZE), DEFAULT_BATCH_SIZE);
        if (batchSize < 1) {
            batchSize = DEFAULT_BATCH_SIZE;
        }

        maxDuration = PropertiesUtil.toInteger(config.get(PROP_MAX_DURATION), DEFAULT_MAX_DURATION);

        final InfoWriter iw = new InfoWriter();
        iw.title("Workflow Instance Removal Configuration");
        iw.message("Workflow status: {}", statuses);
        iw.message("Workflow models: {}", models);
        iw.message("Payloads: {}", Arrays.asList(payloadsArray));
        iw.message("Older than: {}", olderThan);
        iw.message("Batch size: {}", batchSize);
        iw.message("Max Duration (minutes): {}", maxDuration);
        iw.end();

        log.info(iw.toString());
    }

    /** Resets all configuration-derived state to defaults. */
    @Deactivate
    protected final void deactivate(final Map<String, String> config) {
        olderThan = null;

        statuses = new ArrayList<String>();
        models = new ArrayList<String>();
        payloads = new ArrayList<Pattern>();
        batchSize = DEFAULT_BATCH_SIZE;
        maxDuration = DEFAULT_MAX_DURATION;
    }
}
package org.apache.lucene.codecs; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.Closeable; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import org.apache.lucene.index.AtomicReader; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FilteredTermsEnum; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.MultiDocValues.OrdinalMap; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongBitSet; /** * Abstract API that consumes numeric, binary and * sorted docvalues. Concrete implementations of this * actually do "something" with the docvalues (write it into * the index in a specific format). 
 * <p>
 * The lifecycle is:
 * <ol>
 *   <li>DocValuesConsumer is created by
 *       {@link DocValuesFormat#fieldsConsumer(SegmentWriteState)} or
 *       {@link NormsFormat#normsConsumer(SegmentWriteState)}.
 *   <li>{@link #addNumericField}, {@link #addBinaryField},
 *       or {@link #addSortedField} are called for each Numeric,
 *       Binary, or Sorted docvalues field. The API is a "pull" rather
 *       than "push", and the implementation is free to iterate over the
 *       values multiple times ({@link Iterable#iterator()}).
 *   <li>After all fields are added, the consumer is {@link #close}d.
 * </ol>
 *
 * @lucene.experimental
 */
public abstract class DocValuesConsumer implements Closeable {

  /** Sole constructor. (For invocation by subclass
   *  constructors, typically implicit.) */
  protected DocValuesConsumer() {}

  /**
   * Writes numeric docvalues for a field.
   * @param field field information
   * @param values Iterable of numeric values (one for each document). {@code null} indicates
   *               a missing value.
   * @throws IOException if an I/O error occurred.
   */
  public abstract void addNumericField(FieldInfo field, Iterable<Number> values) throws IOException;

  /**
   * Writes binary docvalues for a field.
   * @param field field information
   * @param values Iterable of binary values (one for each document). {@code null} indicates
   *               a missing value.
   * @throws IOException if an I/O error occurred.
   */
  public abstract void addBinaryField(FieldInfo field, Iterable<BytesRef> values) throws IOException;

  /**
   * Writes pre-sorted binary docvalues for a field.
   * @param field field information
   * @param values Iterable of binary values in sorted order (deduplicated).
   * @param docToOrd Iterable of ordinals (one for each document). {@code -1} indicates
   *                 a missing value.
   * @throws IOException if an I/O error occurred.
   */
  public abstract void addSortedField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrd) throws IOException;

  /**
   * Writes pre-sorted set docvalues for a field
   * @param field field information
   * @param values Iterable of binary values in sorted order (deduplicated).
   * @param docToOrdCount Iterable of the number of values for each document. A zero ordinal
   *                      count indicates a missing value.
   * @param ords Iterable of ordinal occurrences (docToOrdCount*maxDoc total).
   * @throws IOException if an I/O error occurred.
   */
  public abstract void addSortedSetField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrdCount, Iterable<Number> ords) throws IOException;

  /**
   * Merges the numeric docvalues from <code>toMerge</code>.
   * <p>
   * The default implementation calls {@link #addNumericField}, passing
   * an Iterable that merges and filters deleted documents on the fly.
   */
  public void mergeNumericField(final FieldInfo fieldInfo, final MergeState mergeState, final List<NumericDocValues> toMerge, final List<Bits> docsWithField) throws IOException {

    addNumericField(fieldInfo,
                    new Iterable<Number>() {
                      @Override
                      public Iterator<Number> iterator() {
                        // State machine that walks every live document of every segment in
                        // order, yielding its value (or null when docsWithField says the
                        // doc has no value). Deleted docs are skipped entirely.
                        return new Iterator<Number>() {
                          int readerUpto = -1;      // index of the segment currently being consumed
                          int docIDUpto;            // next docID to inspect within the current segment
                          Long nextValue;           // value staged by setNext(); null == missing
                          AtomicReader currentReader;
                          NumericDocValues currentValues;
                          Bits currentLiveDocs;
                          Bits currentDocsWithField;
                          boolean nextIsSet;        // true when nextValue is staged and unconsumed

                          @Override
                          public boolean hasNext() {
                            return nextIsSet || setNext();
                          }

                          @Override
                          public void remove() {
                            throw new UnsupportedOperationException();
                          }

                          @Override
                          public Number next() {
                            if (!hasNext()) {
                              throw new NoSuchElementException();
                            }
                            assert nextIsSet;
                            nextIsSet = false;
                            return nextValue;
                          }

                          // Advances to the next live document, staging its value.
                          // Returns false once every segment is exhausted.
                          private boolean setNext() {
                            while (true) {
                              if (readerUpto == toMerge.size()) {
                                return false;
                              }
                              // Move to the next segment when the current one is exhausted
                              // (or on the very first call, when currentReader is null).
                              if (currentReader == null || docIDUpto == currentReader.maxDoc()) {
                                readerUpto++;
                                if (readerUpto < toMerge.size()) {
                                  currentReader = mergeState.readers.get(readerUpto);
                                  currentValues = toMerge.get(readerUpto);
                                  currentLiveDocs = currentReader.getLiveDocs();
                                  currentDocsWithField = docsWithField.get(readerUpto);
                                }
                                docIDUpto = 0;
                                continue;
                              }
                              // null liveDocs means no deletions in this segment.
                              if (currentLiveDocs == null || currentLiveDocs.get(docIDUpto)) {
                                nextIsSet = true;
                                if (currentDocsWithField.get(docIDUpto)) {
                                  nextValue = currentValues.get(docIDUpto);
                                } else {
                                  nextValue = null;
                                }
                                docIDUpto++;
                                return true;
                              }
                              docIDUpto++;
                            }
                          }
                        };
                      }
                    });
  }

  /**
   * Merges the binary docvalues from <code>toMerge</code>.
   * <p>
   * The default implementation calls {@link #addBinaryField}, passing
   * an Iterable that merges and filters deleted documents on the fly.
   */
  public void mergeBinaryField(FieldInfo fieldInfo, final MergeState mergeState, final List<BinaryDocValues> toMerge, final List<Bits> docsWithField) throws IOException {

    addBinaryField(fieldInfo,
                   new Iterable<BytesRef>() {
                     @Override
                     public Iterator<BytesRef> iterator() {
                       // Same per-segment walk as the numeric merge above, but values are
                       // copied into a reused scratch BytesRef (nextValue); nextPointer
                       // distinguishes "missing" (null) from "present" (== nextValue).
                       return new Iterator<BytesRef>() {
                         int readerUpto = -1;
                         int docIDUpto;
                         BytesRef nextValue = new BytesRef();
                         BytesRef nextPointer; // points to null if missing, or nextValue
                         AtomicReader currentReader;
                         BinaryDocValues currentValues;
                         Bits currentLiveDocs;
                         Bits currentDocsWithField;
                         boolean nextIsSet;

                         @Override
                         public boolean hasNext() {
                           return nextIsSet || setNext();
                         }

                         @Override
                         public void remove() {
                           throw new UnsupportedOperationException();
                         }

                         @Override
                         public BytesRef next() {
                           if (!hasNext()) {
                             throw new NoSuchElementException();
                           }
                           assert nextIsSet;
                           nextIsSet = false;
                           return nextPointer;
                         }

                         private boolean setNext() {
                           while (true) {
                             if (readerUpto == toMerge.size()) {
                               return false;
                             }
                             if (currentReader == null || docIDUpto == currentReader.maxDoc()) {
                               readerUpto++;
                               if (readerUpto < toMerge.size()) {
                                 currentReader = mergeState.readers.get(readerUpto);
                                 currentValues = toMerge.get(readerUpto);
                                 currentDocsWithField = docsWithField.get(readerUpto);
                                 currentLiveDocs = currentReader.getLiveDocs();
                               }
                               docIDUpto = 0;
                               continue;
                             }
                             if (currentLiveDocs == null || currentLiveDocs.get(docIDUpto)) {
                               nextIsSet = true;
                               if (currentDocsWithField.get(docIDUpto)) {
                                 // Fills the reused scratch ref; consumers must copy if they
                                 // need to retain the bytes across next() calls.
                                 currentValues.get(docIDUpto, nextValue);
                                 nextPointer = nextValue;
                               } else {
                                 nextPointer = null;
                               }
                               docIDUpto++;
                               return true;
                             }
                             docIDUpto++;
                           }
                         }
                       };
                     }
                   });
  }

  /**
   * Merges the sorted docvalues from <code>toMerge</code>.
   * <p>
   * The default implementation calls {@link #addSortedField}, passing
   * an Iterable that merges ordinals and values and filters deleted documents .
   */
  public void mergeSortedField(FieldInfo fieldInfo, final MergeState mergeState, List<SortedDocValues> toMerge) throws IOException {
    final AtomicReader readers[] = mergeState.readers.toArray(new AtomicReader[toMerge.size()]);
    final SortedDocValues dvs[] = toMerge.toArray(new SortedDocValues[toMerge.size()]);

    // step 1: iterate thru each sub and mark terms still in use
    // (a term is "live" if at least one non-deleted doc references its ordinal)
    TermsEnum liveTerms[] = new TermsEnum[dvs.length];
    for (int sub = 0; sub < liveTerms.length; sub++) {
      AtomicReader reader = readers[sub];
      SortedDocValues dv = dvs[sub];
      Bits liveDocs = reader.getLiveDocs();
      if (liveDocs == null) {
        // no deletions: every term survives
        liveTerms[sub] = dv.termsEnum();
      } else {
        LongBitSet bitset = new LongBitSet(dv.getValueCount());
        for (int i = 0; i < reader.maxDoc(); i++) {
          if (liveDocs.get(i)) {
            int ord = dv.getOrd(i);
            if (ord >= 0) {
              bitset.set(ord);
            }
          }
        }
        liveTerms[sub] = new BitsFilteredTermsEnum(dv.termsEnum(), bitset);
      }
    }

    // step 2: create ordinal map (this conceptually does the "merging")
    final OrdinalMap map = new OrdinalMap(this, liveTerms);

    // step 3: add field
    addSortedField(fieldInfo,
        // ord -> value
        new Iterable<BytesRef>() {
          @Override
          public Iterator<BytesRef> iterator() {
            // Iterates global ordinals in order, resolving each to its term bytes
            // via the first segment that contains it.
            return new Iterator<BytesRef>() {
              final BytesRef scratch = new BytesRef();
              int currentOrd;

              @Override
              public boolean hasNext() {
                return currentOrd < map.getValueCount();
              }

              @Override
              public BytesRef next() {
                if (!hasNext()) {
                  throw new NoSuchElementException();
                }
                int segmentNumber = map.getFirstSegmentNumber(currentOrd);
                int segmentOrd = (int)map.getFirstSegmentOrd(currentOrd);
                dvs[segmentNumber].lookupOrd(segmentOrd, scratch);
                currentOrd++;
                return scratch;
              }

              @Override
              public void remove() {
                throw new UnsupportedOperationException();
              }
            };
          }
        },
        // doc -> ord
        new Iterable<Number>() {
          @Override
          public Iterator<Number> iterator() {
            // Walks live documents across all segments, remapping each segment-local
            // ordinal to its global ordinal; -1 (missing) passes through unchanged.
            return new Iterator<Number>() {
              int readerUpto = -1;
              int docIDUpto;
              int nextValue;
              AtomicReader currentReader;
              Bits currentLiveDocs;
              boolean nextIsSet;

              @Override
              public boolean hasNext() {
                return nextIsSet || setNext();
              }

              @Override
              public void remove() {
                throw new UnsupportedOperationException();
              }

              @Override
              public Number next() {
                if (!hasNext()) {
                  throw new NoSuchElementException();
                }
                assert nextIsSet;
                nextIsSet = false;
                // TODO make a mutable number
                return nextValue;
              }

              private boolean setNext() {
                while (true) {
                  if (readerUpto == readers.length) {
                    return false;
                  }
                  if (currentReader == null || docIDUpto == currentReader.maxDoc()) {
                    readerUpto++;
                    if (readerUpto < readers.length) {
                      currentReader = readers[readerUpto];
                      currentLiveDocs = currentReader.getLiveDocs();
                    }
                    docIDUpto = 0;
                    continue;
                  }
                  if (currentLiveDocs == null || currentLiveDocs.get(docIDUpto)) {
                    nextIsSet = true;
                    int segOrd = dvs[readerUpto].getOrd(docIDUpto);
                    nextValue = segOrd == -1 ? -1 : (int) map.getGlobalOrd(readerUpto, segOrd);
                    docIDUpto++;
                    return true;
                  }
                  docIDUpto++;
                }
              }
            };
          }
        }
    );
  }

  /**
   * Merges the sortedset docvalues from <code>toMerge</code>.
   * <p>
   * The default implementation calls {@link #addSortedSetField}, passing
   * an Iterable that merges ordinals and values and filters deleted documents .
   */
  public void mergeSortedSetField(FieldInfo fieldInfo, final MergeState mergeState, List<SortedSetDocValues> toMerge) throws IOException {
    final AtomicReader readers[] = mergeState.readers.toArray(new AtomicReader[toMerge.size()]);
    final SortedSetDocValues dvs[] = toMerge.toArray(new SortedSetDocValues[toMerge.size()]);

    // step 1: iterate thru each sub and mark terms still in use
    // (same liveness computation as mergeSortedField, but a doc may reference
    // multiple ordinals, so all of them are marked)
    TermsEnum liveTerms[] = new TermsEnum[dvs.length];
    for (int sub = 0; sub < liveTerms.length; sub++) {
      AtomicReader reader = readers[sub];
      SortedSetDocValues dv = dvs[sub];
      Bits liveDocs = reader.getLiveDocs();
      if (liveDocs == null) {
        liveTerms[sub] = dv.termsEnum();
      } else {
        LongBitSet bitset = new LongBitSet(dv.getValueCount());
        for (int i = 0; i < reader.maxDoc(); i++) {
          if (liveDocs.get(i)) {
            dv.setDocument(i);
            long ord;
            while ((ord = dv.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
              bitset.set(ord);
            }
          }
        }
        liveTerms[sub] = new BitsFilteredTermsEnum(dv.termsEnum(), bitset);
      }
    }

    // step 2: create ordinal map (this conceptually does the "merging")
    final OrdinalMap map = new OrdinalMap(this, liveTerms);

    // step 3: add field
    addSortedSetField(fieldInfo,
        // ord -> value
        new Iterable<BytesRef>() {
          @Override
          public Iterator<BytesRef> iterator() {
            // Yields the merged (deduplicated, sorted) term values by global ordinal.
            return new Iterator<BytesRef>() {
              final BytesRef scratch = new BytesRef();
              long currentOrd;

              @Override
              public boolean hasNext() {
                return currentOrd < map.getValueCount();
              }

              @Override
              public BytesRef next() {
                if (!hasNext()) {
                  throw new NoSuchElementException();
                }
                int segmentNumber = map.getFirstSegmentNumber(currentOrd);
                long segmentOrd = map.getFirstSegmentOrd(currentOrd);
                dvs[segmentNumber].lookupOrd(segmentOrd, scratch);
                currentOrd++;
                return scratch;
              }

              @Override
              public void remove() {
                throw new UnsupportedOperationException();
              }
            };
          }
        },
        // doc -> ord count
        new Iterable<Number>() {
          @Override
          public Iterator<Number> iterator() {
            // Per live document, counts how many ordinals the doc has (0 == missing).
            return new Iterator<Number>() {
              int readerUpto = -1;
              int docIDUpto;
              int nextValue;
              AtomicReader currentReader;
              Bits currentLiveDocs;
              boolean nextIsSet;

              @Override
              public boolean hasNext() {
                return nextIsSet || setNext();
              }

              @Override
              public void remove() {
                throw new UnsupportedOperationException();
              }

              @Override
              public Number next() {
                if (!hasNext()) {
                  throw new NoSuchElementException();
                }
                assert nextIsSet;
                nextIsSet = false;
                // TODO make a mutable number
                return nextValue;
              }

              private boolean setNext() {
                while (true) {
                  if (readerUpto == readers.length) {
                    return false;
                  }
                  if (currentReader == null || docIDUpto == currentReader.maxDoc()) {
                    readerUpto++;
                    if (readerUpto < readers.length) {
                      currentReader = readers[readerUpto];
                      currentLiveDocs = currentReader.getLiveDocs();
                    }
                    docIDUpto = 0;
                    continue;
                  }
                  if (currentLiveDocs == null || currentLiveDocs.get(docIDUpto)) {
                    nextIsSet = true;
                    SortedSetDocValues dv = dvs[readerUpto];
                    dv.setDocument(docIDUpto);
                    nextValue = 0;
                    while (dv.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) {
                      nextValue++;
                    }
                    docIDUpto++;
                    return true;
                  }
                  docIDUpto++;
                }
              }
            };
          }
        },
        // ords
        new Iterable<Number>() {
          @Override
          public Iterator<Number> iterator() {
            // Flattened stream of global ordinals: for each live doc, all its
            // segment-local ords are remapped and buffered in `ords`, then drained
            // one at a time by subsequent setNext() calls.
            return new Iterator<Number>() {
              int readerUpto = -1;
              int docIDUpto;
              long nextValue;
              AtomicReader currentReader;
              Bits currentLiveDocs;
              boolean nextIsSet;
              long ords[] = new long[8];  // grown on demand per document
              int ordUpto;
              int ordLength;

              @Override
              public boolean hasNext() {
                return nextIsSet || setNext();
              }

              @Override
              public void remove() {
                throw new UnsupportedOperationException();
              }

              @Override
              public Number next() {
                if (!hasNext()) {
                  throw new NoSuchElementException();
                }
                assert nextIsSet;
                nextIsSet = false;
                // TODO make a mutable number
                return nextValue;
              }

              private boolean setNext() {
                while (true) {
                  if (readerUpto == readers.length) {
                    return false;
                  }

                  // Drain the buffered ordinals of the current document first.
                  if (ordUpto < ordLength) {
                    nextValue = ords[ordUpto];
                    ordUpto++;
                    nextIsSet = true;
                    return true;
                  }

                  if (currentReader == null || docIDUpto == currentReader.maxDoc()) {
                    readerUpto++;
                    if (readerUpto < readers.length) {
                      currentReader = readers[readerUpto];
                      currentLiveDocs = currentReader.getLiveDocs();
                    }
                    docIDUpto = 0;
                    continue;
                  }

                  if (currentLiveDocs == null || currentLiveDocs.get(docIDUpto)) {
                    assert docIDUpto < currentReader.maxDoc();
                    SortedSetDocValues dv = dvs[readerUpto];
                    dv.setDocument(docIDUpto);
                    ordUpto = ordLength = 0;
                    long ord;
                    while ((ord = dv.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
                      if (ordLength == ords.length) {
                        ords = ArrayUtil.grow(ords, ordLength+1);
                      }
                      ords[ordLength] = map.getGlobalOrd(readerUpto, ord);
                      ordLength++;
                    }
                    docIDUpto++;
                    // loop back so the freshly filled buffer is drained above
                    continue;
                  }

                  docIDUpto++;
                }
              }
            };
          }
        }
     );
  }

  // TODO: seek-by-ord to nextSetBit
  /** TermsEnum view that only accepts terms whose ordinal is set in {@code liveTerms}. */
  static class BitsFilteredTermsEnum extends FilteredTermsEnum {
    final LongBitSet liveTerms;

    BitsFilteredTermsEnum(TermsEnum in, LongBitSet liveTerms) {
      super(in, false); // <-- not passing false here wasted about 3 hours of my time!!!!!!!!!!!!!
      assert liveTerms != null;
      this.liveTerms = liveTerms;
    }

    @Override
    protected AcceptStatus accept(BytesRef term) throws IOException {
      if (liveTerms.get(ord())) {
        return AcceptStatus.YES;
      } else {
        return AcceptStatus.NO;
      }
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.catalog; import org.apache.flink.annotation.Internal; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.configuration.ReadableConfig; import org.apache.flink.table.api.TableConfig; import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.catalog.exceptions.DatabaseNotExistException; import org.apache.flink.table.catalog.exceptions.FunctionNotExistException; import org.apache.flink.table.delegation.PlannerTypeInferenceUtil; import org.apache.flink.table.functions.AggregateFunction; import org.apache.flink.table.functions.AggregateFunctionDefinition; import org.apache.flink.table.functions.FunctionDefinition; import org.apache.flink.table.functions.FunctionIdentifier; import org.apache.flink.table.functions.ImperativeAggregateFunction; import org.apache.flink.table.functions.ScalarFunction; import org.apache.flink.table.functions.ScalarFunctionDefinition; import org.apache.flink.table.functions.TableAggregateFunction; import org.apache.flink.table.functions.TableAggregateFunctionDefinition; import org.apache.flink.table.functions.TableFunction; import 
org.apache.flink.table.functions.TableFunctionDefinition;
import org.apache.flink.table.functions.UserDefinedFunction;
import org.apache.flink.table.functions.UserDefinedFunctionHelper;
import org.apache.flink.table.module.ModuleManager;
import org.apache.flink.util.Preconditions;

import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * Simple function catalog to store {@link FunctionDefinition}s in catalogs.
 *
 * <p>Note: This class can be cleaned up a lot once we drop the methods deprecated as part of
 * FLIP-65. In the long-term, the class should be a part of catalog manager similar to {@link
 * DataTypeFactory}.
 */
@Internal
public final class FunctionCatalog {
    private final ReadableConfig config;
    private final CatalogManager catalogManager;
    private final ModuleManager moduleManager;

    // Both maps are keyed by normalized names/identifiers; insertion order is preserved.
    private final Map<String, CatalogFunction> tempSystemFunctions = new LinkedHashMap<>();
    private final Map<ObjectIdentifier, CatalogFunction> tempCatalogFunctions =
            new LinkedHashMap<>();

    /**
     * Temporary utility until the new type inference is fully functional. It needs to be set by the
     * planner.
     */
    private PlannerTypeInferenceUtil plannerTypeInferenceUtil;

    public FunctionCatalog(
            TableConfig config, CatalogManager catalogManager, ModuleManager moduleManager) {
        this.config = checkNotNull(config).getConfiguration();
        this.catalogManager = checkNotNull(catalogManager);
        this.moduleManager = checkNotNull(moduleManager);
    }

    public void setPlannerTypeInferenceUtil(PlannerTypeInferenceUtil plannerTypeInferenceUtil) {
        this.plannerTypeInferenceUtil = plannerTypeInferenceUtil;
    }

    /** Registers a temporary system function backed by an in-memory {@link FunctionDefinition}. */
    public void registerTemporarySystemFunction(
            String name, FunctionDefinition definition, boolean ignoreIfExists) {
        registerTemporarySystemFunction(
                name, new InlineCatalogFunction(definition), ignoreIfExists);
    }

    /** Registers an uninstantiated temporary system function (by class name and language). */
    public void registerTemporarySystemFunction(
            String name,
            String fullyQualifiedName,
            FunctionLanguage language,
            boolean ignoreIfExists) {
        registerTemporarySystemFunction(
                name, new CatalogFunctionImpl(fullyQualifiedName, language), ignoreIfExists);
    }

    /** Drops a temporary system function. Returns true if a function was dropped. */
    public boolean dropTemporarySystemFunction(String name, boolean ignoreIfNotExist) {
        final String normalizedName = FunctionIdentifier.normalizeName(name);
        final CatalogFunction function = tempSystemFunctions.remove(normalizedName);

        if (function == null && !ignoreIfNotExist) {
            throw new ValidationException(
                    String.format(
                            "Could not drop temporary system function. A function named '%s' doesn't exist.",
                            name));
        }

        return function != null;
    }

    /** Registers a temporary catalog function backed by an in-memory {@link FunctionDefinition}. */
    public void registerTemporaryCatalogFunction(
            UnresolvedIdentifier unresolvedIdentifier,
            FunctionDefinition definition,
            boolean ignoreIfExists) {
        registerTemporaryCatalogFunction(
                unresolvedIdentifier, new InlineCatalogFunction(definition), ignoreIfExists);
    }

    /** Registers an uninstantiated temporary catalog function. */
    public void registerTemporaryCatalogFunction(
            UnresolvedIdentifier unresolvedIdentifier,
            CatalogFunction catalogFunction,
            boolean ignoreIfExists) {
        final ObjectIdentifier identifier =
                catalogManager.qualifyIdentifier(unresolvedIdentifier);
        final ObjectIdentifier normalizedIdentifier =
                FunctionIdentifier.normalizeObjectIdentifier(identifier);

        if (!tempCatalogFunctions.containsKey(normalizedIdentifier)) {
            // A registered listener may replace the function before it is stored
            // (e.g. to resolve it against external metadata).
            Optional<TemporaryOperationListener> listener =
                    catalogManager.getTemporaryOperationListener(normalizedIdentifier);
            if (listener.isPresent()) {
                catalogFunction =
                        listener.get()
                                .onCreateTemporaryFunction(
                                        normalizedIdentifier.toObjectPath(), catalogFunction);
            }
            try {
                validateAndPrepareFunction(catalogFunction);
            } catch (Throwable t) {
                throw new ValidationException(
                        String.format(
                                "Could not register temporary catalog function '%s' due to implementation errors.",
                                identifier.asSummaryString()),
                        t);
            }
            tempCatalogFunctions.put(normalizedIdentifier, catalogFunction);
        } else if (!ignoreIfExists) {
            throw new ValidationException(
                    String.format(
                            "Could not register temporary catalog function. A function '%s' does already exist.",
                            identifier.asSummaryString()));
        }
    }

    /** Drops a temporary catalog function. Returns true if a function was dropped. */
    public boolean dropTemporaryCatalogFunction(
            UnresolvedIdentifier unresolvedIdentifier, boolean ignoreIfNotExist) {
        final ObjectIdentifier identifier =
                catalogManager.qualifyIdentifier(unresolvedIdentifier);

        return dropTempCatalogFunction(identifier, ignoreIfNotExist) != null;
    }

    /** Registers a catalog function by also considering temporary catalog functions. */
    public void registerCatalogFunction(
            UnresolvedIdentifier unresolvedIdentifier,
            Class<? extends UserDefinedFunction> functionClass,
            boolean ignoreIfExists) {
        final ObjectIdentifier identifier =
                catalogManager.qualifyIdentifier(unresolvedIdentifier);
        final ObjectIdentifier normalizedIdentifier =
                FunctionIdentifier.normalizeObjectIdentifier(identifier);

        try {
            UserDefinedFunctionHelper.validateClass(functionClass);
        } catch (Throwable t) {
            throw new ValidationException(
                    String.format(
                            "Could not register catalog function '%s' due to implementation errors.",
                            identifier.asSummaryString()),
                    t);
        }

        final Catalog catalog =
                catalogManager
                        .getCatalog(normalizedIdentifier.getCatalogName())
                        .orElseThrow(IllegalStateException::new);
        final ObjectPath path = identifier.toObjectPath();

        // we force users to deal with temporary catalog functions first
        if (tempCatalogFunctions.containsKey(normalizedIdentifier)) {
            if (ignoreIfExists) {
                return;
            }
            throw new ValidationException(
                    String.format(
                            "Could not register catalog function. A temporary function '%s' does already exist. "
                                    + "Please drop the temporary function first.",
                            identifier.asSummaryString()));
        }

        if (catalog.functionExists(path)) {
            if (ignoreIfExists) {
                return;
            }
            throw new ValidationException(
                    String.format(
                            "Could not register catalog function. A function '%s' does already exist.",
                            identifier.asSummaryString()));
        }

        final CatalogFunction catalogFunction =
                new CatalogFunctionImpl(functionClass.getName(), FunctionLanguage.JAVA);
        try {
            catalog.createFunction(path, catalogFunction, ignoreIfExists);
        } catch (Throwable t) {
            throw new TableException(
                    String.format(
                            "Could not register catalog function '%s'.",
                            identifier.asSummaryString()),
                    t);
        }
    }

    /**
     * Drops a catalog function by also considering temporary catalog functions. Returns true if a
     * function was dropped.
     */
    public boolean dropCatalogFunction(
            UnresolvedIdentifier unresolvedIdentifier, boolean ignoreIfNotExist) {
        final ObjectIdentifier identifier =
                catalogManager.qualifyIdentifier(unresolvedIdentifier);
        final ObjectIdentifier normalizedIdentifier =
                FunctionIdentifier.normalizeObjectIdentifier(identifier);

        final Catalog catalog =
                catalogManager
                        .getCatalog(normalizedIdentifier.getCatalogName())
                        .orElseThrow(IllegalStateException::new);
        final ObjectPath path = identifier.toObjectPath();

        // we force users to deal with temporary catalog functions first
        if (tempCatalogFunctions.containsKey(normalizedIdentifier)) {
            throw new ValidationException(
                    String.format(
                            "Could not drop catalog function. A temporary function '%s' does already exist. "
                                    + "Please drop the temporary function first.",
                            identifier.asSummaryString()));
        }

        if (!catalog.functionExists(path)) {
            if (ignoreIfNotExist) {
                return false;
            }
            throw new ValidationException(
                    String.format(
                            "Could not drop catalog function. A function '%s' doesn't exist.",
                            identifier.asSummaryString()));
        }

        try {
            catalog.dropFunction(path, ignoreIfNotExist);
        } catch (Throwable t) {
            throw new TableException(
                    String.format(
                            "Could not drop catalog function '%s'.",
                            identifier.asSummaryString()),
                    t);
        }
        return true;
    }

    /**
     * Get names of all user defined functions, including temp system functions, temp catalog
     * functions and catalog functions in the current catalog and current database.
     */
    public String[] getUserDefinedFunctions() {
        return getUserDefinedFunctionNames().toArray(new String[0]);
    }

    /**
     * Get names of all functions, including temp system functions, system functions, temp catalog
     * functions and catalog functions in the current catalog and current database.
     */
    public String[] getFunctions() {
        Set<String> result = getUserDefinedFunctionNames();

        // add system functions
        result.addAll(moduleManager.listFunctions());

        return result.toArray(new String[0]);
    }

    /**
     * Check whether a temporary catalog function is already registered.
     *
     * @param functionIdentifier the object identifier of function
     * @return whether the temporary catalog function exists in the function catalog
     */
    public boolean hasTemporaryCatalogFunction(ObjectIdentifier functionIdentifier) {
        ObjectIdentifier normalizedIdentifier =
                FunctionIdentifier.normalizeObjectIdentifier(functionIdentifier);
        return tempCatalogFunctions.containsKey(normalizedIdentifier);
    }

    /**
     * Check whether a temporary system function is already registered.
     *
     * <p>NOTE(review): unlike the catalog-function variant above, the name is NOT
     * normalized here before the lookup — confirm callers pass a normalized name.
     *
     * @param functionName the name of the function
     * @return whether the temporary system function exists in the function catalog
     */
    public boolean hasTemporarySystemFunction(String functionName) {
        return tempSystemFunctions.containsKey(functionName);
    }

    /**
     * Creates a {@link FunctionLookup} to this {@link FunctionCatalog}.
     *
     * @param parser parser to use for parsing identifiers
     */
    public FunctionLookup asLookup(Function<String, UnresolvedIdentifier> parser) {
        return new FunctionLookup() {
            @Override
            public Optional<Result> lookupFunction(String stringIdentifier) {
                UnresolvedIdentifier unresolvedIdentifier = parser.apply(stringIdentifier);
                return lookupFunction(unresolvedIdentifier);
            }

            @Override
            public Optional<FunctionLookup.Result> lookupFunction(UnresolvedIdentifier identifier) {
                return FunctionCatalog.this.lookupFunction(identifier);
            }

            @Override
            public PlannerTypeInferenceUtil getPlannerTypeInferenceUtil() {
                Preconditions.checkNotNull(
                        plannerTypeInferenceUtil,
                        "A planner should have set the type inference utility.");
                return plannerTypeInferenceUtil;
            }
        };
    }

    public Optional<FunctionLookup.Result> lookupFunction(UnresolvedIdentifier identifier) {
        // precise function reference
        if (identifier.getDatabaseName().isPresent()) {
            return resolvePreciseFunctionReference(catalogManager.qualifyIdentifier(identifier));
        } else {
            // ambiguous function reference
            return resolveAmbiguousFunctionReference(identifier.getObjectName());
        }
    }

    // --------------------------------------------------------------------------------------------
    // Legacy function handling before FLIP-65
    // --------------------------------------------------------------------------------------------

    /**
     * @deprecated Use {@link #registerTemporarySystemFunction(String, FunctionDefinition, boolean)}
     *     instead.
     */
    @Deprecated
    public void registerTempSystemScalarFunction(String name, ScalarFunction function) {
        UserDefinedFunctionHelper.prepareInstance(config, function);

        registerTempSystemFunction(name, new ScalarFunctionDefinition(name, function));
    }

    /**
     * @deprecated Use {@link #registerTemporarySystemFunction(String, FunctionDefinition, boolean)}
     *     instead.
     */
    @Deprecated
    public <T> void registerTempSystemTableFunction(
            String name, TableFunction<T> function, TypeInformation<T> resultType) {
        UserDefinedFunctionHelper.prepareInstance(config, function);

        registerTempSystemFunction(name, new TableFunctionDefinition(name, function, resultType));
    }

    /**
     * @deprecated Use {@link #registerTemporarySystemFunction(String, FunctionDefinition, boolean)}
     *     instead.
     */
    @Deprecated
    public <T, ACC> void registerTempSystemAggregateFunction(
            String name,
            ImperativeAggregateFunction<T, ACC> function,
            TypeInformation<T> resultType,
            TypeInformation<ACC> accType) {
        UserDefinedFunctionHelper.prepareInstance(config, function);

        // Wrap in the definition type matching the concrete aggregate flavor.
        final FunctionDefinition definition;
        if (function instanceof AggregateFunction) {
            definition =
                    new AggregateFunctionDefinition(
                            name, (AggregateFunction<?, ?>) function, resultType, accType);
        } else if (function instanceof TableAggregateFunction) {
            definition =
                    new TableAggregateFunctionDefinition(
                            name, (TableAggregateFunction<?, ?>) function, resultType, accType);
        } else {
            throw new TableException("Unknown function class: " + function.getClass());
        }

        registerTempSystemFunction(name, definition);
    }

    /**
     * @deprecated Use {@link #registerTemporaryCatalogFunction(UnresolvedIdentifier,
     *     FunctionDefinition, boolean)} instead.
     */
    @Deprecated
    public void registerTempCatalogScalarFunction(ObjectIdentifier oi, ScalarFunction function) {
        UserDefinedFunctionHelper.prepareInstance(config, function);

        registerTempCatalogFunction(oi, new ScalarFunctionDefinition(oi.getObjectName(), function));
    }

    /**
     * Drop a temporary catalog function.
     *
     * @param identifier identifier of the function
     * @param ignoreIfNotExist Flag to specify behavior when the function does not exist: if set to
     *     false, throw an exception, if set to true, do nothing.
     * @return the removed catalog function, which is null if function doesn't exist and
     *     ignoreIfNotExist is true.
     */
    public CatalogFunction dropTempCatalogFunction(
            ObjectIdentifier identifier, boolean ignoreIfNotExist) {
        ObjectIdentifier normalizedName =
                FunctionIdentifier.normalizeObjectIdentifier(identifier);

        CatalogFunction fd = tempCatalogFunctions.get(normalizedName);
        if (fd != null) {
            // notify any listener before actually removing the entry
            catalogManager
                    .getTemporaryOperationListener(normalizedName)
                    .ifPresent(l -> l.onDropTemporaryFunction(normalizedName.toObjectPath()));
            tempCatalogFunctions.remove(normalizedName);
        } else if (!ignoreIfNotExist) {
            throw new ValidationException(
                    String.format("Temporary catalog function %s doesn't exist", identifier));
        }

        return fd;
    }

    /**
     * @deprecated Use {@link #registerTemporarySystemFunction(String, FunctionDefinition, boolean)}
     *     instead.
     */
    @Deprecated
    private void registerTempSystemFunction(String name, FunctionDefinition functionDefinition) {
        // This method is called by the interface which uses the old type inference,
        // e.g. TableEnvironment#registerFunction
        // In this case the UDF is wrapped by ScalarFunctionDefinition, TableFunctionDefinition,
        // etc.
        // The raw UDFs will be validated and cleaned before being wrapped, so just put them to the
        // map
        // in this method.
        tempSystemFunctions.put(
                FunctionIdentifier.normalizeName(name),
                new InlineCatalogFunction(functionDefinition));
    }

    /**
     * @deprecated Use {@link #registerTemporaryCatalogFunction(UnresolvedIdentifier,
     *     FunctionDefinition, boolean)} instead.
     */
    @Deprecated
    private void registerTempCatalogFunction(
            ObjectIdentifier oi, FunctionDefinition functionDefinition) {
        // This method is called by the interface which uses the old type inference,
        // but there is no TableEnvironment-level public API uses this method now.
        // In this case the UDFs are wrapped by ScalarFunctionDefinition, TableFunctionDefinition,
        // etc.
        // The raw UDFs will be validated and cleaned before being wrapped, so just put them to the
        // map
        // in this method.
        tempCatalogFunctions.put(
                FunctionIdentifier.normalizeObjectIdentifier(oi),
                new InlineCatalogFunction(functionDefinition));
    }

    // Shared implementation for the public temporary-system-function registrations:
    // validates the function, then stores it under the normalized name.
    private void registerTemporarySystemFunction(
            String name, CatalogFunction function, boolean ignoreIfExists) {
        final String normalizedName = FunctionIdentifier.normalizeName(name);

        try {
            validateAndPrepareFunction(function);
        } catch (Throwable t) {
            throw new ValidationException(
                    String.format(
                            "Could not register temporary system function '%s' due to implementation errors.",
                            name),
                    t);
        }

        if (!tempSystemFunctions.containsKey(normalizedName)) {
            tempSystemFunctions.put(normalizedName, function);
        } else if (!ignoreIfExists) {
            throw new ValidationException(
                    String.format(
                            "Could not register temporary system function. A function named '%s' does already exist.",
                            name));
        }
    }

    // --------------------------------------------------------------------------------------------

    // Collects temp system functions plus temp/persistent catalog functions of the
    // current catalog + database (system/module functions are NOT included here).
    private Set<String> getUserDefinedFunctionNames() {

        // add temp system functions
        Set<String> result = new HashSet<>(tempSystemFunctions.keySet());

        String currentCatalog = catalogManager.getCurrentCatalog();
        String currentDatabase = catalogManager.getCurrentDatabase();

        // add temp catalog functions
        result.addAll(
                tempCatalogFunctions.keySet().stream()
                        .filter(
                                oi ->
                                        oi.getCatalogName().equals(currentCatalog)
                                                && oi.getDatabaseName().equals(currentDatabase))
                        .map(ObjectIdentifier::getObjectName)
                        .collect(Collectors.toSet()));

        // add catalog functions
        Catalog catalog = catalogManager.getCatalog(currentCatalog).get();
        try {
            result.addAll(catalog.listFunctions(currentDatabase));
        } catch (DatabaseNotExistException e) {
            // Ignore since there will always be a current database of the current catalog
        }

        return result;
    }

    private Optional<FunctionLookup.Result> resolvePreciseFunctionReference(ObjectIdentifier oi) {
        // resolve order:
        // 1. Temporary functions
        // 2.
Catalog functions ObjectIdentifier normalizedIdentifier = FunctionIdentifier.normalizeObjectIdentifier(oi); CatalogFunction potentialResult = tempCatalogFunctions.get(normalizedIdentifier); if (potentialResult != null) { return Optional.of( new FunctionLookup.Result( FunctionIdentifier.of(oi), getFunctionDefinition(oi.getObjectName(), potentialResult))); } Optional<Catalog> catalogOptional = catalogManager.getCatalog(oi.getCatalogName()); if (catalogOptional.isPresent()) { Catalog catalog = catalogOptional.get(); try { CatalogFunction catalogFunction = catalog.getFunction( new ObjectPath(oi.getDatabaseName(), oi.getObjectName())); FunctionDefinition fd; if (catalog.getFunctionDefinitionFactory().isPresent() && catalogFunction.getFunctionLanguage() != FunctionLanguage.PYTHON) { fd = catalog.getFunctionDefinitionFactory() .get() .createFunctionDefinition(oi.getObjectName(), catalogFunction); } else { fd = getFunctionDefinition(oi.asSummaryString(), catalogFunction); } return Optional.of(new FunctionLookup.Result(FunctionIdentifier.of(oi), fd)); } catch (FunctionNotExistException e) { // Ignore } } return Optional.empty(); } private Optional<FunctionLookup.Result> resolveAmbiguousFunctionReference(String funcName) { // resolve order: // 1. Temporary system functions // 2. System functions // 3. Temporary catalog functions // 4. 
Catalog functions String normalizedName = FunctionIdentifier.normalizeName(funcName); if (tempSystemFunctions.containsKey(normalizedName)) { return Optional.of( new FunctionLookup.Result( FunctionIdentifier.of(funcName), getFunctionDefinition( normalizedName, tempSystemFunctions.get(normalizedName)))); } Optional<FunctionDefinition> candidate = moduleManager.getFunctionDefinition(normalizedName); ObjectIdentifier oi = ObjectIdentifier.of( catalogManager.getCurrentCatalog(), catalogManager.getCurrentDatabase(), funcName); return candidate .map( fd -> Optional.of( new FunctionLookup.Result( FunctionIdentifier.of(funcName), fd))) .orElseGet(() -> resolvePreciseFunctionReference(oi)); } @SuppressWarnings("unchecked") private void validateAndPrepareFunction(CatalogFunction function) throws ClassNotFoundException { // If the input is instance of UserDefinedFunction, it means it uses the new type inference. // In this situation the UDF have not been validated and cleaned, so we need to validate it // and clean its closure here. // If the input is instance of `ScalarFunctionDefinition`, `TableFunctionDefinition` and so // on, // it means it uses the old type inference. We assume that they have been validated before // being // wrapped. if (function instanceof InlineCatalogFunction && ((InlineCatalogFunction) function).getDefinition() instanceof UserDefinedFunction) { FunctionDefinition definition = ((InlineCatalogFunction) function).getDefinition(); UserDefinedFunctionHelper.prepareInstance(config, (UserDefinedFunction) definition); } else if (function.getFunctionLanguage() == FunctionLanguage.JAVA) { ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); UserDefinedFunctionHelper.validateClass( (Class<? 
extends UserDefinedFunction>) contextClassLoader.loadClass(function.getClassName())); } } private FunctionDefinition getFunctionDefinition(String name, CatalogFunction function) { if (function instanceof InlineCatalogFunction) { // The instantiated UDFs have been validated and cleaned when registering, just return // them // directly. return ((InlineCatalogFunction) function).getDefinition(); } return UserDefinedFunctionHelper.instantiateFunction( Thread.currentThread() .getContextClassLoader(), // TODO use classloader of catalog manager in the // future config, name, function); } /** The CatalogFunction which holds a instantiated UDF. */ private static class InlineCatalogFunction implements CatalogFunction { private final FunctionDefinition definition; InlineCatalogFunction(FunctionDefinition definition) { this.definition = definition; } @Override public String getClassName() { // Not all instantiated UDFs have a class name, such as Python Lambda UDF. Even if the // UDF // has a class name, there is no guarantee that the new UDF object constructed from the // class name is the same as the UDF held by this object. To reduce the chance of making // mistakes, UnsupportedOperationException is thrown here. throw new UnsupportedOperationException( "This CatalogFunction is a InlineCatalogFunction. This method should not be called."); } @Override public CatalogFunction copy() { return new InlineCatalogFunction(definition); } @Override public Optional<String> getDescription() { return Optional.empty(); } @Override public Optional<String> getDetailedDescription() { return Optional.empty(); } @Override public boolean isGeneric() { throw new UnsupportedOperationException( "This CatalogFunction is a InlineCatalogFunction. This method should not be called."); } @Override public FunctionLanguage getFunctionLanguage() { return FunctionLanguage.JAVA; } public FunctionDefinition getDefinition() { return definition; } } }
/*******************************************************************************
 * Copyright 2013 Raphael Jolivet
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package java2typescript.jaxrs;

import static java2typescript.jaxrs.model.ParamType.BODY;
import static java2typescript.jaxrs.model.ParamType.FORM;
import static java2typescript.jaxrs.model.ParamType.PATH;
import static java2typescript.jaxrs.model.ParamType.QUERY;

import java.beans.Introspector;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import java2typescript.jackson.module.DefinitionGenerator;
import java2typescript.jackson.module.grammar.AnyType;
import java2typescript.jackson.module.grammar.ClassType;
import java2typescript.jackson.module.grammar.FunctionType;
import java2typescript.jackson.module.grammar.Module;
import java2typescript.jackson.module.grammar.StringType;
import java2typescript.jackson.module.grammar.VoidType;
import java2typescript.jackson.module.grammar.base.AbstractNamedType;
import java2typescript.jackson.module.grammar.base.AbstractType;
import java2typescript.jaxrs.model.HttpMethod;
import java2typescript.jaxrs.model.Param;
import java2typescript.jaxrs.model.RestMethod;
import java2typescript.jaxrs.model.RestService;

import javax.ws.rs.DELETE;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Request;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;

import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;

/**
 * Generates a {@link RestService} description out of a JAX-RS annotated service class /
 * interface, and can emit either a TypeScript definition module ({@link #generateTypeScript})
 * or a JavaScript client built from a bundled template ({@link #generateJavascript}).
 */
public class ServiceDescriptorGenerator {

    /** Classpath resource holding the JS module template. */
    private static final String JS_TEMPLATE_RES = "module-template.js";

    private static final String MODULE_NAME_PLACEHOLDER = "%MODULE_NAME%";
    private static final String JSON_PLACEHOLDER = "%JSON%";

    /** Names of the variables exposed by the generated module. */
    static private final String ROOT_URL_VAR = "rootUrl";
    static private final String ADAPTER_VAR = "adapter";

    /** Service classes to describe. */
    private final Collection<? extends Class<?>> classes;

    /** Mapper used for type-definition generation; gets dummy JAX-RS serializers registered. */
    private final ObjectMapper mapper;

    public ServiceDescriptorGenerator(Collection<? extends Class<?>> classes) {
        this(classes, new ObjectMapper());
    }

    public ServiceDescriptorGenerator(Collection<? extends Class<?>> classes, ObjectMapper mapper) {
        this.classes = classes;
        this.mapper = mapper;
        addDummyMappingForJAXRSClasses();
    }

    /** Serializer that writes nothing; used so JAX-RS infrastructure types map to "any". */
    private class DummySerializer extends JsonSerializer<Object> {
        @Override
        public void serialize(Object value, JsonGenerator jgen, SerializerProvider provider)
                throws IOException, JsonProcessingException {
            // No implementation here
        }
    }

    /** Those classes will be transformed as "any" */
    private void addDummyMappingForJAXRSClasses() {
        SimpleModule module = new SimpleModule("dummy jax-rs mappings");
        module.addSerializer(Response.class, new DummySerializer());
        module.addSerializer(UriInfo.class, new DummySerializer());
        module.addSerializer(Request.class, new DummySerializer());
        mapper.registerModule(module);
    }

    /**
     * Main method to generate a REST Service descriptor out of JAX-RS service classes.
     *
     * @throws RuntimeException if a class is missing its {@code @Path} annotation
     */
    private Collection<RestService> generateRestServices(Collection<? extends Class<?>> classes) {
        List<RestService> services = new ArrayList<RestService>();
        for (Class<?> clazz : classes) {
            RestService service = new RestService();
            service.setName(clazz.getSimpleName());

            Path pathAnnotation = clazz.getAnnotation(Path.class);
            if (pathAnnotation == null) {
                throw new RuntimeException("No @Path on class " + clazz.getName());
            }
            service.setPath(pathAnnotation.value());

            // Only public methods become REST methods.
            for (Method method : clazz.getDeclaredMethods()) {
                if (Modifier.isPublic(method.getModifiers())) {
                    RestMethod restMethod = generateMethod(method);
                    service.getMethods().put(restMethod.getName(), restMethod);
                }
            }
            services.add(service);
        }
        return services;
    }

    /**
     * Generates a typescript definition of the REST service together with all
     * required named types (classes and enums).
     */
    public Module generateTypeScript(String moduleName) throws JsonMappingException {
        // Generates Typescript module out of service classes definition
        DefinitionGenerator defGen = new DefinitionGenerator(mapper);
        Module module = defGen.generateTypeScript(moduleName, classes, null);

        // For each rest service, update methods with parameter names taken from
        // the REST service descriptor (reflection alone loses them).
        for (RestService restService : generateRestServices(classes)) {
            ClassType classDef = (ClassType) module.getNamedTypes().get(restService.getName());
            decorateParamNames(restService, classDef);
        }

        addModuleVars(module, classes);
        return module;
    }

    /**
     * Generate JS implementation by filling the bundled template with the service JSON.
     *
     * @throws IOException
     * @throws JsonMappingException
     * @throws JsonGenerationException
     */
    public void generateJavascript(String moduleName, Writer writer)
            throws JsonGenerationException, JsonMappingException, IOException {

        // Generate JSON as String
        StringWriter jsonOut = new StringWriter();
        Collection<RestService> restServices = this.generateRestServices(classes);
        RestService.toJSON(restServices, jsonOut);

        // Read template content. The template ships inside the jar, so its encoding is
        // fixed; read it as UTF-8 instead of the platform default charset, which broke
        // on platforms with a non-UTF-8 default encoding.
        String jsTemplate = com.google.common.io.Resources.toString(
                ServiceDescriptorGenerator.class.getResource(JS_TEMPLATE_RES),
                StandardCharsets.UTF_8);

        // Replace template values
        String out = jsTemplate.replace(MODULE_NAME_PLACEHOLDER, moduleName);
        out = out.replace(JSON_PLACEHOLDER, jsonOut.toString());

        writer.write(out);
    }

    /**
     * Builds a {@link RestMethod} from a reflected method: path, HTTP verb and parameters.
     *
     * @throws RuntimeException if no HTTP verb annotation is present
     */
    private RestMethod generateMethod(Method method) {
        RestMethod restMethod = new RestMethod();

        // A method-level @Path is optional (defaults to the class path).
        Path pathAnnotation = method.getAnnotation(Path.class);
        restMethod.setPath(pathAnnotation == null ? "" : pathAnnotation.value());
        restMethod.setName(method.getName());

        if (method.getAnnotation(GET.class) != null) {
            restMethod.setHttpMethod(HttpMethod.GET);
        }
        if (method.getAnnotation(POST.class) != null) {
            restMethod.setHttpMethod(HttpMethod.POST);
        }
        if (method.getAnnotation(PUT.class) != null) {
            restMethod.setHttpMethod(HttpMethod.PUT);
        }
        if (method.getAnnotation(DELETE.class) != null) {
            restMethod.setHttpMethod(HttpMethod.DELETE);
        }
        if (restMethod.getHttpMethod() == null) {
            throw new RuntimeException("No Http method defined for method : " + method.getName());
        }

        restMethod.setParams(generateParams(method));
        return restMethod;
    }

    /** Maps each parameter of the method to a {@link Param}, defaulting to a body param. */
    private List<Param> generateParams(Method method) {
        List<Param> params = new ArrayList<Param>();
        for (Annotation[] annotations : method.getParameterAnnotations()) {
            Param param = new Param();
            param.setType(BODY); // By default, in case of no annotation
            param.setName("body");
            for (Annotation annotation : annotations) {
                fillParam(annotation, param);
            }
            params.add(param);
        }
        return params;
    }

    /** Transfers a single JAX-RS parameter annotation onto the {@link Param} model. */
    private void fillParam(Annotation annot, Param param) {
        if (annot instanceof PathParam) {
            param.setType(PATH);
            param.setName(((PathParam) annot).value());
        } else if (annot instanceof QueryParam) {
            param.setType(QUERY);
            param.setName(((QueryParam) annot).value());
        } else if (annot instanceof FormParam) {
            param.setType(FORM);
            param.setName(((FormParam) annot).value());
        } else if (annot instanceof Context) {
            // @Context parameters are injected server-side and never appear in the client API.
            param.setContext(true);
        }
    }

    /** Use collected annotations in order to add param names to service methods. */
    private void decorateParamNames(RestService module, ClassType classDef) {
        // Loop on methods of the service
        for (RestMethod restMethod : module.getMethods().values()) {
            FunctionType function = classDef.getMethods().get(restMethod.getName());

            // Copy ordered list of param types, then rebuild the map with real names.
            List<AbstractType> types = new ArrayList<AbstractType>();
            types.addAll(function.getParameters().values());
            function.getParameters().clear();

            int i = 0;
            for (Param param : restMethod.getParams()) {
                // Skip @Context parameters but keep the index in step with `types`,
                // which still contains an entry for them.
                if (!param.isContext()) {
                    function.getParameters().put(param.getName(), types.get(i));
                }
                i++;
            }
        }
    }

    /** Declares the module-level vars: root URL, adapter function, one var per service. */
    private void addModuleVars(Module module, Collection<? extends Class<?>> serviceClasses) {
        module.getVars().put(ROOT_URL_VAR, StringType.getInstance());

        // Adapter function: adapter(httpMethod, path, getParams, postParams, body) -> void
        FunctionType adapterFuncType = new FunctionType();
        adapterFuncType.setResultType(VoidType.getInstance());
        adapterFuncType.getParameters().put("httpMethod", StringType.getInstance());
        adapterFuncType.getParameters().put("path", StringType.getInstance());
        adapterFuncType.getParameters().put("getParams", ClassType.getObjectClass());
        adapterFuncType.getParameters().put("postParams", ClassType.getObjectClass());
        adapterFuncType.getParameters().put("body", AnyType.getInstance());
        module.getVars().put(ADAPTER_VAR, adapterFuncType);

        // Generate : var someService : SomeService;
        for (Class<?> clazz : serviceClasses) {
            String className = clazz.getSimpleName();
            AbstractNamedType type = module.getNamedTypes().get(className);
            String varName = Introspector.decapitalize(className);
            module.getVars().put(varName, type);
        }
    }
}
/** * Copyright 2013, Big Switch Networks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. **/ package net.floodlightcontroller.staticflowentry; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import net.floodlightcontroller.core.FloodlightContext; import net.floodlightcontroller.core.HAListenerTypeMarker; import net.floodlightcontroller.core.IFloodlightProviderService; import net.floodlightcontroller.core.IHAListener; import net.floodlightcontroller.core.IOFMessageListener; import net.floodlightcontroller.core.IOFSwitch; import net.floodlightcontroller.core.IOFSwitchListener; import net.floodlightcontroller.core.PortChangeType; import net.floodlightcontroller.core.annotations.LogMessageCategory; import net.floodlightcontroller.core.annotations.LogMessageDoc; import net.floodlightcontroller.core.internal.IOFSwitchService; import net.floodlightcontroller.core.module.FloodlightModuleContext; import net.floodlightcontroller.core.module.FloodlightModuleException; import net.floodlightcontroller.core.module.IFloodlightModule; import net.floodlightcontroller.core.module.IFloodlightService; import net.floodlightcontroller.core.util.AppCookie; import net.floodlightcontroller.restserver.IRestApiService; import 
net.floodlightcontroller.staticflowentry.web.StaticFlowEntryWebRoutable; import net.floodlightcontroller.storage.IResultSet; import net.floodlightcontroller.storage.IStorageSourceListener; import net.floodlightcontroller.storage.IStorageSourceService; import net.floodlightcontroller.storage.StorageException; import net.floodlightcontroller.util.ActionUtils; import net.floodlightcontroller.util.FlowModUtils; import net.floodlightcontroller.util.InstructionUtils; import net.floodlightcontroller.util.MatchUtils; import org.projectfloodlight.openflow.protocol.OFFactories; import org.projectfloodlight.openflow.protocol.OFFlowAdd; import org.projectfloodlight.openflow.protocol.OFFlowDeleteStrict; import org.projectfloodlight.openflow.protocol.OFFlowMod; import org.projectfloodlight.openflow.protocol.OFFlowRemoved; import org.projectfloodlight.openflow.protocol.OFFlowRemovedReason; import org.projectfloodlight.openflow.protocol.OFPortDesc; import org.projectfloodlight.openflow.protocol.OFMessage; import org.projectfloodlight.openflow.protocol.OFType; import org.projectfloodlight.openflow.protocol.OFVersion; import org.projectfloodlight.openflow.protocol.ver10.OFFlowRemovedReasonSerializerVer10; import org.projectfloodlight.openflow.protocol.ver11.OFFlowRemovedReasonSerializerVer11; import org.projectfloodlight.openflow.protocol.ver12.OFFlowRemovedReasonSerializerVer12; import org.projectfloodlight.openflow.protocol.ver13.OFFlowRemovedReasonSerializerVer13; import org.projectfloodlight.openflow.protocol.ver14.OFFlowRemovedReasonSerializerVer14; import org.projectfloodlight.openflow.types.DatapathId; import org.projectfloodlight.openflow.types.TableId; import org.projectfloodlight.openflow.types.U16; import org.projectfloodlight.openflow.types.U64; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @LogMessageCategory("Static Flow Pusher") /** * This module is responsible for maintaining a set of static flows on * switches. 
This is just a big 'ol dumb list of flows and something external * is responsible for ensuring they make sense for the network. */ public class StaticFlowEntryPusher implements IOFSwitchListener, IFloodlightModule, IStaticFlowEntryPusherService, IStorageSourceListener, IOFMessageListener { protected static Logger log = LoggerFactory.getLogger(StaticFlowEntryPusher.class); public static final String StaticFlowName = "staticflowentry"; public static final int STATIC_FLOW_APP_ID = 10; static { AppCookie.registerApp(STATIC_FLOW_APP_ID, StaticFlowName); } public static final String TABLE_NAME = "controller_staticflowtableentry"; public static final String COLUMN_NAME = "name"; public static final String COLUMN_SWITCH = "switch"; public static final String COLUMN_TABLE_ID = "table"; public static final String COLUMN_ACTIVE = "active"; public static final String COLUMN_IDLE_TIMEOUT = "idle_timeout"; public static final String COLUMN_HARD_TIMEOUT = "hard_timeout"; public static final String COLUMN_PRIORITY = "priority"; public static final String COLUMN_COOKIE = "cookie"; // Common location for Match Strings. Still the same, but relocated. public static final String COLUMN_IN_PORT = MatchUtils.STR_IN_PORT; public static final String COLUMN_DL_SRC = MatchUtils.STR_DL_SRC; public static final String COLUMN_DL_DST = MatchUtils.STR_DL_DST; public static final String COLUMN_DL_VLAN = MatchUtils.STR_DL_VLAN; public static final String COLUMN_DL_VLAN_PCP = MatchUtils.STR_DL_VLAN_PCP; public static final String COLUMN_DL_TYPE = MatchUtils.STR_DL_TYPE; public static final String COLUMN_NW_TOS = MatchUtils.STR_NW_TOS; public static final String COLUMN_NW_ECN = MatchUtils.STR_NW_ECN; public static final String COLUMN_NW_DSCP = MatchUtils.STR_NW_DSCP; public static final String COLUMN_NW_PROTO = MatchUtils.STR_NW_PROTO; public static final String COLUMN_NW_SRC = MatchUtils.STR_NW_SRC; // includes CIDR-style netmask, e.g. 
"128.8.128.0/24" public static final String COLUMN_NW_DST = MatchUtils.STR_NW_DST; public static final String COLUMN_SCTP_SRC = MatchUtils.STR_SCTP_SRC; public static final String COLUMN_SCTP_DST = MatchUtils.STR_SCTP_DST; public static final String COLUMN_UDP_SRC = MatchUtils.STR_UDP_SRC; public static final String COLUMN_UDP_DST = MatchUtils.STR_UDP_DST; public static final String COLUMN_TCP_SRC = MatchUtils.STR_TCP_SRC; public static final String COLUMN_TCP_DST = MatchUtils.STR_TCP_DST; public static final String COLUMN_TP_SRC = MatchUtils.STR_TP_SRC; // support for OF1.0 generic transport ports (possibly sent from the rest api). Only use these to read them in, but store them as the type of port their IpProto is set to. public static final String COLUMN_TP_DST = MatchUtils.STR_TP_DST; /* newly added matches for OF1.3 port start here */ public static final String COLUMN_ICMP_TYPE = MatchUtils.STR_ICMP_TYPE; public static final String COLUMN_ICMP_CODE = MatchUtils.STR_ICMP_CODE; public static final String COLUMN_ARP_OPCODE = MatchUtils.STR_ARP_OPCODE; public static final String COLUMN_ARP_SHA = MatchUtils.STR_ARP_SHA; public static final String COLUMN_ARP_DHA = MatchUtils.STR_ARP_DHA; public static final String COLUMN_ARP_SPA = MatchUtils.STR_ARP_SPA; public static final String COLUMN_ARP_DPA = MatchUtils.STR_ARP_DPA; /* IPv6 related columns */ public static final String COLUMN_NW6_SRC = MatchUtils.STR_IPV6_SRC; public static final String COLUMN_NW6_DST = MatchUtils.STR_IPV6_DST; public static final String COLUMN_IPV6_FLOW_LABEL = MatchUtils.STR_IPV6_FLOW_LABEL; public static final String COLUMN_ICMP6_TYPE = MatchUtils.STR_ICMPV6_TYPE; public static final String COLUMN_ICMP6_CODE = MatchUtils.STR_ICMPV6_CODE; public static final String COLUMN_ND_SLL = MatchUtils.STR_IPV6_ND_SSL; public static final String COLUMN_ND_TLL = MatchUtils.STR_IPV6_ND_TTL; public static final String COLUMN_ND_TARGET = MatchUtils.STR_IPV6_ND_TARGET; public static final String 
COLUMN_MPLS_LABEL = MatchUtils.STR_MPLS_LABEL; public static final String COLUMN_MPLS_TC = MatchUtils.STR_MPLS_TC; public static final String COLUMN_MPLS_BOS = MatchUtils.STR_MPLS_BOS; public static final String COLUMN_METADATA = MatchUtils.STR_METADATA; public static final String COLUMN_TUNNEL_ID = MatchUtils.STR_TUNNEL_ID; public static final String COLUMN_PBB_ISID = MatchUtils.STR_PBB_ISID; /* end newly added matches */ public static final String COLUMN_ACTIONS = "actions"; public static final String COLUMN_INSTR_GOTO_TABLE = InstructionUtils.STR_GOTO_TABLE; // instructions are each getting their own column, due to write and apply actions, which themselves contain a variable list of actions public static final String COLUMN_INSTR_WRITE_METADATA = InstructionUtils.STR_WRITE_METADATA; public static final String COLUMN_INSTR_WRITE_ACTIONS = InstructionUtils.STR_WRITE_ACTIONS; public static final String COLUMN_INSTR_APPLY_ACTIONS = InstructionUtils.STR_APPLY_ACTIONS; public static final String COLUMN_INSTR_CLEAR_ACTIONS = InstructionUtils.STR_CLEAR_ACTIONS; public static final String COLUMN_INSTR_GOTO_METER = InstructionUtils.STR_GOTO_METER; public static final String COLUMN_INSTR_EXPERIMENTER = InstructionUtils.STR_EXPERIMENTER; public static String ColumnNames[] = { COLUMN_NAME, COLUMN_SWITCH, COLUMN_TABLE_ID, COLUMN_ACTIVE, COLUMN_IDLE_TIMEOUT, COLUMN_HARD_TIMEOUT, // table id is new for OF1.3 as well COLUMN_PRIORITY, COLUMN_COOKIE, COLUMN_IN_PORT, COLUMN_DL_SRC, COLUMN_DL_DST, COLUMN_DL_VLAN, COLUMN_DL_VLAN_PCP, COLUMN_DL_TYPE, COLUMN_NW_TOS, COLUMN_NW_PROTO, COLUMN_NW_SRC, COLUMN_NW_DST, COLUMN_TP_SRC, COLUMN_TP_DST, /* newly added matches for OF1.3 port start here */ COLUMN_SCTP_SRC, COLUMN_SCTP_DST, COLUMN_UDP_SRC, COLUMN_UDP_DST, COLUMN_TCP_SRC, COLUMN_TCP_DST, COLUMN_ICMP_TYPE, COLUMN_ICMP_CODE, COLUMN_ARP_OPCODE, COLUMN_ARP_SHA, COLUMN_ARP_DHA, COLUMN_ARP_SPA, COLUMN_ARP_DPA, /* IPv6 related matches */ COLUMN_NW6_SRC, COLUMN_NW6_DST, COLUMN_ICMP6_TYPE, 
COLUMN_ICMP6_CODE, COLUMN_IPV6_FLOW_LABEL, COLUMN_ND_SLL, COLUMN_ND_TLL, COLUMN_ND_TARGET, COLUMN_MPLS_LABEL, COLUMN_MPLS_TC, COLUMN_MPLS_BOS, COLUMN_METADATA, COLUMN_TUNNEL_ID, COLUMN_PBB_ISID, /* end newly added matches */ COLUMN_ACTIONS, /* newly added instructions for OF1.3 port start here */ COLUMN_INSTR_GOTO_TABLE, COLUMN_INSTR_WRITE_METADATA, COLUMN_INSTR_WRITE_ACTIONS, COLUMN_INSTR_APPLY_ACTIONS, COLUMN_INSTR_CLEAR_ACTIONS, COLUMN_INSTR_GOTO_METER, COLUMN_INSTR_EXPERIMENTER /* end newly added instructions */ }; protected IFloodlightProviderService floodlightProviderService; protected IOFSwitchService switchService; protected IStorageSourceService storageSourceService; protected IRestApiService restApiService; private IHAListener haListener; // Map<DPID, Map<Name, FlowMod>>; FlowMod can be null to indicate non-active protected Map<String, Map<String, OFFlowMod>> entriesFromStorage; // Entry Name -> DPID of Switch it's on protected Map<String, String> entry2dpid; // Class to sort FlowMod's by priority, from lowest to highest class FlowModSorter implements Comparator<String> { private String dpid; public FlowModSorter(String dpid) { this.dpid = dpid; } @Override public int compare(String o1, String o2) { OFFlowMod f1 = entriesFromStorage.get(dpid).get(o1); OFFlowMod f2 = entriesFromStorage.get(dpid).get(o2); if (f1 == null || f2 == null) // sort active=false flows by key return o1.compareTo(o2); return U16.of(f1.getPriority()).getValue() - U16.of(f2.getPriority()).getValue(); } }; /** * used for debugging and unittests * @return the number of static flow entries as cached from storage */ public int countEntries() { int size = 0; if (entriesFromStorage == null) return 0; for (String ofswitch : entriesFromStorage.keySet()) size += entriesFromStorage.get(ofswitch).size(); return size; } public IFloodlightProviderService getFloodlightProvider() { return floodlightProviderService; } public void setFloodlightProvider(IFloodlightProviderService 
floodlightProviderService) {
    this.floodlightProviderService = floodlightProviderService;
}

public void setStorageSource(IStorageSourceService storageSourceService) {
    this.storageSourceService = storageSourceService;
}

/**
 * Reads from our entriesFromStorage for the specified switch and
 * sends the FlowMods down to the controller in <b>sorted</b> order.
 *
 * Sorted is important to maintain correctness of the switch:
 * if a packet would match both a lower and a higher priority
 * rule, then we want it to match the higher priority or nothing,
 * but never just the lower priority one. Inserting from high to
 * low priority fixes this.
 *
 * TODO consider adding a "block all" flow mod and then removing it
 * while starting up.
 *
 * @param switchId The switch to send entries to
 */
protected void sendEntriesToSwitch(DatapathId switchId) {
    IOFSwitch sw = switchService.getSwitch(switchId);
    if (sw == null) {
        return;
    }
    String stringId = sw.getId().toString();

    if ((entriesFromStorage != null) && (entriesFromStorage.containsKey(stringId))) {
        Map<String, OFFlowMod> entries = entriesFromStorage.get(stringId);
        List<String> sortedList = new ArrayList<String>(entries.keySet());
        // weird that Collections.sort() returns void
        Collections.sort(sortedList, new FlowModSorter(stringId));
        for (String entryName : sortedList) {
            OFFlowMod flowMod = entries.get(entryName);
            if (flowMod != null) {
                if (log.isDebugEnabled()) {
                    // FIX: arguments were swapped (stringId, entryName) relative to
                    // the message text, printing the DPID as the entry name.
                    log.debug("Pushing static entry {} for {}", entryName, stringId);
                }
                writeFlowModToSwitch(sw, flowMod);
            }
        }
    }
}

/**
 * Used only for bundle-local indexing: inverts the dpid -> (name -> flow)
 * map into a flat (name -> dpid) map.
 *
 * @param map entries keyed by switch DPID, then by entry name
 * @return a concurrent map from entry name to the DPID that owns it
 */
protected Map<String, String> computeEntry2DpidMap(Map<String, Map<String, OFFlowMod>> map) {
    Map<String, String> ret = new ConcurrentHashMap<String, String>();
    for (String dpid : map.keySet()) {
        for (String entry : map.get(dpid).keySet()) {
            ret.put(entry, dpid);
        }
    }
    return ret;
}

/**
 * Read entries from storageSource, and store them in a hash.
 *
 * @return entries keyed by switch DPID, then by entry name (never null;
 *         empty if the table is missing or the query fails)
 */
@LogMessageDoc(level="ERROR",
        message="failed to access storage: {reason}",
        explanation="Could not retrieve static flows from the system " +
                "database",
        recommendation=LogMessageDoc.CHECK_CONTROLLER)
private Map<String, Map<String, OFFlowMod>> readEntriesFromStorage() {
    Map<String, Map<String, OFFlowMod>> entries = new ConcurrentHashMap<String, Map<String, OFFlowMod>>();
    try {
        Map<String, Object> row;
        // null1=no predicate, null2=no ordering
        IResultSet resultSet = storageSourceService.executeQuery(TABLE_NAME, ColumnNames, null, null);
        for (Iterator<IResultSet> it = resultSet.iterator(); it.hasNext();) {
            row = it.next().getRow();
            parseRow(row, entries);
        }
    } catch (StorageException e) {
        log.error("failed to access storage: {}", e.getMessage());
        // if the table doesn't exist, then wait to populate later via
        // setStorageSource()
    }
    return entries;
}

/**
 * Take a single row, turn it into a flowMod, and add it to the
 * entries{$dpid}.{$entryName}=FlowMod
 *
 * IF an entry is inactive, mark it with FlowMod = null
 *
 * @param row a single storage row describing one static flow
 * @param entries the map to populate (dpid -> name -> flow mod)
 */
void parseRow(Map<String, Object> row, Map<String, Map<String, OFFlowMod>> entries) {
    String switchName = null;
    String entryName = null;

    StringBuffer matchString = new StringBuffer();
    OFFlowMod.Builder fmb = null;

    if (!row.containsKey(COLUMN_SWITCH) || !row.containsKey(COLUMN_NAME)) {
        log.debug("skipping entry with missing required 'switch' or 'name' entry: {}", row);
        return;
    }
    // most error checking done with ClassCastException
    try {
        // first, snag the required entries, for debugging info
        switchName = (String) row.get(COLUMN_SWITCH);
        entryName = (String) row.get(COLUMN_NAME);
        if (!entries.containsKey(switchName)) {
            entries.put(switchName, new HashMap<String, OFFlowMod>());
        }

        // get the correct builder for the OF version supported by the switch
        try {
            fmb = OFFactories.getFactory(switchService.getSwitch(DatapathId.of(switchName)).getOFFactory().getVersion()).buildFlowModify();
        } catch (NullPointerException e) {
            /* switch was not connected/known */
            storageSourceService.deleteRowAsync(TABLE_NAME, entryName);
            log.error("Deleting entry {}. Switch {} was not connected to the controller, and we need to know the OF protocol version to compose the flow mod.", entryName, switchName);
            return;
        }

        StaticFlowEntries.initDefaultFlowMod(fmb, entryName);

        for (String key : row.keySet()) {
            if (row.get(key) == null) {
                continue;
            }

            if (key.equals(COLUMN_SWITCH) || key.equals(COLUMN_NAME) || key.equals("id")) {
                continue; // already handled
            }

            if (key.equals(COLUMN_ACTIVE)) {
                if (!Boolean.valueOf((String) row.get(COLUMN_ACTIVE))) {
                    log.debug("skipping inactive entry {} for switch {}", entryName, switchName);
                    entries.get(switchName).put(entryName, null); // mark this an inactive
                    return;
                }
            } else if (key.equals(COLUMN_HARD_TIMEOUT)) {
                fmb.setHardTimeout(Integer.valueOf((String) row.get(COLUMN_HARD_TIMEOUT)));
            } else if (key.equals(COLUMN_IDLE_TIMEOUT)) {
                fmb.setIdleTimeout(Integer.valueOf((String) row.get(COLUMN_IDLE_TIMEOUT)));
            } else if (key.equals(COLUMN_TABLE_ID)) {
                if (fmb.getVersion().compareTo(OFVersion.OF_10) > 0) {
                    fmb.setTableId(TableId.of(Integer.parseInt((String) row.get(key)))); // support multiple flow tables for OF1.1+
                } else {
                    log.error("Table not supported in OpenFlow 1.0");
                }
            } else if (key.equals(COLUMN_ACTIONS)) {
                ActionUtils.fromString(fmb, (String) row.get(COLUMN_ACTIONS), log);
            } else if (key.equals(COLUMN_COOKIE)) {
                fmb.setCookie(StaticFlowEntries.computeEntryCookie(Integer.valueOf((String) row.get(COLUMN_COOKIE)), entryName));
            } else if (key.equals(COLUMN_PRIORITY)) {
                fmb.setPriority(U16.t(Integer.valueOf((String) row.get(COLUMN_PRIORITY))));
            } else if (key.equals(COLUMN_INSTR_APPLY_ACTIONS)) {
                InstructionUtils.applyActionsFromString(fmb, (String) row.get(COLUMN_INSTR_APPLY_ACTIONS), log);
            } else if (key.equals(COLUMN_INSTR_CLEAR_ACTIONS)) {
                InstructionUtils.clearActionsFromString(fmb, (String) row.get(COLUMN_INSTR_CLEAR_ACTIONS), log);
            } else if (key.equals(COLUMN_INSTR_EXPERIMENTER)) {
                InstructionUtils.experimenterFromString(fmb, (String) row.get(COLUMN_INSTR_EXPERIMENTER), log);
            } else if (key.equals(COLUMN_INSTR_GOTO_METER)) {
                InstructionUtils.meterFromString(fmb, (String) row.get(COLUMN_INSTR_GOTO_METER), log);
            } else if (key.equals(COLUMN_INSTR_GOTO_TABLE)) {
                InstructionUtils.gotoTableFromString(fmb, (String) row.get(COLUMN_INSTR_GOTO_TABLE), log);
            } else if (key.equals(COLUMN_INSTR_WRITE_ACTIONS)) {
                InstructionUtils.writeActionsFromString(fmb, (String) row.get(COLUMN_INSTR_WRITE_ACTIONS), log);
            } else if (key.equals(COLUMN_INSTR_WRITE_METADATA)) {
                InstructionUtils.writeMetadataFromString(fmb, (String) row.get(COLUMN_INSTR_WRITE_METADATA), log);
            } else { // the rest of the keys are for Match().fromString()
                if (matchString.length() > 0) {
                    matchString.append(",");
                }
                matchString.append(key + "=" + row.get(key).toString());
            }
        }
    } catch (ClassCastException e) {
        if (entryName != null && switchName != null) {
            log.warn("Skipping entry {} on switch {} with bad data : " + e.getMessage(), entryName, switchName);
        } else {
            log.warn("Skipping entry with bad data: {} :: {} ", e.getMessage(), e.getStackTrace());
        }
    } catch (NullPointerException e) {
        if (fmb == null) {
            log.error("Could not find switch DPID {} in the ISwitchService. Skipping entry with bad data.", switchName);
            // FIX: must bail out here; falling through with a null builder caused
            // a guaranteed NullPointerException at fmb.setMatch() below.
            return;
        }
    }

    String match = matchString.toString();

    try {
        fmb.setMatch(MatchUtils.fromString(match, fmb.getVersion()));
    } catch (IllegalArgumentException e) {
        log.error(e.toString());
        log.error("Ignoring flow entry {} on switch {} with illegal OFMatch() key: " + match, entryName, switchName);
        return;
    } catch (Exception e) {
        log.error("OF version incompatible for the match: " + match);
        e.printStackTrace();
        return;
    }

    entries.get(switchName).put(entryName, fmb.build()); // add the FlowMod message to the table
}

@Override
public void switchAdded(DatapathId switchId) {
    log.debug("Switch {} connected; processing its static entries", switchId.toString());
    sendEntriesToSwitch(switchId);
}

@Override
public void switchRemoved(DatapathId switchId) {
    // do NOT delete from our internal state; we're tracking the rules,
    // not the switches
}

@Override
public void switchActivated(DatapathId switchId) {
    // no-op
}

@Override
public void switchChanged(DatapathId switchId) {
    // no-op
}

@Override
public void switchPortChanged(DatapathId switchId, OFPortDesc port, PortChangeType type) {
    // no-op
}

@Override
public void rowsModified(String tableName, Set<Object> rowKeys) {
    // This handles both rowInsert() and rowUpdate()
    log.debug("Modifying Table {}", tableName);
    HashMap<String, Map<String, OFFlowMod>> entriesToAdd = new HashMap<String, Map<String, OFFlowMod>>();
    // build up list of what was added
    for (Object key : rowKeys) {
        IResultSet resultSet = storageSourceService.getRow(tableName, key);
        Iterator<IResultSet> it = resultSet.iterator();
        while (it.hasNext()) {
            Map<String, Object> row = it.next().getRow();
            parseRow(row, entriesToAdd);
        }
    }
    // batch updates by switch and blast them out
    for (String dpid : entriesToAdd.keySet()) {
        if (!entriesFromStorage.containsKey(dpid)) {
            entriesFromStorage.put(dpid, new HashMap<String, OFFlowMod>());
        }

        List<OFMessage> outQueue = new ArrayList<OFMessage>();

        /* For every flow per dpid, decide how to "add" the flow. */
        for (String entry : entriesToAdd.get(dpid).keySet()) {
            OFFlowMod newFlowMod = entriesToAdd.get(dpid).get(entry);
            OFFlowMod oldFlowMod = null;

            String dpidOldFlowMod = entry2dpid.get(entry);
            if (dpidOldFlowMod != null) {
                oldFlowMod = entriesFromStorage.get(dpidOldFlowMod).remove(entry);
            }

            /* Modify, which can be either a Flow MODIFY_STRICT or a Flow DELETE_STRICT with a side of Flow ADD */
            if (oldFlowMod != null && newFlowMod != null) {
                /* MODIFY_STRICT b/c the match is still the same */
                if (oldFlowMod.getMatch().equals(newFlowMod.getMatch())
                        && oldFlowMod.getCookie().equals(newFlowMod.getCookie())
                        && oldFlowMod.getPriority() == newFlowMod.getPriority()
                        && dpidOldFlowMod.equalsIgnoreCase(dpid)) {
                    log.debug("ModifyStrict SFP Flow");
                    entriesFromStorage.get(dpid).put(entry, newFlowMod);
                    entry2dpid.put(entry, dpid);
                    newFlowMod = FlowModUtils.toFlowModifyStrict(newFlowMod);
                    outQueue.add(newFlowMod);
                    /* DELETE_STRICT and then ADD b/c the match is now different */
                } else {
                    log.debug("DeleteStrict and Add SFP Flow");
                    oldFlowMod = FlowModUtils.toFlowDeleteStrict(oldFlowMod);
                    OFFlowAdd addTmp = FlowModUtils.toFlowAdd(newFlowMod);
                    /* If the flow's dpid and the current switch we're looking at are the same, add to the queue. */
                    if (dpidOldFlowMod.equals(dpid)) {
                        outQueue.add(oldFlowMod);
                        outQueue.add(addTmp);
                        /* Otherwise, go ahead and send the flows now (since queuing them will send to the wrong switch). */
                    } else {
                        writeOFMessageToSwitch(DatapathId.of(dpidOldFlowMod), oldFlowMod);
                        writeOFMessageToSwitch(DatapathId.of(dpid), FlowModUtils.toFlowAdd(newFlowMod));
                    }
                    entriesFromStorage.get(dpid).put(entry, addTmp);
                    entry2dpid.put(entry, dpid);
                }
                /* Add a brand-new flow with ADD */
            } else if (newFlowMod != null && oldFlowMod == null) {
                log.debug("Add SFP Flow");
                OFFlowAdd addTmp = FlowModUtils.toFlowAdd(newFlowMod);
                entriesFromStorage.get(dpid).put(entry, addTmp);
                entry2dpid.put(entry, dpid);
                outQueue.add(addTmp);
                /* Something strange happened, so remove the flow */
            } else if (newFlowMod == null) {
                entriesFromStorage.get(dpid).remove(entry);
                entry2dpid.remove(entry);
            }
        }
        /* Batch-write all queued messages to the switch */
        writeOFMessagesToSwitch(DatapathId.of(dpid), outQueue);
    }
}

@Override
public void rowsDeleted(String tableName, Set<Object> rowKeys) {
    if (log.isDebugEnabled()) {
        log.debug("Deleting from table {}", tableName);
    }

    for (Object obj : rowKeys) {
        if (!(obj instanceof String)) {
            log.debug("Tried to delete non-string key {}; ignoring", obj);
            continue;
        }
        deleteStaticFlowEntry((String) obj);
    }
}

@LogMessageDoc(level="ERROR",
        message="inconsistent internal state: no switch has rule {rule}",
        explanation="Inconsistent internat state discovered while " +
                "deleting a static flow rule",
        recommendation=LogMessageDoc.REPORT_CONTROLLER_BUG)
private void deleteStaticFlowEntry(String entryName) {
    String dpid = entry2dpid.remove(entryName);

    if (dpid == null) {
        // assume state has been cleared by deleteFlowsForSwitch() or
        // deleteAllFlows()
        return;
    }

    if (log.isDebugEnabled()) {
        log.debug("Sending delete flow mod for flow {} for switch {}", entryName, dpid);
    }

    // send flow_mod delete
    if (switchService.getSwitch(DatapathId.of(dpid)) != null) {
        // FIX: check our internal state BEFORE building the delete message;
        // the original built the flow mod from entriesFromStorage.get(dpid)
        // first, which NPEs when the dpid has no entry map at all.
        if (entriesFromStorage.containsKey(dpid) && entriesFromStorage.get(dpid).containsKey(entryName)) {
            OFFlowDeleteStrict flowMod = FlowModUtils.toFlowDeleteStrict(entriesFromStorage.get(dpid).get(entryName));
            entriesFromStorage.get(dpid).remove(entryName);
            writeFlowModToSwitch(DatapathId.of(dpid), flowMod);
        } else {
            log.debug("Tried to delete non-existent entry {} for switch {}", entryName, dpid);
            return;
        }
    } else {
        log.debug("Not sending flow delete for disconnected switch.");
    }
    return;
}

/**
 * Writes a list of OFMessages to a switch
 * @param dpid The datapath ID of the switch to write to
 * @param messages The list of OFMessages to write.
 */
@LogMessageDoc(level="ERROR",
        message="Tried to write to switch {switch} but got {error}",
        explanation="An I/O error occured while trying to write a " +
                "static flow to a switch",
        recommendation=LogMessageDoc.CHECK_SWITCH)
private void writeOFMessagesToSwitch(DatapathId dpid, List<OFMessage> messages) {
    IOFSwitch ofswitch = switchService.getSwitch(dpid);
    if (ofswitch != null) { // is the switch connected
        if (log.isDebugEnabled()) {
            log.debug("Sending {} new entries to {}", messages.size(), dpid);
        }
        ofswitch.write(messages);
        ofswitch.flush();
    }
}

/**
 * Writes a single OFMessage to a switch
 * @param dpid The datapath ID of the switch to write to
 * @param message The OFMessage to write.
 */
@LogMessageDoc(level="ERROR",
        message="Tried to write to switch {switch} but got {error}",
        explanation="An I/O error occured while trying to write a " +
                "static flow to a switch",
        recommendation=LogMessageDoc.CHECK_SWITCH)
private void writeOFMessageToSwitch(DatapathId dpid, OFMessage message) {
    IOFSwitch ofswitch = switchService.getSwitch(dpid);
    if (ofswitch != null) { // is the switch connected
        if (log.isDebugEnabled()) {
            log.debug("Sending 1 new entries to {}", dpid.toString());
        }
        ofswitch.write(message);
        ofswitch.flush();
    }
}

/**
 * Writes an OFFlowMod to a switch. It checks to make sure the switch
 * exists before it sends
 * @param dpid The data to write the flow mod to
 * @param flowMod The OFFlowMod to write
 */
private void writeFlowModToSwitch(DatapathId dpid, OFFlowMod flowMod) {
    IOFSwitch ofSwitch = switchService.getSwitch(dpid);
    if (ofSwitch == null) {
        if (log.isDebugEnabled()) {
            // FIX: the original message had two {} placeholders but only one
            // argument, leaving a literal "{}" in the log output.
            log.debug("Not sending flow mod :: switch {} not connected", dpid.toString());
        }
        return;
    }
    writeFlowModToSwitch(ofSwitch, flowMod);
}

/**
 * Writes an OFFlowMod to a switch
 * @param sw The IOFSwitch to write to
 * @param flowMod The OFFlowMod to write
 */
@LogMessageDoc(level="ERROR",
        message="Tried to write OFFlowMod to {switch} but got {error}",
        explanation="An I/O error occured while trying to write a " +
                "static flow to a switch",
        recommendation=LogMessageDoc.CHECK_SWITCH)
private void writeFlowModToSwitch(IOFSwitch sw, OFFlowMod flowMod) {
    sw.write(flowMod);
    sw.flush();
}

@Override
public String getName() {
    return StaticFlowName;
}

/**
 * Handles a flow removed message from a switch. If the flow was removed
 * and we did not explicitly delete it we re-install it. If we explicitly
 * removed the flow we stop the processing of the flow removed message.
 * @param sw The switch that sent the flow removed message.
 * @param msg The flow removed message.
 * @param cntx The associated context.
 * @return Whether to continue processing this message.
 */
public Command handleFlowRemoved(IOFSwitch sw, OFFlowRemoved msg, FloodlightContext cntx) {
    U64 cookie = msg.getCookie();
    /**
     * This is just to sanity check our assumption that static flows
     * never expire.
     */
    if (AppCookie.extractApp(cookie) == STATIC_FLOW_APP_ID) {
        OFFlowRemovedReason reason = null;
        switch (msg.getVersion()) {
        case OF_10:
            reason = OFFlowRemovedReasonSerializerVer10.ofWireValue((byte) msg.getReason());
            break;
        case OF_11:
            reason = OFFlowRemovedReasonSerializerVer11.ofWireValue((byte) msg.getReason());
            break;
        case OF_12:
            reason = OFFlowRemovedReasonSerializerVer12.ofWireValue((byte) msg.getReason());
            break;
        case OF_13:
            reason = OFFlowRemovedReasonSerializerVer13.ofWireValue((byte) msg.getReason());
            break;
        case OF_14:
            reason = OFFlowRemovedReasonSerializerVer14.ofWireValue((byte) msg.getReason());
            break;
        default:
            log.debug("OpenFlow version {} unsupported for OFFlowRemovedReasonSerializerVerXX", msg.getVersion());
            break;
        }
        if (reason != null) {
            if (OFFlowRemovedReason.DELETE == reason) {
                log.error("Got a FlowRemove message for a infinite "
                        + "timeout flow: {} from switch {}", msg, sw);
            } else if (OFFlowRemovedReason.HARD_TIMEOUT == reason || OFFlowRemovedReason.IDLE_TIMEOUT == reason) {
                /* Remove the Flow from the DB since it timed out */
                log.debug("Received an IDLE or HARD timeout for an SFP flow. Removing it from the SFP DB.");
                /*
                 * Lookup the flow based on the flow contents. We do not know/care about the name of the
                 * flow based on this message, but we can get the table values for this switch and search.
                 */
                String flowToRemove = null;
                Map<String, OFFlowMod> flowsByName = getFlows(sw.getId());
                // FIX: getFlows() can return null for a switch with no static
                // entries; guard before iterating.
                if (flowsByName != null) {
                    for (Map.Entry<String, OFFlowMod> entry : flowsByName.entrySet()) {
                        if (msg.getCookie().equals(entry.getValue().getCookie())
                                && (msg.getVersion().compareTo(OFVersion.OF_12) < 0 ? true : msg.getHardTimeout() == entry.getValue().getHardTimeout())
                                && msg.getIdleTimeout() == entry.getValue().getIdleTimeout()
                                && msg.getMatch().equals(entry.getValue().getMatch())
                                && msg.getPriority() == entry.getValue().getPriority()
                                && (msg.getVersion().compareTo(OFVersion.OF_10) == 0 ? true : msg.getTableId().equals(entry.getValue().getTableId()))) {
                            flowToRemove = entry.getKey();
                            break;
                        }
                    }
                }

                log.debug("Flow to Remove: {}", flowToRemove);

                /*
                 * Remove the flow. This will send the delete message to the switch,
                 * since we cannot tell the storage listener rowsdeleted() that we
                 * are only removing our local DB copy of the flow and that it actually
                 * timed out on the switch and is already gone. The switch will silently
                 * discard the delete message in this case.
                 *
                 * TODO: We should come up with a way to convey to the storage listener
                 * the reason for the flow being removed.
                 */
                if (flowToRemove != null) {
                    deleteFlow(flowToRemove);
                }
            }
            /* Stop the processing chain since we sent or asked for the delete message. */
            return Command.STOP;
        }
    }
    /* Continue the processing chain, since we did not send the delete. */
    return Command.CONTINUE;
}

@Override
@LogMessageDoc(level="ERROR",
        message="Got a FlowRemove message for a infinite "
                + "timeout flow: {flow} from switch {switch}",
        explanation="Flows with infinite timeouts should not expire. "
                + "The switch has expired the flow anyway.",
        recommendation=LogMessageDoc.REPORT_SWITCH_BUG)
public Command receive(IOFSwitch sw, OFMessage msg, FloodlightContext cntx) {
    switch (msg.getType()) {
    case FLOW_REMOVED:
        return handleFlowRemoved(sw, (OFFlowRemoved) msg, cntx);
    default:
        return Command.CONTINUE;
    }
}

@Override
public boolean isCallbackOrderingPrereq(OFType type, String name) {
    return false; // no dependency for non-packet in
}

@Override
public boolean isCallbackOrderingPostreq(OFType type, String name) {
    return false; // no dependency for non-packet in
}

// IFloodlightModule

@Override
public Collection<Class<? extends IFloodlightService>> getModuleServices() {
    Collection<Class<? extends IFloodlightService>> l = new ArrayList<Class<? extends IFloodlightService>>();
    l.add(IStaticFlowEntryPusherService.class);
    return l;
}

@Override
public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() {
    Map<Class<? extends IFloodlightService>, IFloodlightService> m = new HashMap<Class<? extends IFloodlightService>, IFloodlightService>();
    m.put(IStaticFlowEntryPusherService.class, this);
    return m;
}

@Override
public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
    Collection<Class<? extends IFloodlightService>> l = new ArrayList<Class<? extends IFloodlightService>>();
    l.add(IFloodlightProviderService.class);
    l.add(IOFSwitchService.class);
    l.add(IStorageSourceService.class);
    l.add(IRestApiService.class);
    return l;
}

@Override
public void init(FloodlightModuleContext context) throws FloodlightModuleException {
    floodlightProviderService = context.getServiceImpl(IFloodlightProviderService.class);
    switchService = context.getServiceImpl(IOFSwitchService.class);
    storageSourceService = context.getServiceImpl(IStorageSourceService.class);
    restApiService = context.getServiceImpl(IRestApiService.class);
    haListener = new HAListenerDelegate();
}

@Override
public void startUp(FloodlightModuleContext context) {
    floodlightProviderService.addOFMessageListener(OFType.FLOW_REMOVED, this);
    switchService.addOFSwitchListener(this);
    floodlightProviderService.addHAListener(this.haListener);

    // assumes no switches connected at startup()
    storageSourceService.createTable(TABLE_NAME, null);
    storageSourceService.setTablePrimaryKeyName(TABLE_NAME, COLUMN_NAME);
    storageSourceService.addListener(TABLE_NAME, this);
    entriesFromStorage = readEntriesFromStorage();
    entry2dpid = computeEntry2DpidMap(entriesFromStorage);

    restApiService.addRestletRoutable(new StaticFlowEntryWebRoutable());
}

// IStaticFlowEntryPusherService methods

@Override
public void addFlow(String name, OFFlowMod fm, DatapathId swDpid) {
    try {
        Map<String, Object> fmMap = StaticFlowEntries.flowModToStorageEntry(fm, swDpid.toString(), name);
        storageSourceService.insertRowAsync(TABLE_NAME, fmMap);
    } catch (Exception e) {
        log.error("Error! Check the fields specified for the flow.Make sure IPv4 fields are not mixed with IPv6 fields or all "
                + "mandatory fields are specified. ");
    }
}

@Override
public void deleteFlow(String name) {
    storageSourceService.deleteRowAsync(TABLE_NAME, name);
}

@Override
public void deleteAllFlows() {
    for (String entry : entry2dpid.keySet()) {
        deleteFlow(entry);
    }

    /*
    FIXME: Since the OF spec 1.0 is not clear on how to match on cookies.
    Once all switches come to a common implementation we can possibly
    re-enable this fix.

    // Send a delete for each switch
    Set<String> swSet = new HashSet<String>();
    for (String dpid : entry2dpid.values()) {
        // Avoid sending duplicate deletes
        if (!swSet.contains(dpid)) {
            swSet.add(dpid);
            sendDeleteByCookie(HexString.toLong(dpid));
        }
    }

    // Clear our map
    entry2dpid.clear();

    // Clear our book keeping map
    for (Map<String, OFFlowMod> eMap : entriesFromStorage.values()) {
        eMap.clear();
    }

    // Reset our DB
    storageSource.deleteMatchingRowsAsync(TABLE_NAME, null);
     */
}

@Override
public void deleteFlowsForSwitch(DatapathId dpid) {
    String sDpid = dpid.toString();

    for (Entry<String, String> e : entry2dpid.entrySet()) {
        if (e.getValue().equals(sDpid)) {
            deleteFlow(e.getKey());
        }
    }

    /*
    FIXME: Since the OF spec 1.0 is not clear on how to match on cookies.
    Once all switches come to a common implementation we can possibly
    re-enable this fix.
    //sendDeleteByCookie(dpid);

    String sDpid = HexString.toHexString(dpid);
    // Clear all internal flows for this switch
    Map<String, OFFlowMod> sMap = entriesFromStorage.get(sDpid);
    if (sMap != null) {
        for (String entryName : sMap.keySet()) {
            entry2dpid.remove(entryName);
            // Delete from DB
            deleteFlow(entryName);
        }
        sMap.clear();
    } else {
        log.warn("Map of storage entries for switch {} was null", sDpid);
    }
     */
}

/**
 * Deletes all flows installed by static flow pusher on a given switch.
 * We send a delete flow mod with the static flow pusher app ID in the cookie.
 * Since OF1.0 doesn't support masking based on the cookie we have to
 * disable having flow specific cookies.
 * @param dpid The DPID of the switch to clear all it's flows.
 */
/*
FIXME: Since the OF spec 1.0 is not clear on how to match on cookies.
Once all switches come to a common implementation we can possibly
re-enable this fix.
private void sendDeleteByCookie(long dpid) {
    if (log.isDebugEnabled())
        log.debug("Deleting all static flows on switch {}", HexString.toHexString(dpid));

    IOFSwitch sw = floodlightProvider.getSwitch(dpid);
    if (sw == null) {
        log.warn("Tried to delete static flows for non-existant switch {}", HexString.toHexString(dpid));
        return;
    }

    OFFlowMod fm = (OFFlowMod) floodlightProvider.getOFMessageFactory().getMessage(OFType.FLOW_MOD);
    OFMatch ofm = new OFMatch();
    fm.setMatch(ofm);
    fm.setCookie(AppCookie.makeCookie(StaticFlowEntryPusher.STATIC_FLOW_APP_ID, 0));
    fm.setCommand(OFFlowMod.OFPFC_DELETE);
    fm.setOutPort(OFPort.OFPP_NONE);

    try {
        sw.write(fm, null);
        sw.flush();
    } catch (IOException e1) {
        log.error("Error deleting all flows for switch {}:\n {}", HexString.toHexString(dpid), e1.getMessage());
        return;
    }
}
 */

@Override
public Map<String, Map<String, OFFlowMod>> getFlows() {
    return entriesFromStorage;
}

@Override
public Map<String, OFFlowMod> getFlows(DatapathId dpid) {
    // NOTE(review): may return null when no static entries exist for this
    // switch; callers must null-check.
    return entriesFromStorage.get(dpid.toString());
}

// IHAListener

private class HAListenerDelegate implements IHAListener {
    @Override
    public void transitionToActive() {
        log.debug("Re-reading static flows from storage due "
                + "to HA change from STANDBY->ACTIVE");
        entriesFromStorage = readEntriesFromStorage();
        entry2dpid = computeEntry2DpidMap(entriesFromStorage);
    }

    @Override
    public void controllerNodeIPsChanged(
            Map<String, String> curControllerNodeIPs,
            Map<String, String> addedControllerNodeIPs,
            Map<String, String> removedControllerNodeIPs) {
        // ignore
    }

    @Override
    public String getName() {
        return StaticFlowEntryPusher.this.getName();
    }

    @Override
    public boolean isCallbackOrderingPrereq(HAListenerTypeMarker type, String name) {
        return false;
    }

    @Override
    public boolean isCallbackOrderingPostreq(HAListenerTypeMarker type, String name) {
        return false;
    }

    @Override
    public void transitionToStandby() {
        log.debug("Controller is now in STANDBY role. Clearing static flow entries from store.");
        deleteAllFlows();
    }
}
}
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.licensemanager; import javax.annotation.Generated; import com.amazonaws.services.licensemanager.model.*; /** * Interface for accessing AWS License Manager asynchronously. Each asynchronous method will return a Java Future object * representing the asynchronous operation; overloads which accept an {@code AsyncHandler} can be used to receive * notification when an asynchronous operation completes. * <p> * <b>Note:</b> Do not directly implement this interface, new methods are added to it regularly. Extend from * {@link com.amazonaws.services.licensemanager.AbstractAWSLicenseManagerAsync} instead. * </p> * <p> * <fullname> AWS License Manager </fullname> * <p> * <i>This is the AWS License Manager API Reference.</i> It provides descriptions, syntax, and usage examples for each * of the actions and data types for License Manager. The topic for each action shows the Query API request parameters * and the XML response. You can also view the XML request elements in the WSDL. * </p> * <p> * Alternatively, you can use one of the AWS SDKs to access an API that's tailored to the programming language or * platform that you're using. For more information, see <a href="http://aws.amazon.com/tools/#SDKs">AWS SDKs</a>. 
* </p> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public interface AWSLicenseManagerAsync extends AWSLicenseManager { /** * <p> * Creates a new license configuration object. A license configuration is an abstraction of a customer license * agreement that can be consumed and enforced by License Manager. Components include specifications for the license * type (licensing by instance, socket, CPU, or VCPU), tenancy (shared tenancy, Amazon EC2 Dedicated Instance, * Amazon EC2 Dedicated Host, or any of these), host affinity (how long a VM must be associated with a host), the * number of licenses purchased and used. * </p> * * @param createLicenseConfigurationRequest * @return A Java Future containing the result of the CreateLicenseConfiguration operation returned by the service. * @sample AWSLicenseManagerAsync.CreateLicenseConfiguration * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/CreateLicenseConfiguration" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<CreateLicenseConfigurationResult> createLicenseConfigurationAsync( CreateLicenseConfigurationRequest createLicenseConfigurationRequest); /** * <p> * Creates a new license configuration object. A license configuration is an abstraction of a customer license * agreement that can be consumed and enforced by License Manager. Components include specifications for the license * type (licensing by instance, socket, CPU, or VCPU), tenancy (shared tenancy, Amazon EC2 Dedicated Instance, * Amazon EC2 Dedicated Host, or any of these), host affinity (how long a VM must be associated with a host), the * number of licenses purchased and used. * </p> * * @param createLicenseConfigurationRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. 
* @return A Java Future containing the result of the CreateLicenseConfiguration operation returned by the service. * @sample AWSLicenseManagerAsyncHandler.CreateLicenseConfiguration * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/CreateLicenseConfiguration" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<CreateLicenseConfigurationResult> createLicenseConfigurationAsync( CreateLicenseConfigurationRequest createLicenseConfigurationRequest, com.amazonaws.handlers.AsyncHandler<CreateLicenseConfigurationRequest, CreateLicenseConfigurationResult> asyncHandler); /** * <p> * Deletes an existing license configuration. This action fails if the configuration is in use. * </p> * * @param deleteLicenseConfigurationRequest * @return A Java Future containing the result of the DeleteLicenseConfiguration operation returned by the service. * @sample AWSLicenseManagerAsync.DeleteLicenseConfiguration * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/DeleteLicenseConfiguration" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<DeleteLicenseConfigurationResult> deleteLicenseConfigurationAsync( DeleteLicenseConfigurationRequest deleteLicenseConfigurationRequest); /** * <p> * Deletes an existing license configuration. This action fails if the configuration is in use. * </p> * * @param deleteLicenseConfigurationRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. * @return A Java Future containing the result of the DeleteLicenseConfiguration operation returned by the service. 
* @sample AWSLicenseManagerAsyncHandler.DeleteLicenseConfiguration * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/DeleteLicenseConfiguration" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<DeleteLicenseConfigurationResult> deleteLicenseConfigurationAsync( DeleteLicenseConfigurationRequest deleteLicenseConfigurationRequest, com.amazonaws.handlers.AsyncHandler<DeleteLicenseConfigurationRequest, DeleteLicenseConfigurationResult> asyncHandler); /** * <p> * Returns a detailed description of a license configuration. * </p> * * @param getLicenseConfigurationRequest * @return A Java Future containing the result of the GetLicenseConfiguration operation returned by the service. * @sample AWSLicenseManagerAsync.GetLicenseConfiguration * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/GetLicenseConfiguration" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<GetLicenseConfigurationResult> getLicenseConfigurationAsync(GetLicenseConfigurationRequest getLicenseConfigurationRequest); /** * <p> * Returns a detailed description of a license configuration. * </p> * * @param getLicenseConfigurationRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. * @return A Java Future containing the result of the GetLicenseConfiguration operation returned by the service. 
* @sample AWSLicenseManagerAsyncHandler.GetLicenseConfiguration * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/GetLicenseConfiguration" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<GetLicenseConfigurationResult> getLicenseConfigurationAsync(GetLicenseConfigurationRequest getLicenseConfigurationRequest, com.amazonaws.handlers.AsyncHandler<GetLicenseConfigurationRequest, GetLicenseConfigurationResult> asyncHandler); /** * <p> * Gets License Manager settings for a region. Exposes the configured S3 bucket, SNS topic, etc., for inspection. * </p> * * @param getServiceSettingsRequest * @return A Java Future containing the result of the GetServiceSettings operation returned by the service. * @sample AWSLicenseManagerAsync.GetServiceSettings * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/GetServiceSettings" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<GetServiceSettingsResult> getServiceSettingsAsync(GetServiceSettingsRequest getServiceSettingsRequest); /** * <p> * Gets License Manager settings for a region. Exposes the configured S3 bucket, SNS topic, etc., for inspection. * </p> * * @param getServiceSettingsRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. * @return A Java Future containing the result of the GetServiceSettings operation returned by the service. 
* @sample AWSLicenseManagerAsyncHandler.GetServiceSettings * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/GetServiceSettings" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<GetServiceSettingsResult> getServiceSettingsAsync(GetServiceSettingsRequest getServiceSettingsRequest, com.amazonaws.handlers.AsyncHandler<GetServiceSettingsRequest, GetServiceSettingsResult> asyncHandler); /** * <p> * Lists the resource associations for a license configuration. Resource associations need not consume licenses from * a license configuration. For example, an AMI or a stopped instance may not consume a license (depending on the * license rules). Use this operation to find all resources associated with a license configuration. * </p> * * @param listAssociationsForLicenseConfigurationRequest * @return A Java Future containing the result of the ListAssociationsForLicenseConfiguration operation returned by * the service. * @sample AWSLicenseManagerAsync.ListAssociationsForLicenseConfiguration * @see <a * href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/ListAssociationsForLicenseConfiguration" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<ListAssociationsForLicenseConfigurationResult> listAssociationsForLicenseConfigurationAsync( ListAssociationsForLicenseConfigurationRequest listAssociationsForLicenseConfigurationRequest); /** * <p> * Lists the resource associations for a license configuration. Resource associations need not consume licenses from * a license configuration. For example, an AMI or a stopped instance may not consume a license (depending on the * license rules). Use this operation to find all resources associated with a license configuration. * </p> * * @param listAssociationsForLicenseConfigurationRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. 
Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. * @return A Java Future containing the result of the ListAssociationsForLicenseConfiguration operation returned by * the service. * @sample AWSLicenseManagerAsyncHandler.ListAssociationsForLicenseConfiguration * @see <a * href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/ListAssociationsForLicenseConfiguration" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<ListAssociationsForLicenseConfigurationResult> listAssociationsForLicenseConfigurationAsync( ListAssociationsForLicenseConfigurationRequest listAssociationsForLicenseConfigurationRequest, com.amazonaws.handlers.AsyncHandler<ListAssociationsForLicenseConfigurationRequest, ListAssociationsForLicenseConfigurationResult> asyncHandler); /** * <p> * Lists license configuration objects for an account, each containing the name, description, license type, and * other license terms modeled from a license agreement. * </p> * * @param listLicenseConfigurationsRequest * @return A Java Future containing the result of the ListLicenseConfigurations operation returned by the service. * @sample AWSLicenseManagerAsync.ListLicenseConfigurations * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/ListLicenseConfigurations" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<ListLicenseConfigurationsResult> listLicenseConfigurationsAsync( ListLicenseConfigurationsRequest listLicenseConfigurationsRequest); /** * <p> * Lists license configuration objects for an account, each containing the name, description, license type, and * other license terms modeled from a license agreement. * </p> * * @param listLicenseConfigurationsRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. 
Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. * @return A Java Future containing the result of the ListLicenseConfigurations operation returned by the service. * @sample AWSLicenseManagerAsyncHandler.ListLicenseConfigurations * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/ListLicenseConfigurations" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<ListLicenseConfigurationsResult> listLicenseConfigurationsAsync( ListLicenseConfigurationsRequest listLicenseConfigurationsRequest, com.amazonaws.handlers.AsyncHandler<ListLicenseConfigurationsRequest, ListLicenseConfigurationsResult> asyncHandler); /** * <p> * Returns the license configuration for a resource. * </p> * * @param listLicenseSpecificationsForResourceRequest * @return A Java Future containing the result of the ListLicenseSpecificationsForResource operation returned by the * service. * @sample AWSLicenseManagerAsync.ListLicenseSpecificationsForResource * @see <a * href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/ListLicenseSpecificationsForResource" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<ListLicenseSpecificationsForResourceResult> listLicenseSpecificationsForResourceAsync( ListLicenseSpecificationsForResourceRequest listLicenseSpecificationsForResourceRequest); /** * <p> * Returns the license configuration for a resource. * </p> * * @param listLicenseSpecificationsForResourceRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. * @return A Java Future containing the result of the ListLicenseSpecificationsForResource operation returned by the * service. 
* @sample AWSLicenseManagerAsyncHandler.ListLicenseSpecificationsForResource * @see <a * href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/ListLicenseSpecificationsForResource" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<ListLicenseSpecificationsForResourceResult> listLicenseSpecificationsForResourceAsync( ListLicenseSpecificationsForResourceRequest listLicenseSpecificationsForResourceRequest, com.amazonaws.handlers.AsyncHandler<ListLicenseSpecificationsForResourceRequest, ListLicenseSpecificationsForResourceResult> asyncHandler); /** * <p> * Returns a detailed list of resources. * </p> * * @param listResourceInventoryRequest * @return A Java Future containing the result of the ListResourceInventory operation returned by the service. * @sample AWSLicenseManagerAsync.ListResourceInventory * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/ListResourceInventory" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<ListResourceInventoryResult> listResourceInventoryAsync(ListResourceInventoryRequest listResourceInventoryRequest); /** * <p> * Returns a detailed list of resources. * </p> * * @param listResourceInventoryRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. * @return A Java Future containing the result of the ListResourceInventory operation returned by the service. 
* @sample AWSLicenseManagerAsyncHandler.ListResourceInventory * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/ListResourceInventory" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<ListResourceInventoryResult> listResourceInventoryAsync(ListResourceInventoryRequest listResourceInventoryRequest, com.amazonaws.handlers.AsyncHandler<ListResourceInventoryRequest, ListResourceInventoryResult> asyncHandler); /** * <p> * Lists tags attached to a resource. * </p> * * @param listTagsForResourceRequest * @return A Java Future containing the result of the ListTagsForResource operation returned by the service. * @sample AWSLicenseManagerAsync.ListTagsForResource * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/ListTagsForResource" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest listTagsForResourceRequest); /** * <p> * Lists tags attached to a resource. * </p> * * @param listTagsForResourceRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. * @return A Java Future containing the result of the ListTagsForResource operation returned by the service. 
* @sample AWSLicenseManagerAsyncHandler.ListTagsForResource * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/ListTagsForResource" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest listTagsForResourceRequest, com.amazonaws.handlers.AsyncHandler<ListTagsForResourceRequest, ListTagsForResourceResult> asyncHandler); /** * <p> * Lists all license usage records for a license configuration, displaying license consumption details by resource * at a selected point in time. Use this action to audit the current license consumption for any license inventory * and configuration. * </p> * * @param listUsageForLicenseConfigurationRequest * @return A Java Future containing the result of the ListUsageForLicenseConfiguration operation returned by the * service. * @sample AWSLicenseManagerAsync.ListUsageForLicenseConfiguration * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/ListUsageForLicenseConfiguration" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<ListUsageForLicenseConfigurationResult> listUsageForLicenseConfigurationAsync( ListUsageForLicenseConfigurationRequest listUsageForLicenseConfigurationRequest); /** * <p> * Lists all license usage records for a license configuration, displaying license consumption details by resource * at a selected point in time. Use this action to audit the current license consumption for any license inventory * and configuration. * </p> * * @param listUsageForLicenseConfigurationRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. 
     * @return A Java Future containing the result of the ListUsageForLicenseConfiguration operation returned by the
     *         service.
     * @sample AWSLicenseManagerAsyncHandler.ListUsageForLicenseConfiguration
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/ListUsageForLicenseConfiguration"
     *      target="_top">AWS API Documentation</a>
     */
    java.util.concurrent.Future<ListUsageForLicenseConfigurationResult> listUsageForLicenseConfigurationAsync(
            ListUsageForLicenseConfigurationRequest listUsageForLicenseConfigurationRequest,
            com.amazonaws.handlers.AsyncHandler<ListUsageForLicenseConfigurationRequest, ListUsageForLicenseConfigurationResult> asyncHandler);

    /**
     * <p>
     * Attach one or more tags to any resource.
     * </p>
     *
     * @param tagResourceRequest
     * @return A Java Future containing the result of the TagResource operation returned by the service.
     * @sample AWSLicenseManagerAsync.TagResource
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/TagResource" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest tagResourceRequest);

    /**
     * <p>
     * Attach one or more tags to any resource.
     * </p>
     *
     * @param tagResourceRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the TagResource operation returned by the service.
* @sample AWSLicenseManagerAsyncHandler.TagResource * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/TagResource" target="_top">AWS * API Documentation</a> */ java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest tagResourceRequest, com.amazonaws.handlers.AsyncHandler<TagResourceRequest, TagResourceResult> asyncHandler); /** * <p> * Remove tags from a resource. * </p> * * @param untagResourceRequest * @return A Java Future containing the result of the UntagResource operation returned by the service. * @sample AWSLicenseManagerAsync.UntagResource * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/UntagResource" target="_top">AWS * API Documentation</a> */ java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest untagResourceRequest); /** * <p> * Remove tags from a resource. * </p> * * @param untagResourceRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. * @return A Java Future containing the result of the UntagResource operation returned by the service. * @sample AWSLicenseManagerAsyncHandler.UntagResource * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/UntagResource" target="_top">AWS * API Documentation</a> */ java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest untagResourceRequest, com.amazonaws.handlers.AsyncHandler<UntagResourceRequest, UntagResourceResult> asyncHandler); /** * <p> * Modifies the attributes of an existing license configuration object. A license configuration is an abstraction of * a customer license agreement that can be consumed and enforced by License Manager. 
Components include * specifications for the license type (Instances, cores, sockets, VCPUs), tenancy (shared or Dedicated Host), host * affinity (how long a VM is associated with a host), the number of licenses purchased and used. * </p> * * @param updateLicenseConfigurationRequest * @return A Java Future containing the result of the UpdateLicenseConfiguration operation returned by the service. * @sample AWSLicenseManagerAsync.UpdateLicenseConfiguration * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/UpdateLicenseConfiguration" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<UpdateLicenseConfigurationResult> updateLicenseConfigurationAsync( UpdateLicenseConfigurationRequest updateLicenseConfigurationRequest); /** * <p> * Modifies the attributes of an existing license configuration object. A license configuration is an abstraction of * a customer license agreement that can be consumed and enforced by License Manager. Components include * specifications for the license type (Instances, cores, sockets, VCPUs), tenancy (shared or Dedicated Host), host * affinity (how long a VM is associated with a host), the number of licenses purchased and used. * </p> * * @param updateLicenseConfigurationRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. * @return A Java Future containing the result of the UpdateLicenseConfiguration operation returned by the service. 
* @sample AWSLicenseManagerAsyncHandler.UpdateLicenseConfiguration * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/UpdateLicenseConfiguration" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<UpdateLicenseConfigurationResult> updateLicenseConfigurationAsync( UpdateLicenseConfigurationRequest updateLicenseConfigurationRequest, com.amazonaws.handlers.AsyncHandler<UpdateLicenseConfigurationRequest, UpdateLicenseConfigurationResult> asyncHandler); /** * <p> * Adds or removes license configurations for a specified AWS resource. This operation currently supports updating * the license specifications of AMIs, instances, and hosts. Launch templates and AWS CloudFormation templates are * not managed from this operation as those resources send the license configurations directly to a resource * creation operation, such as <code>RunInstances</code>. * </p> * * @param updateLicenseSpecificationsForResourceRequest * @return A Java Future containing the result of the UpdateLicenseSpecificationsForResource operation returned by * the service. * @sample AWSLicenseManagerAsync.UpdateLicenseSpecificationsForResource * @see <a * href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/UpdateLicenseSpecificationsForResource" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<UpdateLicenseSpecificationsForResourceResult> updateLicenseSpecificationsForResourceAsync( UpdateLicenseSpecificationsForResourceRequest updateLicenseSpecificationsForResourceRequest); /** * <p> * Adds or removes license configurations for a specified AWS resource. This operation currently supports updating * the license specifications of AMIs, instances, and hosts. Launch templates and AWS CloudFormation templates are * not managed from this operation as those resources send the license configurations directly to a resource * creation operation, such as <code>RunInstances</code>. 
* </p> * * @param updateLicenseSpecificationsForResourceRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. * @return A Java Future containing the result of the UpdateLicenseSpecificationsForResource operation returned by * the service. * @sample AWSLicenseManagerAsyncHandler.UpdateLicenseSpecificationsForResource * @see <a * href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/UpdateLicenseSpecificationsForResource" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<UpdateLicenseSpecificationsForResourceResult> updateLicenseSpecificationsForResourceAsync( UpdateLicenseSpecificationsForResourceRequest updateLicenseSpecificationsForResourceRequest, com.amazonaws.handlers.AsyncHandler<UpdateLicenseSpecificationsForResourceRequest, UpdateLicenseSpecificationsForResourceResult> asyncHandler); /** * <p> * Updates License Manager service settings. * </p> * * @param updateServiceSettingsRequest * @return A Java Future containing the result of the UpdateServiceSettings operation returned by the service. * @sample AWSLicenseManagerAsync.UpdateServiceSettings * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/UpdateServiceSettings" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<UpdateServiceSettingsResult> updateServiceSettingsAsync(UpdateServiceSettingsRequest updateServiceSettingsRequest); /** * <p> * Updates License Manager service settings. * </p> * * @param updateServiceSettingsRequest * @param asyncHandler * Asynchronous callback handler for events in the lifecycle of the request. Users can provide an * implementation of the callback methods in this interface to receive notification of successful or * unsuccessful completion of the operation. 
* @return A Java Future containing the result of the UpdateServiceSettings operation returned by the service. * @sample AWSLicenseManagerAsyncHandler.UpdateServiceSettings * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/license-manager-2018-08-01/UpdateServiceSettings" * target="_top">AWS API Documentation</a> */ java.util.concurrent.Future<UpdateServiceSettingsResult> updateServiceSettingsAsync(UpdateServiceSettingsRequest updateServiceSettingsRequest, com.amazonaws.handlers.AsyncHandler<UpdateServiceSettingsRequest, UpdateServiceSettingsResult> asyncHandler); }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.execute; import java.io.IOException; import java.nio.MappedByteBuffer; import java.sql.ParameterMetaData; import java.sql.SQLException; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Queue; import java.util.Set; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.util.Bytes; import org.apache.phoenix.compile.ExplainPlan; import org.apache.phoenix.compile.GroupByCompiler.GroupBy; import org.apache.phoenix.compile.OrderByCompiler.OrderBy; import org.apache.phoenix.compile.QueryPlan; import org.apache.phoenix.compile.RowProjector; import org.apache.phoenix.compile.StatementContext; import org.apache.phoenix.exception.SQLExceptionCode; import org.apache.phoenix.exception.SQLExceptionInfo; import org.apache.phoenix.execute.TupleProjector.ProjectedValueTuple; import org.apache.phoenix.expression.Expression; import 
org.apache.phoenix.iterate.DefaultParallelScanGrouper;
import org.apache.phoenix.iterate.MappedByteBufferQueue;
import org.apache.phoenix.iterate.ParallelScanGrouper;
import org.apache.phoenix.iterate.ResultIterator;
import org.apache.phoenix.jdbc.PhoenixParameterMetaData;
import org.apache.phoenix.jdbc.PhoenixStatement.Operation;
import org.apache.phoenix.parse.FilterableStatement;
import org.apache.phoenix.parse.JoinTableNode.JoinType;
import org.apache.phoenix.query.KeyRange;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.query.QueryServicesOptions;
import org.apache.phoenix.schema.KeyValueSchema;
import org.apache.phoenix.schema.KeyValueSchema.KeyValueSchemaBuilder;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.TableRef;
import org.apache.phoenix.schema.ValueBitSet;
import org.apache.phoenix.schema.tuple.ResultTuple;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.util.ResultUtil;
import org.apache.phoenix.util.SchemaUtil;

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

/**
 * A {@link QueryPlan} that implements a sort-merge join over two child plans.
 * Semi and Anti joins are served by a dedicated iterator; other supported join
 * types share the basic merge iterator (see {@code iterator(ParallelScanGrouper)}).
 * Right joins are rejected by the constructor — presumably the compiler rewrites
 * them as Left joins before building this plan (TODO confirm).
 */
public class SortMergeJoinPlan implements QueryPlan {
    // Shared empty byte array; presumably used later in the class as a placeholder pointer value.
    private static final byte[] EMPTY_PTR = new byte[0];

    private final StatementContext context;
    private final FilterableStatement statement;
    // Table this plan reports through getTableRef().
    private final TableRef table;
    // Join type; JoinType.Right is not allowed (see constructor).
    private final JoinType type;
    // The two pre-built child plans being merged.
    private final QueryPlan lhsPlan;
    private final QueryPlan rhsPlan;
    // Join-key expressions, evaluated against each side's tuples (see JoinKey usage below).
    private final List<Expression> lhsKeyExpressions;
    private final List<Expression> rhsKeyExpressions;
    // Value schemas built from the non-PK columns of the joined/lhs/rhs tables (see buildSchema).
    private final KeyValueSchema joinedSchema;
    private final KeyValueSchema lhsSchema;
    private final KeyValueSchema rhsSchema;
    // Field offset at which rhs values are placed in the joined row — assumed; confirm in join().
    private final int rhsFieldPosition;
    // True when the rhs is a single-row subquery; duplicate rhs matches then raise an exception.
    private final boolean isSingleValueOnly;
    // Union of the source refs of both child plans.
    private final Set<TableRef> tableRefs;
    // Spool threshold (bytes) handed to the buffering tuple queue.
    private final int thresholdBytes;

    /**
     * Builds a sort-merge join plan over {@code lhsPlan} and {@code rhsPlan}.
     *
     * @throws IllegalArgumentException if {@code type} is {@link JoinType#Right}
     */
    public SortMergeJoinPlan(StatementContext context, FilterableStatement statement, TableRef table,
            JoinType type, QueryPlan lhsPlan, QueryPlan rhsPlan, List<Expression>
lhsKeyExpressions, List<Expression> rhsKeyExpressions, PTable joinedTable, PTable lhsTable,
            PTable rhsTable, int rhsFieldPosition, boolean isSingleValueOnly) {
        // Right joins are unsupported here — presumably rewritten to Left upstream; TODO confirm.
        if (type == JoinType.Right) throw new IllegalArgumentException("JoinType should not be " + type);
        this.context = context;
        this.statement = statement;
        this.table = table;
        this.type = type;
        this.lhsPlan = lhsPlan;
        this.rhsPlan = rhsPlan;
        this.lhsKeyExpressions = lhsKeyExpressions;
        this.rhsKeyExpressions = rhsKeyExpressions;
        // Value schemas are derived from the non-PK columns of each table (see buildSchema below).
        this.joinedSchema = buildSchema(joinedTable);
        this.lhsSchema = buildSchema(lhsTable);
        this.rhsSchema = buildSchema(rhsTable);
        this.rhsFieldPosition = rhsFieldPosition;
        this.isSingleValueOnly = isSingleValueOnly;
        // Source refs of the join are the union of both children's source refs.
        this.tableRefs = Sets.newHashSetWithExpectedSize(lhsPlan.getSourceRefs().size() + rhsPlan.getSourceRefs().size());
        this.tableRefs.addAll(lhsPlan.getSourceRefs());
        this.tableRefs.addAll(rhsPlan.getSourceRefs());
        // Spool threshold comes from connection-level configuration, falling back to the default.
        this.thresholdBytes = context.getConnection().getQueryServices().getProps().getInt(
                QueryServices.SPOOL_THRESHOLD_BYTES_ATTRIB, QueryServicesOptions.DEFAULT_SPOOL_THRESHOLD_BYTES);
    }

    // Reports the operation of the underlying statement.
    @Override
    public Operation getOperation() {
        return statement.getOperation();
    }

    /**
     * Builds a {@link KeyValueSchema} containing the non-PK columns of {@code table};
     * returns an empty schema when {@code table} is null.
     */
    private static KeyValueSchema buildSchema(PTable table) {
        KeyValueSchemaBuilder builder = new KeyValueSchemaBuilder(0);
        if (table != null) {
            for (PColumn column : table.getColumns()) {
                if (!SchemaUtil.isPKColumn(column)) {
                    builder.addField(column);
                }
            }
        }
        return builder.build();
    }

    /**
     * Creates the merge iterator over both children: Semi/Anti joins get the
     * specialized SemiAntiJoinIterator, everything else the BasicJoinIterator.
     */
    @Override
    public ResultIterator iterator(ParallelScanGrouper scanGrouper) throws SQLException {
        return type == JoinType.Semi || type == JoinType.Anti ?
new SemiAntiJoinIterator(lhsPlan.iterator(scanGrouper), rhsPlan.iterator(scanGrouper)) :
                new BasicJoinIterator(lhsPlan.iterator(scanGrouper), rhsPlan.iterator(scanGrouper));
    }

    // Convenience overload using the default scan grouper.
    @Override
    public ResultIterator iterator() throws SQLException {
        return iterator(DefaultParallelScanGrouper.getInstance());
    }

    /**
     * Produces the textual explain plan: a SORT-MERGE-JOIN header followed by the
     * plans of both children; the rhs section is marked SKIP MERGE when the rhs
     * schema has no fields.
     */
    @Override
    public ExplainPlan getExplainPlan() throws SQLException {
        List<String> steps = Lists.newArrayList();
        steps.add("SORT-MERGE-JOIN (" + type.toString().toUpperCase() + ") TABLES");
        for (String step : lhsPlan.getExplainPlan().getPlanSteps()) {
            steps.add(" " + step);
        }
        steps.add("AND" + (rhsSchema.getFieldCount() == 0 ? " (SKIP MERGE)" : ""));
        for (String step : rhsPlan.getExplainPlan().getPlanSteps()) {
            steps.add(" " + step);
        }
        return new ExplainPlan(steps);
    }

    @Override
    public StatementContext getContext() {
        return context;
    }

    // No bind parameters are contributed at this level.
    @Override
    public ParameterMetaData getParameterMetaData() {
        return PhoenixParameterMetaData.EMPTY_PARAMETER_META_DATA;
    }

    // Estimated size is the sum of both children's estimates.
    @Override
    public long getEstimatedSize() {
        return lhsPlan.getEstimatedSize() + rhsPlan.getEstimatedSize();
    }

    @Override
    public TableRef getTableRef() {
        return table;
    }

    // The following plan attributes do not apply to a sort-merge join and return null/empty values.
    @Override
    public RowProjector getProjector() {
        return null;
    }

    @Override
    public Integer getLimit() {
        return null;
    }

    @Override
    public OrderBy getOrderBy() {
        return null;
    }

    @Override
    public GroupBy getGroupBy() {
        return null;
    }

    @Override
    public List<KeyRange> getSplits() {
        return Collections.<KeyRange> emptyList();
    }

    @Override
    public List<List<Scan>> getScans() {
        return Collections.<List<Scan>> emptyList();
    }

    @Override
    public FilterableStatement getStatement() {
        return statement;
    }

    @Override
    public boolean isDegenerate() {
        return false;
    }

    // The joined output makes no row-key ordering claim.
    @Override
    public boolean isRowKeyOrdered() {
        return false;
    }

    /**
     * Merge iterator for the non-Semi/Anti join types (Inner/Left/Full appear in
     * its branch logic). Relies on both child iterators producing tuples ordered
     * by their join keys; duplicate rhs tuples for a key are buffered in a queue
     * so they can be replayed against successive equal lhs keys.
     */
    private class BasicJoinIterator implements ResultIterator {
        private final ResultIterator lhsIterator;
        private final ResultIterator rhsIterator;
        // Set once init() has primed the look-ahead state.
        private boolean initialized;
        // Current tuple and its evaluated join key on each side.
        private Tuple lhsTuple;
        private Tuple rhsTuple;
        private JoinKey lhsKey;
        private JoinKey rhsKey;
private Tuple nextLhsTuple; private Tuple nextRhsTuple; private JoinKey nextLhsKey; private JoinKey nextRhsKey; private ValueBitSet destBitSet; private ValueBitSet lhsBitSet; private ValueBitSet rhsBitSet; private byte[] emptyProjectedValue; private MappedByteBufferTupleQueue queue; private Iterator<Tuple> queueIterator; public BasicJoinIterator(ResultIterator lhsIterator, ResultIterator rhsIterator) { this.lhsIterator = lhsIterator; this.rhsIterator = rhsIterator; this.initialized = false; this.lhsTuple = null; this.rhsTuple = null; this.lhsKey = new JoinKey(lhsKeyExpressions); this.rhsKey = new JoinKey(rhsKeyExpressions); this.nextLhsTuple = null; this.nextRhsTuple = null; this.nextLhsKey = new JoinKey(lhsKeyExpressions); this.nextRhsKey = new JoinKey(rhsKeyExpressions); this.destBitSet = ValueBitSet.newInstance(joinedSchema); this.lhsBitSet = ValueBitSet.newInstance(lhsSchema); this.rhsBitSet = ValueBitSet.newInstance(rhsSchema); lhsBitSet.clear(); int len = lhsBitSet.getEstimatedLength(); this.emptyProjectedValue = new byte[len]; lhsBitSet.toBytes(emptyProjectedValue, 0); this.queue = new MappedByteBufferTupleQueue(thresholdBytes); this.queueIterator = null; } @Override public void close() throws SQLException { lhsIterator.close(); rhsIterator.close(); queue.close(); } @Override public Tuple next() throws SQLException { if (!initialized) { init(); } Tuple next = null; while (next == null && !isEnd()) { if (queueIterator != null) { if (queueIterator.hasNext()) { next = join(lhsTuple, queueIterator.next()); } else { boolean eq = nextLhsTuple != null && lhsKey.equals(nextLhsKey); advance(true); if (eq) { queueIterator = queue.iterator(); } else { queue.clear(); queueIterator = null; } } } else if (lhsTuple != null) { if (rhsTuple != null) { if (lhsKey.equals(rhsKey)) { next = join(lhsTuple, rhsTuple); if (nextLhsTuple != null && lhsKey.equals(nextLhsKey)) { queue.offer(rhsTuple); if (nextRhsTuple == null || !rhsKey.equals(nextRhsKey)) { queueIterator = 
queue.iterator(); advance(true); } else if (isSingleValueOnly) { throw new SQLExceptionInfo.Builder(SQLExceptionCode.SINGLE_ROW_SUBQUERY_RETURNS_MULTIPLE_ROWS).build().buildException(); } } else if (nextRhsTuple == null || !rhsKey.equals(nextRhsKey)) { advance(true); } else if (isSingleValueOnly) { throw new SQLExceptionInfo.Builder(SQLExceptionCode.SINGLE_ROW_SUBQUERY_RETURNS_MULTIPLE_ROWS).build().buildException(); } advance(false); } else if (lhsKey.compareTo(rhsKey) < 0) { if (type == JoinType.Full || type == JoinType.Left) { next = join(lhsTuple, null); } advance(true); } else { if (type == JoinType.Full) { next = join(null, rhsTuple); } advance(false); } } else { // left-join or full-join next = join(lhsTuple, null); advance(true); } } else { // full-join next = join(null, rhsTuple); advance(false); } } return next; } @Override public void explain(List<String> planSteps) { } private void init() throws SQLException { nextLhsTuple = lhsIterator.next(); if (nextLhsTuple != null) { nextLhsKey.evaluate(nextLhsTuple); } advance(true); nextRhsTuple = rhsIterator.next(); if (nextRhsTuple != null) { nextRhsKey.evaluate(nextRhsTuple); } advance(false); initialized = true; } private void advance(boolean lhs) throws SQLException { if (lhs) { lhsTuple = nextLhsTuple; lhsKey.set(nextLhsKey); if (lhsTuple != null) { nextLhsTuple = lhsIterator.next(); if (nextLhsTuple != null) { nextLhsKey.evaluate(nextLhsTuple); } else { nextLhsKey.clear(); } } } else { rhsTuple = nextRhsTuple; rhsKey.set(nextRhsKey); if (rhsTuple != null) { nextRhsTuple = rhsIterator.next(); if (nextRhsTuple != null) { nextRhsKey.evaluate(nextRhsTuple); } else { nextRhsKey.clear(); } } } } private boolean isEnd() { return (lhsTuple == null && (rhsTuple == null || type != JoinType.Full)) || (queueIterator == null && rhsTuple == null && type == JoinType.Inner); } private Tuple join(Tuple lhs, Tuple rhs) throws SQLException { try { ProjectedValueTuple t = null; if (lhs == null) { t = new 
ProjectedValueTuple(rhs, rhs.getValue(0).getTimestamp(), this.emptyProjectedValue, 0, this.emptyProjectedValue.length, this.emptyProjectedValue.length); } else if (lhs instanceof ProjectedValueTuple) { t = (ProjectedValueTuple) lhs; } else { ImmutableBytesWritable ptr = context.getTempPtr(); TupleProjector.decodeProjectedValue(lhs, ptr); lhsBitSet.clear(); lhsBitSet.or(ptr); int bitSetLen = lhsBitSet.getEstimatedLength(); t = new ProjectedValueTuple(lhs, lhs.getValue(0).getTimestamp(), ptr.get(), ptr.getOffset(), ptr.getLength(), bitSetLen); } return rhsBitSet == ValueBitSet.EMPTY_VALUE_BITSET ? t : TupleProjector.mergeProjectedValue( t, joinedSchema, destBitSet, rhs, rhsSchema, rhsBitSet, rhsFieldPosition); } catch (IOException e) { throw new SQLException(e); } } } private class SemiAntiJoinIterator implements ResultIterator { private final ResultIterator lhsIterator; private final ResultIterator rhsIterator; private final boolean isSemi; private boolean initialized; private Tuple lhsTuple; private Tuple rhsTuple; private JoinKey lhsKey; private JoinKey rhsKey; public SemiAntiJoinIterator(ResultIterator lhsIterator, ResultIterator rhsIterator) { if (type != JoinType.Semi && type != JoinType.Anti) throw new IllegalArgumentException("Type " + type + " is not allowed by " + SemiAntiJoinIterator.class.getName()); this.lhsIterator = lhsIterator; this.rhsIterator = rhsIterator; this.isSemi = type == JoinType.Semi; this.initialized = false; this.lhsTuple = null; this.rhsTuple = null; this.lhsKey = new JoinKey(lhsKeyExpressions); this.rhsKey = new JoinKey(rhsKeyExpressions); } @Override public void close() throws SQLException { lhsIterator.close(); rhsIterator.close(); } @Override public Tuple next() throws SQLException { if (!initialized) { advance(true); advance(false); initialized = true; } Tuple next = null; while (lhsTuple != null && next == null) { if (rhsTuple != null) { if (lhsKey.equals(rhsKey)) { if (isSemi) { next = lhsTuple; } advance(true); } else if 
(lhsKey.compareTo(rhsKey) < 0) { if (!isSemi) { next = lhsTuple; } advance(true); } else { advance(false); } } else { if (!isSemi) { next = lhsTuple; } advance(true); } } return next; } @Override public void explain(List<String> planSteps) { } private void advance(boolean lhs) throws SQLException { if (lhs) { lhsTuple = lhsIterator.next(); if (lhsTuple != null) { lhsKey.evaluate(lhsTuple); } else { lhsKey.clear(); } } else { rhsTuple = rhsIterator.next(); if (rhsTuple != null) { rhsKey.evaluate(rhsTuple); } else { rhsKey.clear(); } } } } private static class JoinKey implements Comparable<JoinKey> { private final List<Expression> expressions; private final List<ImmutableBytesWritable> keys; public JoinKey(List<Expression> expressions) { this.expressions = expressions; this.keys = Lists.newArrayListWithExpectedSize(expressions.size()); for (int i = 0; i < expressions.size(); i++) { this.keys.add(new ImmutableBytesWritable(EMPTY_PTR)); } } public void evaluate(Tuple tuple) { for (int i = 0; i < keys.size(); i++) { if (!expressions.get(i).evaluate(tuple, keys.get(i))) { keys.get(i).set(EMPTY_PTR); } } } public void set(JoinKey other) { for (int i = 0; i < keys.size(); i++) { ImmutableBytesWritable key = other.keys.get(i); this.keys.get(i).set(key.get(), key.getOffset(), key.getLength()); } } public void clear() { for (int i = 0; i < keys.size(); i++) { this.keys.get(i).set(EMPTY_PTR); } } @Override public boolean equals(Object other) { if (!(other instanceof JoinKey)) return false; return this.compareTo((JoinKey) other) == 0; } @Override public int compareTo(JoinKey other) { for (int i = 0; i < keys.size(); i++) { int comp = this.keys.get(i).compareTo(other.keys.get(i)); if (comp != 0) return comp; } return 0; } } private static class MappedByteBufferTupleQueue extends MappedByteBufferQueue<Tuple> { public MappedByteBufferTupleQueue(int thresholdBytes) { super(thresholdBytes); } @Override protected MappedByteBufferSegmentQueue<Tuple> createSegmentQueue( int index, int 
thresholdBytes) { return new MappedByteBufferTupleSegmentQueue(index, thresholdBytes, false); } @Override protected Comparator<MappedByteBufferSegmentQueue<Tuple>> getSegmentQueueComparator() { return new Comparator<MappedByteBufferSegmentQueue<Tuple>>() { @Override public int compare(MappedByteBufferSegmentQueue<Tuple> q1, MappedByteBufferSegmentQueue<Tuple> q2) { return q1.index() - q2.index(); } }; } @Override public Iterator<Tuple> iterator() { return new Iterator<Tuple>() { private Iterator<MappedByteBufferSegmentQueue<Tuple>> queueIter; private Iterator<Tuple> currentIter; { this.queueIter = getSegmentQueues().iterator(); this.currentIter = queueIter.hasNext() ? queueIter.next().iterator() : null; } @Override public boolean hasNext() { return currentIter != null && currentIter.hasNext(); } @Override public Tuple next() { if (!hasNext()) return null; Tuple ret = currentIter.next(); if (!currentIter.hasNext()) { this.currentIter = queueIter.hasNext() ? queueIter.next().iterator() : null; } return ret; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } private static class MappedByteBufferTupleSegmentQueue extends MappedByteBufferSegmentQueue<Tuple> { private LinkedList<Tuple> results; public MappedByteBufferTupleSegmentQueue(int index, int thresholdBytes, boolean hasMaxQueueSize) { super(index, thresholdBytes, hasMaxQueueSize); this.results = Lists.newLinkedList(); } @Override protected Queue<Tuple> getInMemoryQueue() { return results; } @Override protected int sizeOf(Tuple e) { KeyValue kv = KeyValueUtil.ensureKeyValue(e.getValue(0)); return Bytes.SIZEOF_INT * 2 + kv.getLength(); } @SuppressWarnings("deprecation") @Override protected void writeToBuffer(MappedByteBuffer buffer, Tuple e) { KeyValue kv = KeyValueUtil.ensureKeyValue(e.getValue(0)); buffer.putInt(kv.getLength() + Bytes.SIZEOF_INT); buffer.putInt(kv.getLength()); buffer.put(kv.getBuffer(), kv.getOffset(), kv.getLength()); } @Override protected Tuple 
readFromBuffer(MappedByteBuffer buffer) { int length = buffer.getInt(); if (length < 0) return null; byte[] b = new byte[length]; buffer.get(b); Result result = ResultUtil.toResult(new ImmutableBytesWritable(b)); return new ResultTuple(result); } } } @Override public boolean useRoundRobinIterator() { return false; } @Override public Set<TableRef> getSourceRefs() { return tableRefs; } }
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import com.google.common.base.Predicate; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Range; import com.google.common.collect.Sets; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.concurrent.ExecutorUtil; import com.google.devtools.build.lib.concurrent.Sharder; import com.google.devtools.build.lib.concurrent.ThrowableRecordingRunnableWrapper; import com.google.devtools.build.lib.profiler.AutoProfiler; import com.google.devtools.build.lib.profiler.AutoProfiler.ElapsedTimeReceiver; import com.google.devtools.build.lib.skyframe.SkyValueDirtinessChecker.DirtyResult; import com.google.devtools.build.lib.skyframe.TreeArtifactValue.TreeArtifactException; import com.google.devtools.build.lib.util.LoggingUtil; import com.google.devtools.build.lib.util.Pair; import com.google.devtools.build.lib.util.Preconditions; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.BatchStat; import 
com.google.devtools.build.lib.vfs.FileStatusWithDigest; import com.google.devtools.build.lib.vfs.ModifiedFileSet; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.skyframe.Differencer; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import com.google.devtools.build.skyframe.WalkableGraph; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.NavigableSet; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.Nullable; /** * A helper class to find dirty values by accessing the filesystem directly (contrast with * {@link DiffAwareness}). 
*/
public class FilesystemValueChecker {

  // Thread-pool size for the generic dirtiness check in getDirtyValues().
  private static final int DIRTINESS_CHECK_THREADS = 200;
  private static final Logger LOG = Logger.getLogger(FilesystemValueChecker.class.getName());

  // Selects action-execution keys out of a graph's key set.
  private static final Predicate<SkyKey> ACTION_FILTER =
      SkyFunctionName.functionIs(SkyFunctions.ACTION_EXECUTION);

  private final TimestampGranularityMonitor tsgm;
  @Nullable
  private final Range<Long> lastExecutionTimeRange;
  // Counts output files found modified; reset at the start of each getDirtyActionValues call.
  private AtomicInteger modifiedOutputFilesCounter = new AtomicInteger(0);
  // Counts output files whose mtime falls inside lastExecutionTimeRange.
  private AtomicInteger modifiedOutputFilesIntraBuildCounter = new AtomicInteger(0);

  public FilesystemValueChecker(@Nullable TimestampGranularityMonitor tsgm,
      @Nullable Range<Long> lastExecutionTimeRange) {
    this.tsgm = tsgm;
    this.lastExecutionTimeRange = lastExecutionTimeRange;
  }

  /**
   * Returns a {@link Differencer.DiffWithDelta} containing keys from the given map that are dirty
   * according to the passed-in {@code dirtinessChecker}.
   */
  // TODO(bazel-team): Refactor these methods so that FilesystemValueChecker only operates on a
  // WalkableGraph.
  Differencer.DiffWithDelta getDirtyKeys(Map<SkyKey, SkyValue> valuesMap,
      SkyValueDirtinessChecker dirtinessChecker) throws InterruptedException {
    return getDirtyValues(new MapBackedValueFetcher(valuesMap), valuesMap.keySet(),
        dirtinessChecker, /*checkMissingValues=*/false);
  }

  /**
   * Returns a {@link Differencer.DiffWithDelta} containing keys that are dirty according to the
   * passed-in {@code dirtinessChecker}.
   */
  public Differencer.DiffWithDelta getNewAndOldValues(Map<SkyKey, SkyValue> valuesMap,
      Iterable<SkyKey> keys, SkyValueDirtinessChecker dirtinessChecker)
          throws InterruptedException {
    return getDirtyValues(new MapBackedValueFetcher(valuesMap), keys,
        dirtinessChecker, /*checkMissingValues=*/true);
  }

  /**
   * Returns a {@link Differencer.DiffWithDelta} containing keys that are dirty according to the
   * passed-in {@code dirtinessChecker}.
   */
  public Differencer.DiffWithDelta getNewAndOldValues(WalkableGraph walkableGraph,
      Iterable<SkyKey> keys, SkyValueDirtinessChecker dirtinessChecker)
          throws InterruptedException {
    return getDirtyValues(new WalkableGraphBackedValueFetcher(walkableGraph), keys,
        dirtinessChecker, /*checkMissingValues=*/true);
  }

  // Abstraction over the two value sources (in-memory map vs. walkable graph).
  private interface ValueFetcher {
    @Nullable
    SkyValue get(SkyKey key) throws InterruptedException;
  }

  private static class WalkableGraphBackedValueFetcher implements ValueFetcher {
    private final WalkableGraph walkableGraph;

    private WalkableGraphBackedValueFetcher(WalkableGraph walkableGraph) {
      this.walkableGraph = walkableGraph;
    }

    @Override
    @Nullable
    public SkyValue get(SkyKey key) throws InterruptedException {
      // Guard with exists() so missing keys yield null rather than an error.
      return walkableGraph.exists(key) ? walkableGraph.getValue(key) : null;
    }
  }

  private static class MapBackedValueFetcher implements ValueFetcher {
    private final Map<SkyKey, SkyValue> valuesMap;

    private MapBackedValueFetcher(Map<SkyKey, SkyValue> valuesMap) {
      this.valuesMap = valuesMap;
    }

    @Override
    @Nullable
    public SkyValue get(SkyKey key) {
      return valuesMap.get(key);
    }
  }

  /**
   * Return a collection of action values which have output files that are not in-sync with
   * the on-disk file value (were modified externally).
   */
  Collection<SkyKey> getDirtyActionValues(Map<SkyKey, SkyValue> valuesMap,
      @Nullable final BatchStat batchStatter, ModifiedFileSet modifiedOutputFiles)
          throws InterruptedException {
    if (modifiedOutputFiles == ModifiedFileSet.NOTHING_MODIFIED) {
      LOG.info("Not checking for dirty actions since nothing was modified");
      return ImmutableList.of();
    }
    LOG.info("Accumulating dirty actions");
    final int numOutputJobs = Runtime.getRuntime().availableProcessors() * 4;
    final Set<SkyKey> actionSkyKeys = new HashSet<>();
    for (SkyKey key : valuesMap.keySet()) {
      if (ACTION_FILTER.apply(key)) {
        actionSkyKeys.add(key);
      }
    }
    // Shard the action values so stat work can be parallelized across numOutputJobs threads.
    final Sharder<Pair<SkyKey, ActionExecutionValue>> outputShards =
        new Sharder<>(numOutputJobs, actionSkyKeys.size());

    for (SkyKey key : actionSkyKeys) {
      outputShards.add(Pair.of(key, (ActionExecutionValue) valuesMap.get(key)));
    }
    LOG.info("Sharded action values for batching");

    ExecutorService executor = Executors.newFixedThreadPool(
        numOutputJobs,
        new ThreadFactoryBuilder().setNameFormat("FileSystem Output File Invalidator %d").build());

    Collection<SkyKey> dirtyKeys = Sets.newConcurrentHashSet();
    ThrowableRecordingRunnableWrapper wrapper =
        new ThrowableRecordingRunnableWrapper("FileSystemValueChecker#getDirtyActionValues");

    // Reset counters before this run so the getters report only this invocation's results.
    modifiedOutputFilesCounter.set(0);
    modifiedOutputFilesIntraBuildCounter.set(0);
    // null means "everything modified" — every output must be checked.
    final ImmutableSet<PathFragment> knownModifiedOutputFiles =
        modifiedOutputFiles == ModifiedFileSet.EVERYTHING_MODIFIED
            ? null
            : modifiedOutputFiles.modifiedSourceFiles();

    // Initialized lazily through a supplier because it is only used to check modified
    // TreeArtifacts, which are not frequently used in builds.
    Supplier<NavigableSet<PathFragment>> sortedKnownModifiedOutputFiles =
        Suppliers.memoize(new Supplier<NavigableSet<PathFragment>>() {
          @Override
          public NavigableSet<PathFragment> get() {
            if (knownModifiedOutputFiles == null) {
              return null;
            } else {
              return ImmutableSortedSet.copyOf(knownModifiedOutputFiles);
            }
          }
        });

    for (List<Pair<SkyKey, ActionExecutionValue>> shard : outputShards) {
      // Prefer the batch statter when available; fall back to per-file stats otherwise.
      Runnable job = (batchStatter == null)
          ? outputStatJob(dirtyKeys, shard, knownModifiedOutputFiles,
              sortedKnownModifiedOutputFiles)
          : batchStatJob(dirtyKeys, shard, batchStatter, knownModifiedOutputFiles,
              sortedKnownModifiedOutputFiles);
      executor.submit(wrapper.wrap(job));
    }

    boolean interrupted = ExecutorUtil.interruptibleShutdown(executor);
    Throwables.propagateIfPossible(wrapper.getFirstThrownError());
    LOG.info("Completed output file stat checks");
    if (interrupted) {
      throw new InterruptedException();
    }
    return dirtyKeys;
  }

  // Dirtiness check for one shard using a single batch stat call; falls back to
  // outputStatJob on batch-stat failure.
  private Runnable batchStatJob(final Collection<SkyKey> dirtyKeys,
      final List<Pair<SkyKey, ActionExecutionValue>> shard,
      final BatchStat batchStatter,
      final ImmutableSet<PathFragment> knownModifiedOutputFiles,
      final Supplier<NavigableSet<PathFragment>> sortedKnownModifiedOutputFiles) {
    return new Runnable() {
      @Override
      public void run() {
        Map<Artifact, Pair<SkyKey, ActionExecutionValue>> fileToKeyAndValue = new HashMap<>();
        Map<Artifact, Pair<SkyKey, ActionExecutionValue>> treeArtifactsToKeyAndValue =
            new HashMap<>();
        for (Pair<SkyKey, ActionExecutionValue> keyAndValue : shard) {
          ActionExecutionValue actionValue = keyAndValue.getSecond();
          if (actionValue == null) {
            // No recorded value: conservatively treat the action as dirty.
            dirtyKeys.add(keyAndValue.getFirst());
          } else {
            for (Artifact artifact : actionValue.getAllFileValues().keySet()) {
              if (shouldCheckFile(knownModifiedOutputFiles, artifact)) {
                fileToKeyAndValue.put(artifact, keyAndValue);
              }
            }
            for (Artifact artifact : actionValue.getAllTreeArtifactValues().keySet()) {
              if (shouldCheckTreeArtifact(sortedKnownModifiedOutputFiles.get(), artifact)) {
                treeArtifactsToKeyAndValue.put(artifact, keyAndValue);
              }
            }
          }
        }

        List<Artifact> artifacts = ImmutableList.copyOf(fileToKeyAndValue.keySet());
        List<FileStatusWithDigest> stats;
        try {
          stats = batchStatter.batchStat(/*includeDigest=*/true, /*includeLinks=*/true,
              Artifact.asPathFragments(artifacts));
        } catch (IOException e) {
          // Batch stat did not work. Log an exception and fall back on system calls.
          LoggingUtil.logToRemote(Level.WARNING, "Unable to process batch stat", e);
          outputStatJob(dirtyKeys, shard, knownModifiedOutputFiles,
              sortedKnownModifiedOutputFiles)
              .run();
          return;
        } catch (InterruptedException e) {
          // We handle interrupt in the main thread.
          return;
        }

        // batchStat returns results positionally aligned with its input list.
        Preconditions.checkState(artifacts.size() == stats.size(),
            "artifacts.size() == %s stats.size() == %s", artifacts.size(), stats.size());
        for (int i = 0; i < artifacts.size(); i++) {
          Artifact artifact = artifacts.get(i);
          FileStatusWithDigest stat = stats.get(i);
          Pair<SkyKey, ActionExecutionValue> keyAndValue = fileToKeyAndValue.get(artifact);
          ActionExecutionValue actionValue = keyAndValue.getSecond();
          SkyKey key = keyAndValue.getFirst();
          FileValue lastKnownData = actionValue.getAllFileValues().get(artifact);
          try {
            FileValue newData = ActionMetadataHandler.fileValueFromArtifact(artifact, stat, tsgm);
            if (!newData.equals(lastKnownData)) {
              updateIntraBuildModifiedCounter(stat != null ? stat.getLastChangeTime() : -1,
                  lastKnownData.isSymlink(), newData.isSymlink());
              modifiedOutputFilesCounter.getAndIncrement();
              dirtyKeys.add(key);
            }
          } catch (IOException e) {
            // This is an unexpected failure getting a digest or symlink target.
            modifiedOutputFilesCounter.getAndIncrement();
            dirtyKeys.add(key);
          }
        }

        // Unfortunately, there exists no facility to batch list directories.
        // We must use direct filesystem calls.
        for (Map.Entry<Artifact, Pair<SkyKey, ActionExecutionValue>> entry :
            treeArtifactsToKeyAndValue.entrySet()) {
          Artifact artifact = entry.getKey();
          if (treeArtifactIsDirty(
              entry.getKey(), entry.getValue().getSecond().getTreeArtifactValue(artifact))) {
            Path path = artifact.getPath();
            // Count the changed directory as one "file".
            // TODO(bazel-team): There are no tests for this codepath.
            try {
              updateIntraBuildModifiedCounter(path.exists()
                  ? path.getLastModifiedTime()
                  : -1, false, path.isSymbolicLink());
            } catch (IOException e) {
              // Do nothing here.
            }

            modifiedOutputFilesCounter.getAndIncrement();
            dirtyKeys.add(entry.getValue().getFirst());
          }
        }
      }
    };
  }

  // Bump the intra-build counter if the change time falls inside the previous
  // execution window (symlink-to-symlink transitions are ignored).
  private void updateIntraBuildModifiedCounter(long time, boolean oldWasSymlink,
      boolean newIsSymlink) {
    if (lastExecutionTimeRange != null
        && lastExecutionTimeRange.contains(time)
        && !(oldWasSymlink && newIsSymlink)) {
      modifiedOutputFilesIntraBuildCounter.incrementAndGet();
    }
  }

  // Dirtiness check for one shard using direct per-file system calls (no batch stat).
  private Runnable outputStatJob(final Collection<SkyKey> dirtyKeys,
      final List<Pair<SkyKey, ActionExecutionValue>> shard,
      final ImmutableSet<PathFragment> knownModifiedOutputFiles,
      final Supplier<NavigableSet<PathFragment>> sortedKnownModifiedOutputFiles) {
    return new Runnable() {
      @Override
      public void run() {
        for (Pair<SkyKey, ActionExecutionValue> keyAndValue : shard) {
          ActionExecutionValue value = keyAndValue.getSecond();
          if (value == null
              || actionValueIsDirtyWithDirectSystemCalls(
                  value, knownModifiedOutputFiles, sortedKnownModifiedOutputFiles)) {
            dirtyKeys.add(keyAndValue.getFirst());
          }
        }
      }
    };
  }

  /**
   * Returns the number of modified output files inside of dirty actions.
   */
  int getNumberOfModifiedOutputFiles() {
    return modifiedOutputFilesCounter.get();
  }

  /** Returns the number of modified output files that occur during the previous build. */
  int getNumberOfModifiedOutputFilesDuringPreviousBuild() {
    return modifiedOutputFilesIntraBuildCounter.get();
  }

  // A tree artifact is dirty if it is a symlink or if its on-disk child set differs
  // from the recorded one; any listing error is treated as dirty.
  private boolean treeArtifactIsDirty(Artifact artifact, TreeArtifactValue value) {
    if (artifact.getPath().isSymbolicLink()) {
      // TreeArtifacts may not be symbolic links.
      return true;
    }

    // There doesn't appear to be any facility to batch list directories... we must
    // do things the 'slow' way.
    try {
      Set<PathFragment> currentDirectoryValue = TreeArtifactValue.explodeDirectory(artifact);
      Set<PathFragment> valuePaths = value.getChildPaths();
      return !currentDirectoryValue.equals(valuePaths);
    } catch (IOException | TreeArtifactException e) {
      return true;
    }
  }

  // Per-action dirtiness check with direct system calls; checks every recorded file
  // value and tree artifact, updating the shared modification counters as it goes.
  private boolean actionValueIsDirtyWithDirectSystemCalls(ActionExecutionValue actionValue,
      ImmutableSet<PathFragment> knownModifiedOutputFiles,
      Supplier<NavigableSet<PathFragment>> sortedKnownModifiedOutputFiles) {
    boolean isDirty = false;
    for (Map.Entry<Artifact, FileValue> entry : actionValue.getAllFileValues().entrySet()) {
      Artifact file = entry.getKey();
      FileValue lastKnownData = entry.getValue();
      if (shouldCheckFile(knownModifiedOutputFiles, file)) {
        try {
          FileValue fileValue = ActionMetadataHandler.fileValueFromArtifact(file, null, tsgm);
          if (!fileValue.equals(lastKnownData)) {
            updateIntraBuildModifiedCounter(fileValue.exists()
                ? fileValue.realRootedPath().asPath().getLastModifiedTime()
                : -1, lastKnownData.isSymlink(), fileValue.isSymlink());
            modifiedOutputFilesCounter.getAndIncrement();
            isDirty = true;
          }
        } catch (IOException e) {
          // This is an unexpected failure getting a digest or symlink target.
          modifiedOutputFilesCounter.getAndIncrement();
          isDirty = true;
        }
      }
    }

    for (Map.Entry<Artifact, TreeArtifactValue> entry :
        actionValue.getAllTreeArtifactValues().entrySet()) {
      Artifact artifact = entry.getKey();

      if (shouldCheckTreeArtifact(sortedKnownModifiedOutputFiles.get(), artifact)
          && treeArtifactIsDirty(artifact, entry.getValue())) {
        Path path = artifact.getPath();
        // Count the changed directory as one "file".
        try {
          updateIntraBuildModifiedCounter(path.exists()
              ? path.getLastModifiedTime()
              : -1, false, path.isSymbolicLink());
        } catch (IOException e) {
          // Do nothing here.
        }

        modifiedOutputFilesCounter.getAndIncrement();
        isDirty = true;
      }
    }

    return isDirty;
  }

  // A file must be checked unless we have a known-modified set that excludes it.
  private static boolean shouldCheckFile(ImmutableSet<PathFragment> knownModifiedOutputFiles,
      Artifact artifact) {
    return knownModifiedOutputFiles == null
        || knownModifiedOutputFiles.contains(artifact.getExecPath());
  }

  private static boolean shouldCheckTreeArtifact(
      @Nullable NavigableSet<PathFragment> knownModifiedOutputFiles, Artifact treeArtifact) {
    // If null, everything needs to be checked.
    if (knownModifiedOutputFiles == null) {
      return true;
    }

    // Here we do the following to see whether a TreeArtifact is modified:
    // 1. Sort the set of modified file paths in lexicographical order using TreeSet.
    // 2. Get the first modified output file path that is greater than or equal to the exec path of
    //    the TreeArtifact to check.
    // 3. Check whether the returned file path contains the exec path of the TreeArtifact as a
    //    prefix path.
    PathFragment artifactExecPath = treeArtifact.getExecPath();
    PathFragment headPath = knownModifiedOutputFiles.ceiling(artifactExecPath);

    return headPath != null && headPath.startsWith(artifactExecPath);
  }

  // Generic dirtiness sweep: applies the checker to every applicable key on a
  // fixed-size thread pool and collects dirty results in a BatchDirtyResult.
  private BatchDirtyResult getDirtyValues(ValueFetcher fetcher,
      Iterable<SkyKey> keys, final SkyValueDirtinessChecker checker,
      final boolean checkMissingValues) throws InterruptedException {
    ExecutorService executor =
        Executors.newFixedThreadPool(
            DIRTINESS_CHECK_THREADS,
            new ThreadFactoryBuilder().setNameFormat("FileSystem Value Invalidator %d").build());

    final BatchDirtyResult batchResult = new BatchDirtyResult();
    ThrowableRecordingRunnableWrapper wrapper =
        new ThrowableRecordingRunnableWrapper("FilesystemValueChecker#getDirtyValues");
    final AtomicInteger numKeysScanned = new AtomicInteger(0);
    final AtomicInteger numKeysChecked = new AtomicInteger(0);
    ElapsedTimeReceiver elapsedTimeReceiver =
        new ElapsedTimeReceiver() {
          @Override
          public void accept(long elapsedTimeNanos) {
            if (elapsedTimeNanos > 0) {
              LOG.info(String.format("Spent %d ms checking %d filesystem nodes (%d scanned)",
                  TimeUnit.MILLISECONDS.convert(elapsedTimeNanos, TimeUnit.NANOSECONDS),
                  numKeysChecked.get(),
                  numKeysScanned.get()));
            }
          }
        };
    try (AutoProfiler prof = AutoProfiler.create(elapsedTimeReceiver)) {
      for (final SkyKey key : keys) {
        numKeysScanned.incrementAndGet();
        if (!checker.applies(key)) {
          continue;
        }
        final SkyValue value = fetcher.get(key);
        // Skip absent values unless the caller asked to check missing ones too.
        if (!checkMissingValues && value == null) {
          continue;
        }
        executor.execute(
            wrapper.wrap(
                new Runnable() {
                  @Override
                  public void run() {
                    numKeysChecked.incrementAndGet();
                    DirtyResult result = checker.check(key, value, tsgm);
                    if (result.isDirty()) {
                      batchResult.add(key, value, result.getNewValue());
                    }
                  }
                }));
      }

      boolean interrupted = ExecutorUtil.interruptibleShutdown(executor);
      Throwables.propagateIfPossible(wrapper.getFirstThrownError());
      if (interrupted) {
        throw new InterruptedException();
      }
    }
    return batchResult;
  }

  /**
   * Result of a batch call to {@link SkyValueDirtinessChecker#check}. Partitions the dirty
   * values based on whether we have a new value available for them or not.
   */
  private static class BatchDirtyResult implements Differencer.DiffWithDelta {

    // Concurrent collections: populated from the dirtiness-check thread pool.
    private final Set<SkyKey> concurrentDirtyKeysWithoutNewValues =
        Collections.newSetFromMap(new ConcurrentHashMap<SkyKey, Boolean>());
    private final ConcurrentHashMap<SkyKey, Delta> concurrentDirtyKeysWithNewAndOldValues =
        new ConcurrentHashMap<>();

    private void add(SkyKey key, @Nullable SkyValue oldValue, @Nullable SkyValue newValue) {
      if (newValue == null) {
        concurrentDirtyKeysWithoutNewValues.add(key);
      } else {
        if (oldValue == null) {
          concurrentDirtyKeysWithNewAndOldValues.put(key, new Delta(newValue));
        } else {
          concurrentDirtyKeysWithNewAndOldValues.put(key, new Delta(oldValue, newValue));
        }
      }
    }

    @Override
    public Collection<SkyKey> changedKeysWithoutNewValues() {
      return concurrentDirtyKeysWithoutNewValues;
    }

    @Override
    public Map<SkyKey, Delta> changedKeysWithNewAndOldValues() {
      return concurrentDirtyKeysWithNewAndOldValues;
    }

    @Override
    public Map<SkyKey, SkyValue> changedKeysWithNewValues() {
      return Delta.newValues(concurrentDirtyKeysWithNewAndOldValues);
    }
  }
}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.apache.cloudstack.api.command.admin.vlan; import com.cloud.utils.net.NetUtils; import org.apache.log4j.Logger; import org.apache.cloudstack.api.APICommand; import org.apache.cloudstack.api.ApiConstants; import org.apache.cloudstack.api.ApiErrorCode; import org.apache.cloudstack.api.BaseCmd; import org.apache.cloudstack.api.Parameter; import org.apache.cloudstack.api.ServerApiException; import org.apache.cloudstack.api.response.DomainResponse; import org.apache.cloudstack.api.response.NetworkResponse; import org.apache.cloudstack.api.response.PhysicalNetworkResponse; import org.apache.cloudstack.api.response.PodResponse; import org.apache.cloudstack.api.response.ProjectResponse; import org.apache.cloudstack.api.response.VlanIpRangeResponse; import org.apache.cloudstack.api.response.ZoneResponse; import com.cloud.dc.Vlan; import com.cloud.exception.ConcurrentOperationException; import com.cloud.exception.InsufficientCapacityException; import com.cloud.exception.ResourceAllocationException; import com.cloud.exception.ResourceUnavailableException; import com.cloud.user.Account; @APICommand(name = "createVlanIpRange", description = "Creates a VLAN IP range.", responseObject = 
VlanIpRangeResponse.class, requestHasSensitiveInfo = false, responseHasSensitiveInfo = false) public class CreateVlanIpRangeCmd extends BaseCmd { public static final Logger s_logger = Logger.getLogger(CreateVlanIpRangeCmd.class.getName()); private static final String s_name = "createvlaniprangeresponse"; ///////////////////////////////////////////////////// //////////////// API parameters ///////////////////// ///////////////////////////////////////////////////// @Parameter(name = ApiConstants.ACCOUNT, type = CommandType.STRING, description = "account who will own the VLAN. If VLAN is Zone wide, this parameter should be ommited") private String accountName; @Parameter(name = ApiConstants.PROJECT_ID, type = CommandType.UUID, entityType = ProjectResponse.class, description = "project who will own the VLAN. If VLAN is Zone wide, this parameter should be ommited") private Long projectId; @Parameter(name = ApiConstants.DOMAIN_ID, type = CommandType.UUID, entityType = DomainResponse.class, description = "domain ID of the account owning a VLAN") private Long domainId; @Parameter(name = ApiConstants.END_IP, type = CommandType.STRING, description = "the ending IP address in the VLAN IP range") private String endIp; @Parameter(name = ApiConstants.FOR_VIRTUAL_NETWORK, type = CommandType.BOOLEAN, description = "true if VLAN is of Virtual type, false if Direct") private Boolean forVirtualNetwork; @Parameter(name = ApiConstants.GATEWAY, type = CommandType.STRING, description = "the gateway of the VLAN IP range") private String gateway; @Parameter(name = ApiConstants.NETMASK, type = CommandType.STRING, description = "the netmask of the VLAN IP range") private String netmask; @Parameter(name = ApiConstants.POD_ID, type = CommandType.UUID, entityType = PodResponse.class, description = "optional parameter. 
Have to be specified for Direct Untagged vlan only.") private Long podId; @Parameter(name = ApiConstants.START_IP, type = CommandType.STRING, description = "the beginning IP address in the VLAN IP range") private String startIp; @Parameter(name = ApiConstants.VLAN, type = CommandType.STRING, description = "the ID or VID of the VLAN. If not specified," + " will be defaulted to the vlan of the network or if vlan of the network is null - to Untagged") private String vlan; @Parameter(name = ApiConstants.ZONE_ID, type = CommandType.UUID, entityType = ZoneResponse.class, description = "the Zone ID of the VLAN IP range") private Long zoneId; @Parameter(name = ApiConstants.NETWORK_ID, type = CommandType.UUID, entityType = NetworkResponse.class, description = "the network id") private Long networkID; @Parameter(name = ApiConstants.PHYSICAL_NETWORK_ID, type = CommandType.UUID, entityType = PhysicalNetworkResponse.class, description = "the physical network id") private Long physicalNetworkId; @Parameter(name = ApiConstants.START_IPV6, type = CommandType.STRING, description = "the beginning IPv6 address in the IPv6 network range") private String startIpv6; @Parameter(name = ApiConstants.END_IPV6, type = CommandType.STRING, description = "the ending IPv6 address in the IPv6 network range") private String endIpv6; @Parameter(name = ApiConstants.IP6_GATEWAY, type = CommandType.STRING, description = "the gateway of the IPv6 network. 
Required " + "for Shared networks and Isolated networks when it belongs to VPC") private String ip6Gateway; @Parameter(name = ApiConstants.IP6_CIDR, type = CommandType.STRING, description = "the CIDR of IPv6 network, must be at least /64") private String ip6Cidr; @Parameter(name = ApiConstants.FOR_SYSTEM_VMS, type = CommandType.BOOLEAN, description = "true if IP range is set to system vms, false if not") private Boolean forSystemVms; ///////////////////////////////////////////////////// /////////////////// Accessors /////////////////////// ///////////////////////////////////////////////////// public String getAccountName() { return accountName; } public Long getDomainId() { return domainId; } public String getEndIp() { return endIp; } public Boolean isForVirtualNetwork() { return forVirtualNetwork == null ? Boolean.TRUE : forVirtualNetwork; } public String getGateway() { return gateway; } public Boolean isForSystemVms() { return forSystemVms == null ? Boolean.FALSE : forSystemVms; } public String getNetmask() { return netmask; } public Long getPodId() { return podId; } public String getStartIp() { return startIp; } public String getVlan() { if (vlan == null || vlan.isEmpty()) { vlan = "untagged"; } return vlan; } public Long getZoneId() { return zoneId; } public Long getProjectId() { return projectId; } public String getStartIpv6() { if (startIpv6 == null) { return null; } return NetUtils.standardizeIp6Address(startIpv6); } public String getEndIpv6() { if (endIpv6 == null) { return null; } return NetUtils.standardizeIp6Address(endIpv6); } public String getIp6Gateway() { if (ip6Gateway == null) { return null; } return NetUtils.standardizeIp6Address(ip6Gateway); } public String getIp6Cidr() { if (ip6Cidr == null) { return null; } return NetUtils.standardizeIp6Cidr(ip6Cidr); } ///////////////////////////////////////////////////// /////////////// API Implementation/////////////////// ///////////////////////////////////////////////////// public Long getNetworkID() { 
return networkID; } public Long getPhysicalNetworkId() { return physicalNetworkId; } @Override public String getCommandName() { return s_name; } @Override public long getEntityOwnerId() { return Account.ACCOUNT_ID_SYSTEM; } @Override public void execute() throws ResourceUnavailableException, ResourceAllocationException { try { Vlan result = _configService.createVlanAndPublicIpRange(this); if (result != null) { VlanIpRangeResponse response = _responseGenerator.createVlanIpRangeResponse(result); response.setResponseName(getCommandName()); this.setResponseObject(response); } else { throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Failed to create vlan ip range"); } } catch (ConcurrentOperationException ex) { s_logger.warn("Exception: ", ex); throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, ex.getMessage()); } catch (InsufficientCapacityException ex) { s_logger.info(ex); throw new ServerApiException(ApiErrorCode.INSUFFICIENT_CAPACITY_ERROR, ex.getMessage()); } } }
package gov.nist.sip.proxy.gui;

/*
 * ProxyLauncher.java
 *
 * Created on April 8, 2002, 10:10 AM
 */

import gov.nist.sip.proxy.Configuration;
import gov.nist.sip.proxy.Proxy;
import gov.nist.sip.proxy.ProxyDebug;

import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Container;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;

import javax.swing.AbstractButton;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.border.BevelBorder;
import javax.swing.border.Border;
import javax.swing.border.EtchedBorder;

/**
 * Main Swing window for the NIST-SIP proxy: shows the current registrations,
 * offers start/stop and trace-viewer buttons, and wires menu actions to a
 * {@link ListenerProxy}. Constructed from a configuration-file path.
 *
 * @author olivier
 * @version 1.0
 */
public class ProxyLauncher extends JFrame{

    // Menus
    protected JMenuBar menuBar;
    protected JMenu menuMenu;
    protected JMenu optionsMenu;   // NOTE(review): never assigned/used in this class
    protected JMenu helpMenu;
    protected JMenu quit;
    protected JMenuItem configurationMenuItem;

    // The 2 panels: one for the labels and texts, the other for the buttons
    protected JPanel firstPanel;
    protected JPanel secondPanel;
    protected JPanel thirdPanel;   // NOTE(review): never assigned/used in this class
    protected JPanel fourthPanel;  // NOTE(review): never assigned/used in this class

    protected JButton proxyButton;
    protected JButton traceViewerButton;

    protected RegistrationsList registrationsList;

    protected Configuration configuration;  // NOTE(review): shadowed by getConfiguration(), never assigned here
    protected ListenerProxy listenerProxy;

    private String configurationFile;
    protected Proxy proxy;

    // All for the container:
    protected static Color containerBackGroundColor=new Color(204,204,204);
    protected static String logo="logoNist-gray.jpg";

    // All for the labels:
    protected static Border labelBorder=new EtchedBorder(EtchedBorder.RAISED);
    protected static Color labelBackGroundColor=new Color(217,221,221);

    // All for the TextField
    protected static Color textBackGroundColor=Color.white;

    // All for the Button
    protected static Border buttonBorder=new BevelBorder(BevelBorder.RAISED);
    protected static Color buttonBackGroundColor=new Color(186,175,175);

    /**
     * Builds the launcher window: loads the proxy from {@code configFile},
     * creates the event listener, lays out the components and shows the frame.
     * A null {@code configFile} is reported via the caught exception's stack trace.
     */
    public ProxyLauncher(String configFile) {
        super("NIST-SIP proxy interface");
        System.out.println("Initialisation Proxy Interface");
        try {
            if (configFile==null) {
                throw new Exception("ERROR, specify the configuration file on the"+
                " command line.");
            }
            else configurationFile=configFile;
            // First thing to do, get the configurations.
            proxy=new Proxy(configurationFile);
            listenerProxy=new ListenerProxy(this);
            initComponents();
            // NOTE(review): JFrame.show() is deprecated; setVisible(true) is the modern call.
            show();
        }
        catch(Exception e) {
            e.printStackTrace();
        }
    }

    public Proxy getProxy() {
        return proxy;
    }

    public void setProxy(Proxy proxy) {
        this.proxy=proxy;
    }

    public RegistrationsList getRegistrationsList() {
        return registrationsList;
    }

    public String getConfigurationFile() {
        return configurationFile;
    }

    // Delegates to the Proxy's configuration; null when no proxy was created.
    public Configuration getConfiguration() {
        if (proxy!=null)
            return proxy.getConfiguration();
        else return null;
    }

    public ListenerProxy getListenerProxy(){
        return listenerProxy;
    }

    /*******************************************************************************/
    /*******************************************************************************/
    /*******************************************************************************/

    /**
     * Creates and lays out every widget: menu bar (Menu/Help/Quit), the
     * registrations list with its scroll pane, the start-proxy and trace-viewer
     * buttons, and the NIST logo panel. Also installs the window-close handler
     * that cleans up the proxy.
     */
    public void initComponents() {
        /********************** The main container ****************************/
        Container container=this.getContentPane();
        container.setLayout(new BoxLayout(container,BoxLayout.Y_AXIS));
        container.setBackground(containerBackGroundColor);
        // width, size:
        setSize(350,400);
        setLocation(0,0);
        // Closing the window stops the proxy and exits the JVM (see clean()).
        addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                clean();
            }
        });

        /********************** Menu bar **************************************/
        menuBar=new JMenuBar();
        setJMenuBar(menuBar);
        // create a menu and add it to the menubar
        menuMenu=new JMenu(" Menu ");
        menuMenu.setBorder(new BevelBorder(BevelBorder.RAISED));
        menuMenu.setToolTipText("Main menu of the proxy");
        // create sub-menus belonging to the main menu
        configurationMenuItem=new JMenuItem("Configuration");
        configurationMenuItem.setToolTipText("Configure the stack");
        configurationMenuItem.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent evt) {
                listenerProxy.configurationActionPerformed(evt);
            }
        }
        );
        menuMenu.add(configurationMenuItem);
        // add the menu to the menu bar
        menuBar.add(menuMenu);
        //...create and add some menus...
        // Glue pushes the Help and Quit menus to the right edge of the bar.
        menuBar.add(Box.createHorizontalGlue());

        helpMenu=new JMenu(" Help ");
        helpMenu.setBorder(new BevelBorder(BevelBorder.RAISED));
        helpMenu.setToolTipText("Some useful notes about this tool");
        helpMenu.addMouseListener( new MouseAdapter() {
            public void mouseClicked(MouseEvent evt){
                listenerProxy.helpMenuMouseEvent(evt);
            }
        }
        );
        menuBar.add(helpMenu);

        quit=new JMenu(" Quit ");
        quit.setBorder(new BevelBorder(BevelBorder.RAISED));
        quit.setToolTipText("Quit the application");
        quit.addMouseListener( new MouseAdapter() {
            public void mouseClicked(MouseEvent evt){
                clean();
            }
        }
        );
        menuBar.add(quit);

        /*************************** Main Panel ********************************/
        firstPanel=new JPanel();
        // Top, left, bottom, right
        firstPanel.setBorder(BorderFactory.createEmptyBorder(20,20,20,20));
        // If put to False: we see the container's background
        firstPanel.setOpaque(false);
        firstPanel.setBackground(Color.lightGray);
        //rows, columns, horizontalGap, verticalGap
        firstPanel.setLayout( new BorderLayout() );
        container.add(firstPanel);

        JLabel registrationsLabel=new JLabel("Registrations:");
        //registrationsLabel.setToolTipText("Click on a registration to get the contacts addresses!!");
        // Alignment of the text
        registrationsLabel.setHorizontalAlignment(AbstractButton.CENTER);
        // Color of the text
        registrationsLabel.setForeground(Color.black);
        // Size of the text
        registrationsLabel.setFont(new Font ("Dialog", 1, 14));
        // If put to true: we see the label's background
        registrationsLabel.setOpaque(true);
        registrationsLabel.setBackground(labelBackGroundColor);
        registrationsLabel.setBorder(labelBorder);
        firstPanel.add("North",registrationsLabel);

        registrationsList=new RegistrationsList(this);
        // registrationsList.setToolTipText("Double click on a registration to get the contacts addresses!!");
        MouseListener mouseListener = new MouseAdapter() {
            public void mouseClicked(MouseEvent e) {
                // listenerProxy.registrationsListMouseClicked(e);
            }
        };
        registrationsList.addMouseListener(mouseListener);

        JScrollPane scrollPane = new JScrollPane(registrationsList,
        JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
        JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS);
        firstPanel.add("Center",scrollPane);

        /*************************** Secondary Panel ********************************/
        secondPanel=new JPanel();
        secondPanel.setOpaque(false);
        secondPanel.setBorder(BorderFactory.createEmptyBorder(5,20,10,20));
        container.add(secondPanel);
        // row, column, gap, gap
        secondPanel.setLayout( new GridLayout(1,2,5,5) );

        proxyButton=new JButton("Start the proxy");
        proxyButton.setToolTipText("Please, start/stop the proxy!!!");
        proxyButton.setFont(new Font ("Dialog", 1, 14));
        proxyButton.setFocusPainted(false);
        proxyButton.setBackground(buttonBackGroundColor);
        proxyButton.setBorder(buttonBorder);
        proxyButton.setVerticalAlignment(AbstractButton.CENTER);
        proxyButton.setHorizontalAlignment(AbstractButton.CENTER);
        secondPanel.add(proxyButton);
        proxyButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent evt) {
                listenerProxy.proxyActionPerformed(evt);
            }
        }
        );

        traceViewerButton=new JButton("View the traces");
        traceViewerButton.setToolTipText("The traces are waiting for you!!!");
        traceViewerButton.setFont(new Font ("Dialog", 1, 14));
        traceViewerButton.setFocusPainted(false);
        traceViewerButton.setBackground(buttonBackGroundColor);
        traceViewerButton.setBorder(buttonBorder);
        traceViewerButton.setVerticalAlignment(AbstractButton.CENTER);
        traceViewerButton.setHorizontalAlignment(AbstractButton.CENTER);
        secondPanel.add(traceViewerButton);
        traceViewerButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent evt) {
                listenerProxy.traceViewerActionPerformed(evt);
            }
        }
        );

        JPanel imagesPanel=new JPanel();
        imagesPanel.setOpaque(false);
        // top, left, bottom, right
        imagesPanel.setBorder(BorderFactory.createEmptyBorder(10,0,0,0));
        container.add(imagesPanel);
        imagesPanel.setLayout( new FlowLayout(FlowLayout.CENTER,0,0) );
        // NOTE(review): relative path — the logo only loads when the working
        // directory contains ./gui/images/; a missing file fails silently.
        ImageIcon icon=new ImageIcon("./gui/images/"+logo);
        JLabel label=new JLabel(icon);
        label.setVisible(true);
        label.setToolTipText("What a spacey NIST logo!!!");
        label.setHorizontalAlignment(AbstractButton.CENTER);
        label.setForeground(Color.black);
        label.setFont(new Font ("Dialog", 1, 14));
        label.setOpaque(true);
        label.setBackground(Color.lightGray);
        imagesPanel.add(label);
    }

    /**
     * Stops the proxy and the RMI-registry subprocess (if any), then exits the JVM.
     * Invoked from the Quit menu and the window-close handler.
     */
    public void clean() {
        // We kill the proxy:
        ProxyDebug.println("Proxy Clean up");
        try {
            listenerProxy.stopProxy();
            if (listenerProxy.rmiregistryProcess!=null)
                listenerProxy.rmiregistryProcess.destroy();
        }
        catch(Exception e) {
            e.printStackTrace();
        }
        System.exit(0);
    }

    /*************************************************************************/
    /************       The main method: to launch the proxy     *************/
    /************************************************************************/

    /**
     * Entry point. Expects the configuration file as the second argument
     * (i.e. {@code -cf configuration_file_location.xml}); any failure —
     * including a missing args[1] — is caught and reported as a usage error.
     */
    public static void main(String args[]) {
        try{
            // the Proxy:
            // NOTE(review): args[1] assumes invocation as "-cf <file>"; a bare
            // single argument throws ArrayIndexOutOfBounds, handled below as usage error.
            String confFile= (String) args[1];

            ProxyLauncher proxyLauncher= new ProxyLauncher(confFile);
            //proxyLauncher.start();
            //ProxyDebug.println("Proxy ready to work");
            System.out.println("in main ProxyLauncher");
        }
        catch(Exception e) {
            System.out.println
            ("ERROR: Set the configuration file flag: "
            + "USE: -cf configuration_file_location.xml"
            );
            System.out.println("ERROR, the proxy can not be started, " +
            " exception raised:\n");
            e.printStackTrace();
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.mina2;

import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.charset.Charset;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

import org.apache.camel.CamelExchangeException;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangeTimedOutException;
import org.apache.camel.ServicePoolAware;
import org.apache.camel.converter.IOConverter;
import org.apache.camel.impl.DefaultProducer;
import org.apache.camel.util.CamelLogger;
import org.apache.camel.util.ExchangeHelper;
import org.apache.mina.core.filterchain.DefaultIoFilterChainBuilder;
import org.apache.mina.core.filterchain.IoFilter;
import org.apache.mina.core.future.CloseFuture;
import org.apache.mina.core.future.ConnectFuture;
import org.apache.mina.core.service.IoConnector;
import org.apache.mina.core.service.IoHandlerAdapter;
import org.apache.mina.core.service.IoService;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.core.session.IoSessionConfig;
import org.apache.mina.filter.codec.ProtocolCodecFactory;
import org.apache.mina.filter.codec.ProtocolCodecFilter;
import org.apache.mina.filter.codec.serialization.ObjectSerializationCodecFactory;
import org.apache.mina.filter.codec.textline.LineDelimiter;
import org.apache.mina.filter.executor.ExecutorFilter;
import org.apache.mina.filter.executor.OrderedThreadPoolExecutor;
import org.apache.mina.filter.executor.UnorderedThreadPoolExecutor;
import org.apache.mina.filter.logging.LoggingFilter;
import org.apache.mina.filter.ssl.SslFilter;
import org.apache.mina.transport.socket.nio.NioDatagramConnector;
import org.apache.mina.transport.socket.nio.NioSocketConnector;
import org.apache.mina.transport.vmpipe.VmPipeAddress;
import org.apache.mina.transport.vmpipe.VmPipeConnector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A {@link org.apache.camel.Producer} implementation for MINA
 *
 * @version
 */
public class Mina2Producer extends DefaultProducer implements ServicePoolAware {

    private static final Logger LOG = LoggerFactory.getLogger(Mina2Producer.class);
    // Active MINA session; (re)opened lazily when lazySessionCreation is on.
    private IoSession session;
    // Latch used only in sync mode to wait for the response (created per request).
    private CountDownLatch latch;
    private boolean lazySessionCreation;
    private long timeout;
    private SocketAddress address;
    private IoConnector connector;
    private boolean sync;
    private CamelLogger noReplyLogger;
    private Mina2Configuration configuration;
    private IoSessionConfig connectorConfig;
    private ExecutorService workerPool;

    /**
     * Configures the producer from the endpoint and sets up the transport-specific
     * connector: tcp -> socket, datagram protocols -> UDP, vm -> in-JVM pipe.
     */
    public Mina2Producer(Mina2Endpoint endpoint) throws Exception {
        super(endpoint);
        this.configuration = endpoint.getConfiguration();
        this.lazySessionCreation = configuration.isLazySessionCreation();
        this.timeout = configuration.getTimeout();
        this.sync = configuration.isSync();
        this.noReplyLogger = new CamelLogger(LOG, configuration.getNoReplyLogLevel());

        String protocol = configuration.getProtocol();
        if (protocol.equals("tcp")) {
            setupSocketProtocol(protocol);
        } else if (configuration.isDatagramProtocol()) {
            setupDatagramProtocol(protocol);
        } else if (protocol.equals("vm")) {
            setupVmProtocol(protocol);
        }
    }

    @Override
    public Mina2Endpoint getEndpoint() {
        return (Mina2Endpoint) super.getEndpoint();
    }

    @Override
    public boolean isSingleton() {
        // the producer should not be singleton otherwise cannot use concurrent producers and safely
        // use request/reply with correct correlation
        return false;
    }

    public void process(Exchange exchange) throws Exception {
        try {
            doProcess(exchange);
        } finally {
            // ensure we always disconnect if configured
            maybeDisconnectOnDone(exchange);
        }
    }

    /**
     * Writes the exchange body to the session and, in sync mode, blocks on the
     * latch until a response arrives or {@code timeout} elapses.
     */
    @SuppressWarnings("deprecation")
    protected void doProcess(Exchange exchange) throws Exception {
        if (session == null && !lazySessionCreation) {
            throw new IllegalStateException("Not started yet!");
        }
        if (session == null || !session.isConnected()) {
            openConnection();
        }

        // set the exchange encoding property
        if (getEndpoint().getConfiguration().getCharsetName() != null) {
            exchange.setProperty(Exchange.CHARSET_NAME, IOConverter.normalizeCharset(getEndpoint().getConfiguration().getCharsetName()));
        }

        Object body = Mina2PayloadHelper.getIn(getEndpoint(), exchange);
        if (body == null) {
            noReplyLogger.log("No payload to send for exchange: " + exchange);
            return; // exit early since nothing to write
        }

        // if textline enabled then covert to a String which must be used for textline
        if (getEndpoint().getConfiguration().isTextline()) {
            body = getEndpoint().getCamelContext().getTypeConverter().mandatoryConvertTo(String.class, exchange, body);
        }

        // if sync is true then we should also wait for a response (synchronous mode)
        if (sync) {
            // only initialize latch if we should get a response
            // NOTE: latch must exist before the write so the handler's countDown() is not missed.
            latch = new CountDownLatch(1);
            // reset handler if we expect a response
            ResponseHandler handler = (ResponseHandler) session.getHandler();
            handler.reset();
        }

        // log what we are writing
        if (LOG.isDebugEnabled()) {
            Object out = body;
            if (body instanceof byte[]) {
                // byte arrays is not readable so convert to string
                out = exchange.getContext().getTypeConverter().convertTo(String.class, body);
            }
            LOG.debug("Writing body: {}", out);
        }
        // write the body
        Mina2Helper.writeBody(session, body, exchange);

        if (sync) {
            // wait for response, consider timeout
            LOG.debug("Waiting for response using timeout {} millis.", timeout);
            boolean done = latch.await(timeout, TimeUnit.MILLISECONDS);
            if (!done) {
                throw new ExchangeTimedOutException(exchange, timeout);
            }

            // did we get a response
            ResponseHandler handler = (ResponseHandler) session.getHandler();
            if (handler.getCause() != null) {
                throw new CamelExchangeException("Error occurred in ResponseHandler", exchange, handler.getCause());
            } else if (!handler.isMessageReceived()) {
                // no message received
                // (session may have been closed by the remote side before replying)
                throw new ExchangeTimedOutException(exchange, timeout);
            } else {
                // set the result on either IN or OUT on the original exchange depending on its pattern
                if (ExchangeHelper.isOutCapable(exchange)) {
                    Mina2PayloadHelper.setOut(exchange, handler.getMessage());
                } else {
                    Mina2PayloadHelper.setIn(exchange, handler.getMessage());
                }
            }
        }
    }

    // Closes the session after the exchange when configured or when the message
    // header MINA_CLOSE_SESSION_WHEN_COMPLETE overrides the endpoint setting.
    protected void maybeDisconnectOnDone(Exchange exchange) {
        if (session == null) {
            return;
        }

        // should session be closed after complete?
        Boolean close;
        if (ExchangeHelper.isOutCapable(exchange)) {
            close = exchange.getOut().getHeader(Mina2Constants.MINA_CLOSE_SESSION_WHEN_COMPLETE, Boolean.class);
        } else {
            close = exchange.getIn().getHeader(Mina2Constants.MINA_CLOSE_SESSION_WHEN_COMPLETE, Boolean.class);
        }

        // should we disconnect, the header can override the configuration
        boolean disconnect = getEndpoint().getConfiguration().isDisconnect();
        if (close != null) {
            disconnect = close;
        }
        if (disconnect) {
            LOG.debug("Closing session when complete at address: {}", address);
            session.close(true);
        }
    }

    public DefaultIoFilterChainBuilder getFilterChain() {
        return connector.getFilterChain();
    }

    @Override
    protected void doStart() throws Exception {
        super.doStart();
        if (!lazySessionCreation) {
            openConnection();
        }
    }

    @Override
    protected void doStop() throws Exception {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Stopping connector: {} at address: {}", connector, address);
        }
        closeConnection();
        super.doStop();
    }

    @Override
    protected void doShutdown() throws Exception {
        // Shut down the worker pool created in setupSocketProtocol/setupDatagramProtocol.
        if (workerPool != null) {
            workerPool.shutdown();
        }
        super.doShutdown();
    }

    private void closeConnection() {
        if (session != null) {
            CloseFuture closeFuture = session.close(true);
            closeFuture.awaitUninterruptibly();
        }

        connector.dispose(true);
    }

    // Connects (blocking) and installs a fresh ResponseHandler on the connector.
    private void openConnection() {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Creating connector to address: {} using connector: {} timeout: {} millis.", new Object[]{address, connector, timeout});
        }
        // connect and wait until the connection is established
        if (connectorConfig != null) {
            connector.getSessionConfig().setAll(connectorConfig);
        }

        connector.setHandler(new ResponseHandler());
        ConnectFuture future = connector.connect(address);
        future.awaitUninterruptibly();
        session = future.getSession();
    }

    // Implementation methods
    //-------------------------------------------------------------------------
    protected void setupVmProtocol(String uri) {
        boolean minaLogger = configuration.isMinaLogger();
        List<IoFilter> filters = configuration.getFilters();

        address = new VmPipeAddress(configuration.getPort());
        connector = new VmPipeConnector();

        // connector config
        if (minaLogger) {
            connector.getFilterChain().addLast("logger", new LoggingFilter());
        }
        appendIoFiltersToChain(filters, connector.getFilterChain());
        if (configuration.getSslContextParameters() != null) {
            LOG.warn("Using vm protocol"
                     + ", but an SSLContextParameters instance was provided.  SSLContextParameters is only supported on the TCP protocol.");
        }
        configureCodecFactory("Mina2Producer", connector);
    }

    protected void setupSocketProtocol(String uri) throws Exception {
        boolean minaLogger = configuration.isMinaLogger();
        long timeout = configuration.getTimeout();
        List<IoFilter> filters = configuration.getFilters();

        address = new InetSocketAddress(configuration.getHost(), configuration.getPort());
        final int processorCount = Runtime.getRuntime().availableProcessors() + 1;
        connector = new NioSocketConnector(processorCount);

        // connector config
        connectorConfig = connector.getSessionConfig();

        if (configuration.isOrderedThreadPoolExecutor()) {
            workerPool = new OrderedThreadPoolExecutor(configuration.getMaximumPoolSize());
        } else {
            workerPool = new UnorderedThreadPoolExecutor(configuration.getMaximumPoolSize());
        }
        connector.getFilterChain().addLast("threadPool", new ExecutorFilter(workerPool));
        if (minaLogger) {
            connector.getFilterChain().addLast("logger", new LoggingFilter());
        }
        appendIoFiltersToChain(filters, connector.getFilterChain());
        if (configuration.getSslContextParameters() != null) {
            // SSL filter must be first in the chain so all traffic is encrypted.
            connector.getFilterChain().addFirst("sslFilter",
                                                new SslFilter(configuration.getSslContextParameters().createSSLContext(), configuration.isAutoStartTls()));
        }
        configureCodecFactory("Mina2Producer", connector);
        connector.setConnectTimeoutMillis(timeout);
    }

    protected void configureCodecFactory(String type, IoService service) {
        if (configuration.getCodec() != null) {
            addCodecFactory(service, configuration.getCodec());
        } else if (configuration.isAllowDefaultCodec()) {
            configureDefaultCodecFactory(type, service);
        }
    }

    // Default codec: textline (with optional max line lengths) or Java object serialization.
    protected void configureDefaultCodecFactory(String type, IoService service) {
        if (configuration.isTextline()) {
            Charset charset = getEncodingParameter(type, configuration);
            LineDelimiter delimiter = getLineDelimiterParameter(configuration.getTextlineDelimiter());
            Mina2TextLineCodecFactory codecFactory = new Mina2TextLineCodecFactory(charset, delimiter);
            if (configuration.getEncoderMaxLineLength() > 0) {
                codecFactory.setEncoderMaxLineLength(configuration.getEncoderMaxLineLength());
            }
            if (configuration.getDecoderMaxLineLength() > 0) {
                codecFactory.setDecoderMaxLineLength(configuration.getDecoderMaxLineLength());
            }
            addCodecFactory(service, codecFactory);
            LOG.debug("{}: Using TextLineCodecFactory: {} using encoding: {} line delimiter: {}({})",
                      new Object[]{type, codecFactory, charset, configuration.getTextlineDelimiter(), delimiter});
            LOG.debug("Encoder maximum line length: {}. Decoder maximum line length: {}",
                      codecFactory.getEncoderMaxLineLength(), codecFactory.getDecoderMaxLineLength());
        } else {
            ObjectSerializationCodecFactory codecFactory = new ObjectSerializationCodecFactory();
            addCodecFactory(service, codecFactory);
            LOG.debug("{}: Using ObjectSerializationCodecFactory: {}", type, codecFactory);
        }
    }

    protected void setupDatagramProtocol(String uri) {
        boolean minaLogger = configuration.isMinaLogger();
        boolean transferExchange = configuration.isTransferExchange();
        List<IoFilter> filters = configuration.getFilters();

        if (transferExchange) {
            throw new IllegalArgumentException("transferExchange=true is not supported for datagram protocol");
        }

        address = new InetSocketAddress(configuration.getHost(), configuration.getPort());
        final int processorCount = Runtime.getRuntime().availableProcessors() + 1;
        connector = new NioDatagramConnector(processorCount);

        if (configuration.isOrderedThreadPoolExecutor()) {
            workerPool = new OrderedThreadPoolExecutor(configuration.getMaximumPoolSize());
        } else {
            workerPool = new UnorderedThreadPoolExecutor(configuration.getMaximumPoolSize());
        }
        connectorConfig = connector.getSessionConfig();

        connector.getFilterChain().addLast("threadPool", new ExecutorFilter(workerPool));
        if (minaLogger) {
            connector.getFilterChain().addLast("logger", new LoggingFilter());
        }
        appendIoFiltersToChain(filters, connector.getFilterChain());
        if (configuration.getSslContextParameters() != null) {
            LOG.warn("Using datagram protocol, " + configuration.getProtocol()
                     + ", but an SSLContextParameters instance was provided.  SSLContextParameters is only supported on the TCP protocol.");
        }
        configureDataGramCodecFactory("Mina2Producer", connector, configuration);
        // set connect timeout to mina in seconds
        connector.setConnectTimeoutMillis(timeout);
    }

    /**
     * For datagrams the entire message is available as a single IoBuffer so lets just pass those around by default
     * and try converting whatever they payload is into IoBuffer unless some custom converter is specified
     */
    protected void configureDataGramCodecFactory(final String type, final IoService service, final Mina2Configuration configuration) {
        ProtocolCodecFactory codecFactory = configuration.getCodec();
        if (codecFactory == null) {
            codecFactory = new Mina2UdpProtocolCodecFactory(this.getEndpoint().getCamelContext());

            if (LOG.isDebugEnabled()) {
                LOG.debug("{}: Using CodecFactory: {}", new Object[]{type, codecFactory});
            }
        }

        addCodecFactory(service, codecFactory);
    }

    private void addCodecFactory(IoService service, ProtocolCodecFactory codecFactory) {
        LOG.debug("addCodecFactory name: {}", codecFactory.getClass().getName());

        service.getFilterChain().addLast("codec", new ProtocolCodecFilter(codecFactory));
    }

    // Maps the endpoint's delimiter enum to MINA's LineDelimiter constants.
    private static LineDelimiter getLineDelimiterParameter(Mina2TextLineDelimiter delimiter) {
        if (delimiter == null) {
            return LineDelimiter.DEFAULT;
        }

        switch (delimiter) {
        case DEFAULT:
            return LineDelimiter.DEFAULT;
        case AUTO:
            return LineDelimiter.AUTO;
        case UNIX:
            return LineDelimiter.UNIX;
        case WINDOWS:
            return LineDelimiter.WINDOWS;
        case MAC:
            return LineDelimiter.MAC;
        default:
            throw new IllegalArgumentException("Unknown textline delimiter: " + delimiter);
        }
    }

    // Resolves the configured encoding (defaulting to the platform charset and
    // writing the default back into the configuration) and validates it.
    private Charset getEncodingParameter(String type, Mina2Configuration configuration) {
        String encoding = configuration.getEncoding();
        if (encoding == null) {
            encoding = Charset.defaultCharset().name();
            // set in on configuration so its updated
            configuration.setEncoding(encoding);
            LOG.debug("{}: No encoding parameter using default charset: {}", type, encoding);
        }
        if (!Charset.isSupported(encoding)) {
            throw new IllegalArgumentException("The encoding: " + encoding + " is not supported");
        }

        return Charset.forName(encoding);
    }

    private void appendIoFiltersToChain(List<IoFilter> filters, DefaultIoFilterChainBuilder filterChain) {
        if (filters != null && filters.size() > 0) {
            for (IoFilter ioFilter : filters) {
                filterChain.addLast(ioFilter.getClass().getCanonicalName(), ioFilter);
            }
        }
    }

    /**
     * Handles response from session writes
     */
    private final class ResponseHandler extends IoHandlerAdapter {

        private Object message;
        private Throwable cause;
        private boolean messageReceived;

        // Cleared before each sync request so stale state from a previous
        // request on the same session cannot be mistaken for a reply.
        public void reset() {
            this.message = null;
            this.cause = null;
            this.messageReceived = false;
        }

        @Override
        public void messageReceived(IoSession ioSession, Object message) throws Exception {
            LOG.debug("Message received: {}", message);
            this.message = message;
            messageReceived = true;
            cause = null;
            countDown();
        }

        // Releases the producer thread blocked in doProcess(); latch may be null
        // when sync is off, hence the local snapshot and null check.
        protected void countDown() {
            CountDownLatch downLatch = latch;
            if (downLatch != null) {
                downLatch.countDown();
            }
        }

        @Override
        public void sessionClosed(IoSession session) throws Exception {
            if (sync && !messageReceived) {
                // sync=true (InOut mode) so we expected a message as reply but did not get one before the session is closed
                LOG.debug("Session closed but no message received from address: {}", address);
                // session was closed but no message received. This could be because the remote server had an internal error
                // and could not return a response. We should count down to stop waiting for a response
                countDown();
            }
        }

        @Override
        public void exceptionCaught(IoSession ioSession, Throwable cause) {
            LOG.error("Exception on receiving message from address: " + address
                      + " using connector: " + connector, cause);
            this.message = null;
            this.messageReceived = false;
            this.cause = cause;
            if (ioSession != null) {
                ioSession.close(true);
            }
        }

        public Throwable getCause() {
            return this.cause;
        }

        public Object getMessage() {
            return this.message;
        }

        public boolean isMessageReceived() {
            return messageReceived;
        }
    }
}
package com.fsck.k9.mail.store.webdav; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URISyntaxException; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import com.fsck.k9.mail.CertificateValidationException; import com.fsck.k9.mail.ConnectionSecurity; import com.fsck.k9.mail.Folder; import com.fsck.k9.mail.K9MailLib; import com.fsck.k9.mail.Message; import com.fsck.k9.mail.MessagingException; import com.fsck.k9.mail.ServerSettings; import com.fsck.k9.mail.filter.Base64; import com.fsck.k9.mail.store.RemoteStore; import com.fsck.k9.mail.store.StoreConfig; import javax.net.ssl.SSLException; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.CookieStore; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.protocol.ClientContext; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.BasicCookieStore; import org.apache.http.message.BasicNameValuePair; import org.apache.http.protocol.BasicHttpContext; import org.apache.http.protocol.HttpContext; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.XMLReader; import timber.log.Timber; import static com.fsck.k9.mail.K9MailLib.DEBUG_PROTOCOL_WEBDAV; import static com.fsck.k9.mail.helper.UrlEncodingHelper.decodeUtf8; /** * <pre> * Uses WebDAV formatted HTTP calls to an MS Exchange server to fetch email * 
and email information. * </pre> */ @SuppressWarnings("deprecation") public class WebDavStore extends RemoteStore { public static WebDavStoreSettings decodeUri(String uri) { return WebDavStoreUriDecoder.decode(uri); } public static String createUri(ServerSettings server) { return WebDavStoreUriCreator.create(server); } private ConnectionSecurity mConnectionSecurity; private String username; private String alias; private String password; private String baseUrl; private String hostname; private int port; private String path; private String formBasedAuthPath; private String mailboxPath; private final WebDavHttpClient.WebDavHttpClientFactory httpClientFactory; private WebDavHttpClient httpClient = null; private HttpContext httpContext = null; private String authString; private CookieStore authCookies = null; private short authenticationType = WebDavConstants.AUTH_TYPE_NONE; private String cachedLoginUrl; private Folder sendFolder = null; private Map<String, WebDavFolder> folderList = new HashMap<>(); public WebDavStore(StoreConfig storeConfig, WebDavHttpClient.WebDavHttpClientFactory clientFactory) throws MessagingException { super(storeConfig, null); httpClientFactory = clientFactory; WebDavStoreSettings settings; try { settings = WebDavStore.decodeUri(storeConfig.getStoreUri()); } catch (IllegalArgumentException e) { throw new MessagingException("Error while decoding store URI", e); } hostname = settings.host; port = settings.port; mConnectionSecurity = settings.connectionSecurity; username = settings.username; password = settings.password; alias = settings.alias; path = settings.path; formBasedAuthPath = settings.authPath; mailboxPath = settings.mailboxPath; if (path == null || path.equals("")) { path = "/Exchange"; } else if (!path.startsWith("/")) { path = "/" + path; } if (mailboxPath == null || mailboxPath.equals("")) { mailboxPath = "/" + alias; } else if (!mailboxPath.startsWith("/")) { mailboxPath = "/" + mailboxPath; } if (formBasedAuthPath != null && 
!formBasedAuthPath.equals("") && !formBasedAuthPath.startsWith("/")) { formBasedAuthPath = "/" + formBasedAuthPath; } // The URL typically looks like the following: "https://mail.domain.com/Exchange/alias". // The inbox path would look like: "https://mail.domain.com/Exchange/alias/Inbox". baseUrl = getRoot() + path + mailboxPath; authString = "Basic " + Base64.encode(username + ":" + password); } private String getRoot() { String root; if (mConnectionSecurity == ConnectionSecurity.SSL_TLS_REQUIRED) { root = "https"; } else { root = "http"; } root += "://" + hostname + ":" + port; return root; } HttpContext getHttpContext() { return httpContext; } short getAuthentication() { return authenticationType; } StoreConfig getStoreConfig() { return mStoreConfig; } @Override public void checkSettings() throws MessagingException { authenticate(); } @Override public List<? extends Folder> getPersonalNamespaces(boolean forceListAll) throws MessagingException { List<Folder> folderList = new LinkedList<>(); /* * We have to check authentication here so we have the proper URL stored */ getHttpClient(); /* * Firstly we get the "special" folders list (inbox, outbox, etc) * and setup the account accordingly */ Map<String, String> headers = new HashMap<>(); headers.put("Depth", "0"); headers.put("Brief", "t"); DataSet dataset = processRequest(this.baseUrl, "PROPFIND", getSpecialFoldersList(), headers); Map<String, String> specialFoldersMap = dataset.getSpecialFolderToUrl(); String folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_INBOX_FOLDER)); if (folderName != null) { mStoreConfig.setAutoExpandFolderName(folderName); mStoreConfig.setInboxFolderName(folderName); } folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_DRAFTS_FOLDER)); if (folderName != null) { mStoreConfig.setDraftsFolderName(folderName); } folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_TRASH_FOLDER)); if (folderName != null) { 
mStoreConfig.setTrashFolderName(folderName); } folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_SPAM_FOLDER)); if (folderName != null) { mStoreConfig.setSpamFolderName(folderName); } // K-9 Mail's outbox is a special local folder and different from Exchange/WebDAV's outbox. /* folderName = getFolderName(specialFoldersMap.get(DAV_MAIL_OUTBOX_FOLDER)); if (folderName != null) mAccount.setOutboxFolderName(folderName); */ folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_SENT_FOLDER)); if (folderName != null) { mStoreConfig.setSentFolderName(folderName); } /* * Next we get all the folders (including "special" ones) */ headers = new HashMap<>(); headers.put("Brief", "t"); dataset = processRequest(this.baseUrl, "SEARCH", getFolderListXml(), headers); String[] folderUrls = dataset.getHrefs(); for (String tempUrl : folderUrls) { WebDavFolder folder = createFolder(tempUrl); if (folder != null) { folderList.add(folder); } } return folderList; } /** * Creates a folder using the URL passed as parameter (only if it has not been * already created) and adds this to our store folder map. * * @param folderUrl * URL * * @return WebDAV remote folder */ private WebDavFolder createFolder(String folderUrl) { if (folderUrl == null) { return null; } WebDavFolder wdFolder = null; String folderName = getFolderName(folderUrl); if (folderName != null) { wdFolder = getFolder(folderName); if (wdFolder != null) { wdFolder.setUrl(folderUrl); } } // else: Unknown URL format => NO Folder created return wdFolder; } private String getFolderName(String folderUrl) { if (folderUrl == null) { return null; } // Here we extract the folder name starting from the complete url. 
// folderUrl is in the form http://mail.domain.com/exchange/username/foldername // so we need "foldername" which is the string after the fifth slash int folderSlash = -1; for (int j = 0; j < 5; j++) { folderSlash = folderUrl.indexOf('/', folderSlash + 1); if (folderSlash < 0) { break; } } if (folderSlash > 0) { String fullPathName; // Removes the final slash if present if (folderUrl.charAt(folderUrl.length() - 1) == '/') { fullPathName = folderUrl.substring(folderSlash + 1, folderUrl.length() - 1); } else { fullPathName = folderUrl.substring(folderSlash + 1); } // Decodes the url-encoded folder name (i.e. "My%20folder" => "My Folder" return decodeUtf8(fullPathName); } return null; } @Override public WebDavFolder getFolder(String name) { WebDavFolder folder = this.folderList.get(name); if (folder == null) { folder = new WebDavFolder(this, name); folderList.put(name, folder); } return folder; } private Folder getSendSpoolFolder() { if (sendFolder == null) { sendFolder = getFolder(WebDavConstants.DAV_MAIL_SEND_FOLDER); } return sendFolder; } @Override public boolean isMoveCapable() { return true; } @Override public boolean isCopyCapable() { return true; } private String getSpecialFoldersList() { return "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>" + "<propfind xmlns=\"DAV:\">" + "<prop>" + "<" + WebDavConstants.DAV_MAIL_INBOX_FOLDER + " xmlns=\"urn:schemas:httpmail:\"/>" + "<" + WebDavConstants.DAV_MAIL_DRAFTS_FOLDER + " xmlns=\"urn:schemas:httpmail:\"/>" + "<" + WebDavConstants.DAV_MAIL_OUTBOX_FOLDER + " xmlns=\"urn:schemas:httpmail:\"/>" + "<" + WebDavConstants.DAV_MAIL_SENT_FOLDER + " xmlns=\"urn:schemas:httpmail:\"/>" + "<" + WebDavConstants.DAV_MAIL_TRASH_FOLDER + " xmlns=\"urn:schemas:httpmail:\"/>" + "<" + WebDavConstants.DAV_MAIL_SPAM_FOLDER + " xmlns=\"urn:schemas:httpmail:\"/>" + // This should always be ##DavMailSubmissionURI## for which we already have a constant // "<sendmsg xmlns=\"urn:schemas:httpmail:\"/>" + "</prop>" + "</propfind>"; 
} /*************************************************************** * WebDAV XML Request body retrieval functions */ private String getFolderListXml() { return "<?xml version='1.0' ?>" + "<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n" + "SELECT \"DAV:uid\", \"DAV:ishidden\"\r\n" + " FROM SCOPE('deep traversal of \"" + this.baseUrl + "\"')\r\n" + " WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=True\r\n" + "</a:sql></a:searchrequest>\r\n"; } String getMessageCountXml(String messageState) { return "<?xml version='1.0' ?>" + "<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n" + "SELECT \"DAV:visiblecount\"\r\n" + " FROM \"\"\r\n" + " WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False AND \"urn:schemas:httpmail:read\"=" + messageState + "\r\n" + " GROUP BY \"DAV:ishidden\"\r\n" + "</a:sql></a:searchrequest>\r\n"; } String getMessageEnvelopeXml(String[] uids) { StringBuilder buffer = new StringBuilder(200); buffer.append("<?xml version='1.0' ?>"); buffer.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); buffer.append("SELECT \"DAV:uid\", \"DAV:getcontentlength\","); buffer.append(" \"urn:schemas:mailheader:mime-version\","); buffer.append(" \"urn:schemas:mailheader:content-type\","); buffer.append(" \"urn:schemas:mailheader:subject\","); buffer.append(" \"urn:schemas:mailheader:date\","); buffer.append(" \"urn:schemas:mailheader:thread-topic\","); buffer.append(" \"urn:schemas:mailheader:thread-index\","); buffer.append(" \"urn:schemas:mailheader:from\","); buffer.append(" \"urn:schemas:mailheader:to\","); buffer.append(" \"urn:schemas:mailheader:in-reply-to\","); buffer.append(" \"urn:schemas:mailheader:cc\","); buffer.append(" \"urn:schemas:httpmail:read\""); buffer.append(" \r\n"); buffer.append(" FROM \"\"\r\n"); buffer.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False AND "); for (int i = 0, count = uids.length; i < count; i++) { if (i != 0) { buffer.append(" OR "); } buffer.append(" \"DAV:uid\"='").append(uids[i]).append("' "); } 
buffer.append("\r\n"); buffer.append("</a:sql></a:searchrequest>\r\n"); return buffer.toString(); } String getMessagesXml() { return "<?xml version='1.0' ?>" + "<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n" + "SELECT \"DAV:uid\"\r\n" + " FROM \"\"\r\n" + " WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False\r\n" + "</a:sql></a:searchrequest>\r\n"; } String getMessageUrlsXml(String[] uids) { StringBuilder buffer = new StringBuilder(600); buffer.append("<?xml version='1.0' ?>"); buffer.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); buffer.append("SELECT \"urn:schemas:httpmail:read\", \"DAV:uid\"\r\n"); buffer.append(" FROM \"\"\r\n"); buffer.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False AND "); for (int i = 0, count = uids.length; i < count; i++) { if (i != 0) { buffer.append(" OR "); } buffer.append(" \"DAV:uid\"='").append(uids[i]).append("' "); } buffer.append("\r\n"); buffer.append("</a:sql></a:searchrequest>\r\n"); return buffer.toString(); } String getMessageFlagsXml(String[] uids) throws MessagingException { if (uids.length == 0) { throw new MessagingException("Attempt to get flags on 0 length array for uids"); } StringBuilder buffer = new StringBuilder(200); buffer.append("<?xml version='1.0' ?>"); buffer.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); buffer.append("SELECT \"urn:schemas:httpmail:read\", \"DAV:uid\"\r\n"); buffer.append(" FROM \"\"\r\n"); buffer.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False AND "); for (int i = 0, count = uids.length; i < count; i++) { if (i != 0) { buffer.append(" OR "); } buffer.append(" \"DAV:uid\"='").append(uids[i]).append("' "); } buffer.append("\r\n"); buffer.append("</a:sql></a:searchrequest>\r\n"); return buffer.toString(); } String getMarkMessagesReadXml(String[] urls, boolean read) { StringBuilder buffer = new StringBuilder(600); buffer.append("<?xml version='1.0' ?>\r\n"); buffer.append("<a:propertyupdate xmlns:a='DAV:' xmlns:b='urn:schemas:httpmail:'>\r\n"); 
buffer.append("<a:target>\r\n"); for (String url : urls) { buffer.append(" <a:href>").append(url).append("</a:href>\r\n"); } buffer.append("</a:target>\r\n"); buffer.append("<a:set>\r\n"); buffer.append(" <a:prop>\r\n"); buffer.append(" <b:read>").append(read ? "1" : "0").append("</b:read>\r\n"); buffer.append(" </a:prop>\r\n"); buffer.append("</a:set>\r\n"); buffer.append("</a:propertyupdate>\r\n"); return buffer.toString(); } // For flag: // http://www.devnewsgroups.net/group/microsoft.public.exchange.development/topic27175.aspx // "<m:0x10900003>1</m:0x10900003>" & _ String getMoveOrCopyMessagesReadXml(String[] urls, boolean isMove) { String action = (isMove ? "move" : "copy"); StringBuilder buffer = new StringBuilder(600); buffer.append("<?xml version='1.0' ?>\r\n"); buffer.append("<a:").append(action).append(" xmlns:a='DAV:' xmlns:b='urn:schemas:httpmail:'>\r\n"); buffer.append("<a:target>\r\n"); for (String url : urls) { buffer.append(" <a:href>").append(url).append("</a:href>\r\n"); } buffer.append("</a:target>\r\n"); buffer.append("</a:").append(action).append(">\r\n"); return buffer.toString(); } private boolean authenticate() throws MessagingException { try { if (authenticationType == WebDavConstants.AUTH_TYPE_NONE) { ConnectionInfo info = doInitialConnection(); if (info.requiredAuthType == WebDavConstants.AUTH_TYPE_BASIC) { HttpGeneric request = new HttpGeneric(baseUrl); request.setMethod("GET"); request.setHeader("Authorization", authString); WebDavHttpClient httpClient = getHttpClient(); HttpResponse response = httpClient.executeOverride(request, httpContext); int statusCode = response.getStatusLine().getStatusCode(); if (statusCode >= 200 && statusCode < 300) { authenticationType = WebDavConstants.AUTH_TYPE_BASIC; } else if (statusCode == 401) { throw new MessagingException("Invalid username or password for authentication."); } else { throw new MessagingException("Error with code " + response.getStatusLine().getStatusCode() + " during request 
processing: " + response.getStatusLine().toString()); } } else if (info.requiredAuthType == WebDavConstants.AUTH_TYPE_FORM_BASED) { performFormBasedAuthentication(info); } } else if (authenticationType == WebDavConstants.AUTH_TYPE_BASIC) { // Nothing to do, we authenticate with every request when // using basic authentication. } else if (authenticationType == WebDavConstants.AUTH_TYPE_FORM_BASED) { // Our cookie expired, re-authenticate. performFormBasedAuthentication(null); } } catch (IOException ioe) { Timber.e(ioe, "Error during authentication"); throw new MessagingException("Error during authentication", ioe); } return authenticationType != WebDavConstants.AUTH_TYPE_NONE; } private ConnectionInfo doInitialConnection() throws MessagingException { // For our initial connection we are sending an empty GET request to // the configured URL, which should be in the following form: // https://mail.server.com/Exchange/alias // // Possible status codes include: // 401 - the server uses basic authentication // 30x - the server is trying to redirect us to an OWA login // 20x - success // // The latter two indicate form-based authentication. ConnectionInfo info = new ConnectionInfo(); WebDavHttpClient httpClient = getHttpClient(); HttpGeneric request = new HttpGeneric(baseUrl); request.setMethod("GET"); try { HttpResponse response = httpClient.executeOverride(request, httpContext); info.statusCode = response.getStatusLine().getStatusCode(); if (info.statusCode == 401) { // 401 is the "Unauthorized" status code, meaning the server wants // an authentication header for basic authentication. info.requiredAuthType = WebDavConstants.AUTH_TYPE_BASIC; } else if ((info.statusCode >= 200 && info.statusCode < 300) || // Success (info.statusCode >= 300 && info.statusCode < 400) || // Redirect (info.statusCode == 440)) { // Unauthorized // We will handle all 3 situations the same. First we take an educated // guess at where the authorization DLL is located. 
If this is this // doesn't work, then we'll use the redirection URL for OWA login given // to us by exchange. We can use this to scrape the location of the // authorization URL. info.requiredAuthType = WebDavConstants.AUTH_TYPE_FORM_BASED; if (formBasedAuthPath != null && !formBasedAuthPath.equals("")) { // The user specified their own authentication path, use that. info.guessedAuthUrl = getRoot() + formBasedAuthPath; } else { // Use the default path to the authentication dll. info.guessedAuthUrl = getRoot() + "/exchweb/bin/auth/owaauth.dll"; } // Determine where the server is trying to redirect us. Header location = response.getFirstHeader("Location"); if (location != null) { info.redirectUrl = location.getValue(); } } else { throw new IOException("Error with code " + info.statusCode + " during request processing: " + response.getStatusLine().toString()); } } catch (SSLException e) { throw new CertificateValidationException(e.getMessage(), e); } catch (IOException ioe) { Timber.e(ioe, "IOException during initial connection"); throw new MessagingException("IOException", ioe); } return info; } private void performFormBasedAuthentication(ConnectionInfo info) throws IOException, MessagingException { // Clear out cookies from any previous authentication. if (authCookies != null) { authCookies.clear(); } WebDavHttpClient httpClient = getHttpClient(); String loginUrl; if (info != null) { loginUrl = info.guessedAuthUrl; } else if (cachedLoginUrl != null && !cachedLoginUrl.equals("")) { loginUrl = cachedLoginUrl; } else { throw new MessagingException("No valid login URL available for form-based authentication."); } HttpGeneric request = new HttpGeneric(loginUrl); request.setMethod("POST"); // Build the POST data. 
List<BasicNameValuePair> pairs = new ArrayList<>(); pairs.add(new BasicNameValuePair("destination", baseUrl)); pairs.add(new BasicNameValuePair("username", username)); pairs.add(new BasicNameValuePair("password", password)); pairs.add(new BasicNameValuePair("flags", "0")); pairs.add(new BasicNameValuePair("SubmitCreds", "Log+On")); pairs.add(new BasicNameValuePair("forcedownlevel", "0")); pairs.add(new BasicNameValuePair("trusted", "0")); UrlEncodedFormEntity formEntity = new UrlEncodedFormEntity(pairs); request.setEntity(formEntity); HttpResponse response = httpClient.executeOverride(request, httpContext); boolean authenticated = testAuthenticationResponse(response); if (!authenticated) { // Check the response from the authentication request above for a form action. String formAction = findFormAction(WebDavHttpClient.getUngzippedContent(response.getEntity())); if (formAction == null) { // If there is no form action, try using our redirect URL from the initial connection. if (info != null && info.redirectUrl != null && !info.redirectUrl.equals("")) { loginUrl = info.redirectUrl; request = new HttpGeneric(loginUrl); request.setMethod("GET"); response = httpClient.executeOverride(request, httpContext); formAction = findFormAction(WebDavHttpClient.getUngzippedContent(response.getEntity())); } } if (formAction != null) { try { URI formActionUri = new URI(formAction); URI loginUri = new URI(loginUrl); if (formActionUri.isAbsolute()) { // The form action is an absolute URL, just use it. loginUrl = formAction; } else { // Append the form action to our current URL, minus the file name. String urlPath; if (formAction.startsWith("/")) { urlPath = formAction; } else { urlPath = loginUri.getPath(); int lastPathPos = urlPath.lastIndexOf('/'); if (lastPathPos > -1) { urlPath = urlPath.substring(0, lastPathPos + 1); urlPath = urlPath.concat(formAction); } } // Reconstruct the login URL based on the original login URL and the form action. 
URI finalUri = new URI(loginUri.getScheme(), loginUri.getUserInfo(), loginUri.getHost(), loginUri.getPort(), urlPath, null, null); loginUrl = finalUri.toString(); } // Retry the login using our new URL. request = new HttpGeneric(loginUrl); request.setMethod("POST"); request.setEntity(formEntity); response = httpClient.executeOverride(request, httpContext); authenticated = testAuthenticationResponse(response); } catch (URISyntaxException e) { Timber.e(e, "URISyntaxException caught"); throw new MessagingException("URISyntaxException caught", e); } } else { throw new MessagingException("A valid URL for Exchange authentication could not be found."); } } if (authenticated) { authenticationType = WebDavConstants.AUTH_TYPE_FORM_BASED; cachedLoginUrl = loginUrl; } else { throw new MessagingException("Invalid credentials provided for authentication."); } } private String findFormAction(InputStream istream) throws IOException { String formAction = null; BufferedReader reader = new BufferedReader(new InputStreamReader(istream), 4096); String tempText; //TODO: Use proper HTML parsing for this // Read line by line until we find something like: <form action="owaauth.dll"...>. tempText = reader.readLine(); while (formAction == null) { if (tempText.contains(" action=")) { String[] actionParts = tempText.split(" action="); if (actionParts.length > 1 && actionParts[1].length() > 1) { char openQuote = actionParts[1].charAt(0); int closePos = actionParts[1].indexOf(openQuote, 1); if (closePos > 1) { formAction = actionParts[1].substring(1, closePos); // Remove any GET parameters. 
int quesPos = formAction.indexOf('?'); if (quesPos != -1) { formAction = formAction.substring(0, quesPos); } } } } tempText = reader.readLine(); } return formAction; } private boolean testAuthenticationResponse(HttpResponse response) throws MessagingException { boolean authenticated = false; int statusCode = response.getStatusLine().getStatusCode(); // Exchange 2007 will return a 302 status code no matter what. if (((statusCode >= 200 && statusCode < 300) || statusCode == 302) && authCookies != null && !authCookies.getCookies().isEmpty()) { // We may be authenticated, we need to send a test request to know for sure. // Exchange 2007 adds the same cookies whether the username and password were valid or not. ConnectionInfo info = doInitialConnection(); if (info.statusCode >= 200 && info.statusCode < 300) { authenticated = true; } else if (info.statusCode == 302) { // If we are successfully authenticated, Exchange will try to redirect us to our OWA inbox. // Otherwise, it will redirect us to a logon page. // Our URL is in the form: https://hostname:port/Exchange/alias. // The redirect is in the form: https://hostname:port/owa/alias. // Do a simple replace and compare the resulting strings. 
try { String thisPath = new URI(baseUrl).getPath(); String redirectPath = new URI(info.redirectUrl).getPath(); if (!thisPath.endsWith("/")) { thisPath = thisPath.concat("/"); } if (!redirectPath.endsWith("/")) { redirectPath = redirectPath.concat("/"); } if (redirectPath.equalsIgnoreCase(thisPath)) { authenticated = true; } else { int found = thisPath.indexOf('/', 1); if (found != -1) { String replace = thisPath.substring(0, found + 1); redirectPath = redirectPath.replace("/owa/", replace); if (redirectPath.equalsIgnoreCase(thisPath)) { authenticated = true; } } } } catch (URISyntaxException e) { Timber.e(e, "URISyntaxException"); throw new MessagingException("URISyntaxException caught", e); } } } return authenticated; } public CookieStore getAuthCookies() { return authCookies; } public String getAlias() { return alias; } public String getUrl() { return baseUrl; } public WebDavHttpClient getHttpClient() throws MessagingException { if (httpClient == null) { httpClient = httpClientFactory.create(); // Disable automatic redirects on the http client. httpClient.getParams().setBooleanParameter("http.protocol.handle-redirects", false); // Setup a cookie store for forms-based authentication. 
httpContext = new BasicHttpContext(); authCookies = new BasicCookieStore(); httpContext.setAttribute(ClientContext.COOKIE_STORE, authCookies); SchemeRegistry reg = httpClient.getConnectionManager().getSchemeRegistry(); try { Scheme s = new Scheme("https", new WebDavSocketFactory(hostname, 443), 443); reg.register(s); } catch (NoSuchAlgorithmException nsa) { Timber.e(nsa, "NoSuchAlgorithmException in getHttpClient"); throw new MessagingException("NoSuchAlgorithmException in getHttpClient: ", nsa); } catch (KeyManagementException kme) { Timber.e(kme, "KeyManagementException in getHttpClient"); throw new MessagingException("KeyManagementException in getHttpClient: ", kme); } } return httpClient; } protected InputStream sendRequest(String url, String method, StringEntity messageBody, Map<String, String> headers, boolean tryAuth) throws MessagingException { if (url == null || method == null) { return null; } WebDavHttpClient httpClient = getHttpClient(); try { int statusCode; HttpGeneric httpMethod = new HttpGeneric(url); HttpResponse response; HttpEntity entity; if (messageBody != null) { httpMethod.setEntity(messageBody); } if (headers != null) { for (Map.Entry<String, String> entry : headers.entrySet()) { httpMethod.setHeader(entry.getKey(), entry.getValue()); } } if (authenticationType == WebDavConstants.AUTH_TYPE_NONE) { if (!tryAuth || !authenticate()) { throw new MessagingException("Unable to authenticate in sendRequest()."); } } else if (authenticationType == WebDavConstants.AUTH_TYPE_BASIC) { httpMethod.setHeader("Authorization", authString); } httpMethod.setMethod(method); response = httpClient.executeOverride(httpMethod, httpContext); statusCode = response.getStatusLine().getStatusCode(); entity = response.getEntity(); if (statusCode == 401) { throw new MessagingException("Invalid username or password for Basic authentication."); } else if (statusCode == 440) { if (tryAuth && authenticationType == WebDavConstants.AUTH_TYPE_FORM_BASED) { // Our cookie expired, 
re-authenticate. performFormBasedAuthentication(null); sendRequest(url, method, messageBody, headers, false); } else { throw new MessagingException("Authentication failure in sendRequest()."); } } else if (statusCode == 302) { handleUnexpectedRedirect(response, url); } else if (statusCode < 200 || statusCode >= 300) { throw new IOException("Error with code " + statusCode + " during request processing: " + response.getStatusLine().toString()); } if (entity != null) { return WebDavHttpClient.getUngzippedContent(entity); } } catch (UnsupportedEncodingException uee) { Timber.e(uee, "UnsupportedEncodingException: "); throw new MessagingException("UnsupportedEncodingException", uee); } catch (IOException ioe) { Timber.e(ioe, "IOException: "); throw new MessagingException("IOException", ioe); } return null; } private void handleUnexpectedRedirect(HttpResponse response, String url) throws IOException { if (response.getFirstHeader("Location") != null) { // TODO: This may indicate lack of authentication or may alternatively be something we should follow throw new IOException("Unexpected redirect during request processing. " + "Expected response from: " + url + " but told to redirect to:" + response.getFirstHeader("Location").getValue()); } else { throw new IOException("Unexpected redirect during request processing. " + "Expected response from: " + url + " but not told where to redirect to"); } } public String getAuthString() { return authString; } /** * Performs an HttpRequest to the supplied url using the supplied method. messageBody and headers are optional as * not all requests will need them. There are two signatures to support calls that don't require parsing of the * response. 
*/ DataSet processRequest(String url, String method, String messageBody, Map<String, String> headers) throws MessagingException { return processRequest(url, method, messageBody, headers, true); } DataSet processRequest(String url, String method, String messageBody, Map<String, String> headers, boolean needsParsing) throws MessagingException { DataSet dataset = new DataSet(); if (K9MailLib.isDebug() && DEBUG_PROTOCOL_WEBDAV) { Timber.v("processRequest url = '%s', method = '%s', messageBody = '%s'", url, method, messageBody); } if (url == null || method == null) { return dataset; } getHttpClient(); try { StringEntity messageEntity = null; if (messageBody != null) { messageEntity = new StringEntity(messageBody); messageEntity.setContentType("text/xml"); } InputStream istream = sendRequest(url, method, messageEntity, headers, true); if (istream != null && needsParsing) { try { SAXParserFactory spf = SAXParserFactory.newInstance(); spf.setNamespaceAware(true); //This should be a no-op on Android, but makes the tests work SAXParser sp = spf.newSAXParser(); XMLReader xr = sp.getXMLReader(); WebDavHandler myHandler = new WebDavHandler(); xr.setContentHandler(myHandler); xr.parse(new InputSource(istream)); dataset = myHandler.getDataSet(); } catch (SAXException se) { Timber.e(se, "SAXException in processRequest()"); throw new MessagingException("SAXException in processRequest() ", se); } catch (ParserConfigurationException pce) { Timber.e(pce, "ParserConfigurationException in processRequest()"); throw new MessagingException("ParserConfigurationException in processRequest() ", pce); } istream.close(); } } catch (UnsupportedEncodingException uee) { Timber.e(uee, "UnsupportedEncodingException: "); throw new MessagingException("UnsupportedEncodingException in processRequest() ", uee); } catch (IOException ioe) { Timber.e(ioe, "IOException: "); throw new MessagingException("IOException in processRequest() ", ioe); } return dataset; } @Override public boolean isSendCapable() { 
return true; } @Override public void sendMessages(List<? extends Message> messages) throws MessagingException { WebDavFolder tmpFolder = getFolder(mStoreConfig.getDraftsFolderName()); try { tmpFolder.open(Folder.OPEN_MODE_RW); List<? extends Message> retMessages = tmpFolder.appendWebDavMessages(messages); tmpFolder.moveMessages(retMessages, getSendSpoolFolder()); } finally { if (tmpFolder != null) { tmpFolder.close(); } } } }
/* Derby - Class com.pivotal.gemfirexd.internal.impl.sql.execute.IndexChanger Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* * Changes for GemFireXD distributed data platform (some marked by "GemStone changes") * * Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. 
*/ package com.pivotal.gemfirexd.internal.impl.sql.execute; import java.util.Properties; // GemStone changes BEGIN import com.pivotal.gemfirexd.internal.engine.access.index.MemIndexScanController; import com.pivotal.gemfirexd.internal.engine.distributed.utils.GemFireXDUtils; // GemStone changes END import com.pivotal.gemfirexd.internal.catalog.UUID; import com.pivotal.gemfirexd.internal.iapi.error.StandardException; import com.pivotal.gemfirexd.internal.iapi.reference.SQLState; import com.pivotal.gemfirexd.internal.iapi.services.i18n.MessageService; import com.pivotal.gemfirexd.internal.iapi.services.io.FormatableBitSet; import com.pivotal.gemfirexd.internal.iapi.services.monitor.Monitor; import com.pivotal.gemfirexd.internal.iapi.services.sanity.SanityManager; import com.pivotal.gemfirexd.internal.iapi.sql.Activation; import com.pivotal.gemfirexd.internal.iapi.sql.ResultDescription; import com.pivotal.gemfirexd.internal.iapi.sql.conn.LanguageConnectionContext; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.ConglomerateDescriptor; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.ConstraintDescriptor; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.DataDictionary; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.IndexRowGenerator; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.TableDescriptor; import com.pivotal.gemfirexd.internal.iapi.sql.execute.CursorResultSet; import com.pivotal.gemfirexd.internal.iapi.sql.execute.ExecIndexRow; import com.pivotal.gemfirexd.internal.iapi.sql.execute.ExecRow; import com.pivotal.gemfirexd.internal.iapi.store.access.ConglomerateController; import com.pivotal.gemfirexd.internal.iapi.store.access.DynamicCompiledOpenConglomInfo; import com.pivotal.gemfirexd.internal.iapi.store.access.ScanController; import com.pivotal.gemfirexd.internal.iapi.store.access.StaticCompiledOpenConglomInfo; import com.pivotal.gemfirexd.internal.iapi.store.access.TransactionController; import 
com.pivotal.gemfirexd.internal.iapi.types.DataValueDescriptor;
import com.pivotal.gemfirexd.internal.iapi.types.RowLocation;

/**
  Perform Index maintenance associated with DML operations for a single index.
*/
public class IndexChanger
{
	private IndexRowGenerator irg;
	//Index Conglomerate ID
	private long indexCID;
	private DynamicCompiledOpenConglomInfo indexDCOCI;
	private StaticCompiledOpenConglomInfo indexSCOCI;
	private String indexName;
	private ConglomerateController baseCC;
	private TransactionController tc;
	private int lockMode;
	private FormatableBitSet baseRowReadMap;

	private ConglomerateController indexCC = null;
	private ScanController indexSC = null;

	//
	//Index rows used by this module to perform DML.
	private ExecIndexRow ourIndexRow = null;
	private ExecIndexRow ourUpdatedIndexRow = null;

	// Holds deferred inserts for unique indexes; may be supplied by the caller.
	private TemporaryRowHolderImpl rowHolder = null;
	private boolean rowHolderPassedIn;
	private int isolationLevel;
	private final Activation activation;
	// True when this changer owns (opens/closes) its own index ScanController;
	// false when the scan is shared via the activation.
	private boolean ownIndexSC = true;

	/**
	  Create an IndexChanger

	  @param irg the IndexRowGenerator for the index.
	  @param indexCID the conglomerate id for the index.
	  @param indexSCOCI the SCOCI for the indexes.
	  @param indexDCOCI the DCOCI for the indexes.
	  @param indexName the name of the index (used for error reporting); may be null.
	  @param baseCC the ConglomerateController for the base table.
	  @param tc The TransactionController
	  @param lockMode The lock mode (granularity) to use
	  @param baseRowReadMap Map of columns read in. 1 based.
	  @param isolationLevel Isolation level to use.
	  @param activation Current activation

	  @exception StandardException Thrown on error
	  */
	public IndexChanger
	(
		IndexRowGenerator irg,
		long indexCID,
		StaticCompiledOpenConglomInfo indexSCOCI,
		DynamicCompiledOpenConglomInfo indexDCOCI,
		String indexName,
		ConglomerateController baseCC,
		TransactionController tc,
		int lockMode,
		FormatableBitSet baseRowReadMap,
		int isolationLevel,
		Activation activation
	)
		throws StandardException
	{
		this.irg = irg;
		this.indexCID = indexCID;
		this.indexSCOCI = indexSCOCI;
		this.indexDCOCI = indexDCOCI;
		this.baseCC = baseCC;
		this.tc = tc;
		this.lockMode = lockMode;
		this.baseRowReadMap = baseRowReadMap;
		this.rowHolderPassedIn = false;
		this.isolationLevel = isolationLevel;
		this.activation = activation;
		this.indexName = indexName;

		// activation will be null when called from DataDictionary
		if (activation != null &&
			activation.getIndexConglomerateNumber() == indexCID)
		{
			// The activation already holds an open scan on this index; reuse it.
			ownIndexSC = false;
		}

		if (SanityManager.DEBUG)
		{
			SanityManager.ASSERT(tc != null,
				"TransactionController argument to constructor is null");
			SanityManager.ASSERT(irg != null,
				"IndexRowGenerator argument to constructor is null");
		}
	}

	/**
	 * Set the row holder for this changer to use.
	 * If the row holder is set, it wont bother
	 * saving copies of rows needed for deferred
	 * processing.  Also, it will never close the
	 * passed in rowHolder.
	 *
	 * @param rowHolder the row holder
	 */
	public void setRowHolder(TemporaryRowHolderImpl rowHolder)
	{
		this.rowHolder = rowHolder;
		rowHolderPassedIn = (rowHolder != null);
	}

	/**
	 * Propagate the heap's ConglomerateController to
	 * this index changer.
	 *
	 * @param baseCC The heap's ConglomerateController.
	 */
	public void setBaseCC(ConglomerateController baseCC)
	{
		this.baseCC = baseCC;
	}

	/**
	  Set the column values for 'ourIndexRow' to refer to
	  a base table row and location provided by the caller.
	  Lazily allocates the index row template on first use.

	  @param baseRow a base table row.
	  @param baseRowLoc baseRow's location
	  @exception StandardException Thrown on error
	  */
	private void setOurIndexRow(ExecRow baseRow, RowLocation baseRowLoc)
		throws StandardException
	{
		if (ourIndexRow == null)
			ourIndexRow = irg.getIndexRowTemplate();

		irg.getIndexRow(baseRow, baseRowLoc, ourIndexRow, baseRowReadMap);
	}

	/**
	  Set the column values for 'ourUpdatedIndexRow' to refer to
	  a base table row and location provided by the caller.
	  Lazily allocates the index row template on first use.

	  @param baseRow a base table row.
	  @param baseRowLoc baseRow's location
	  @exception StandardException Thrown on error
	  */
	private void setOurUpdatedIndexRow(ExecRow baseRow, RowLocation baseRowLoc)
		throws StandardException
	{
		if (ourUpdatedIndexRow == null)
			ourUpdatedIndexRow = irg.getIndexRowTemplate();

		irg.getIndexRow(baseRow, baseRowLoc, ourUpdatedIndexRow, baseRowReadMap);
	}

	/**
	 * Determine whether or not any columns in the current index
	 * row are being changed by the update.  No need to update the
	 * index if no columns changed.
	 *
	 * @return true if any index column value differs between the old and
	 *         new index rows, false otherwise.
	 *
	 * @exception StandardException Thrown on error
	 */
	private boolean indexRowChanged()
		throws StandardException
	{
		int numColumns = ourIndexRow.nColumns();
		for (int index = 1; index <= numColumns; index++)
		{
			DataValueDescriptor oldOrderable = ourIndexRow.getColumn(index);
			DataValueDescriptor newOrderable = ourUpdatedIndexRow.getColumn(index);
			if (! (oldOrderable.compare(DataValueDescriptor.ORDER_OP_EQUALS, newOrderable, true, true)))
			{
				return true;
			}
		}
		return false;
	}

	/**
	  Position our index scan to 'ourIndexRow'.

	  <P>This creates the scan the first time it is called.

	  @exception StandardException Thrown on error
	  */
	private void setScan()
		throws StandardException
	{
		/* Get the SC from the activation if re-using */
		if (! ownIndexSC)
		{
			indexSC = activation.getIndexScanController();
		}
		else if (indexSC == null)
		{
			// NOTE(review): templateBaseRowLocation is never used below — looks
			// like a leftover; confirm before removing.
			RowLocation templateBaseRowLocation = baseCC.newRowLocationTemplate();
			/* DataDictionary doesn't have compiled info */
			if (indexSCOCI == null)
			{
				indexSC =
					tc.openScan(
						indexCID,
						false, /* hold */
						TransactionController.OPENMODE_FORUPDATE, /* forUpdate */
						lockMode,
						isolationLevel,
						(FormatableBitSet)null, /* all fields */
						ourIndexRow.getRowArray(), /* startKeyValue */
						ScanController.GE, /* startSearchOp */
						null, /* qualifier */
						ourIndexRow.getRowArray(), /* stopKeyValue */
						ScanController.GT /* stopSearchOp */
						// GemStone changes BEGIN
						, null
						// GemStone changes END
						);
			}
			else
			{
				indexSC =
					tc.openCompiledScan(
						false, /* hold */
						TransactionController.OPENMODE_FORUPDATE, /* forUpdate */
						lockMode,
						isolationLevel,
						(FormatableBitSet)null, /* all fields */
						ourIndexRow.getRowArray(), /* startKeyValue */
						ScanController.GE, /* startSearchOp */
						null, /* qualifier */
						ourIndexRow.getRowArray(), /* stopKeyValue */
						ScanController.GT, /* stopSearchOp */
						indexSCOCI,
						indexDCOCI
						);
			}
		}
		else
		{
			// Scan already open: reposition it to the current index row.
			indexSC.reopenScan(
				ourIndexRow.getRowArray(), /* startKeyValue */
				ScanController.GE, /* startSearchOperator */
				null, /* qualifier */
				ourIndexRow.getRowArray(), /* stopKeyValue */
				ScanController.GT /* stopSearchOperator */,
				// GemStone changes BEGIN
				null
				// GemStone changes END
				);
		}
	}

	/**
	  Close our index Conglomerate Controller
	  */
	private void closeIndexCC()
		throws StandardException
	{
		if (indexCC != null)
			indexCC.close();
		indexCC = null;
	}

	/**
	  Close our index ScanController.
	  */
	private void closeIndexSC()
		throws StandardException
	{
		/* Only consider closing index SC if we own it. */
		if (ownIndexSC && indexSC != null)
		{
			indexSC.close();
			indexSC = null;
		}
	}

	/**
	  Delete a row from our index. This assumes our index ScanController
	  is positioned before the row by setScan if we own the SC, otherwise
	  it is positioned on the row by the underlying index scan.

	  <P>This verifies the row exists and is unique.

	  @exception StandardException Thrown on error
	  */
	private void doDelete()
		throws StandardException
	{
		if (ownIndexSC)
		{
			if (! indexSC.next())
			{
				// This means that the entry for the index does not exist, this
				// is a serious problem with the index.  Past fixed problems
				// like track 3703 can leave db's in the field with this problem
				// even though the bug in the code which caused it has long
				// since been fixed.  Then the problem can surface months later
				// when the customer attempts to upgrade.  By "ignoring" the
				// missing row here the problem is automatically "fixed" and
				// since the code is trying to delete the row anyway it doesn't
				// seem like such a bad idea.  It also then gives a tool to
				// support to be able to fix some system catalog problems where
				// they can delete the base rows by dropping the system objects
				// like stored statements.
				//Gemstone changes BEGIN
				if (GemFireXDUtils.TracePersistIndex)
				{
					((MemIndexScanController)indexSC).dumpIndex(null);
				}
				// Gemstone changes END.
				// NOTE(review): the if below is unbraced, so the dumpIndex(null)
				// call that follows always executes (in DEBUG builds THROWASSERT
				// throws first, so it effectively runs only in non-debug builds).
				// Confirm this is the intended GemStone behavior.
				if (SanityManager.DEBUG)
					SanityManager.THROWASSERT(
						"Index row "+RowUtil.toString(ourIndexRow)+
						" not found in conglomerateid " + indexCID +
						// GemStone changes BEGIN
						// added size of index
						"Current scan = " + indexSC +
						" the size of the index : "+((MemIndexScanController)indexSC).sizeOfIndex());
				((MemIndexScanController)indexSC).dumpIndex(null);
				// GemStone changes END

				Object[] args = new Object[2];
				args[0] = ourIndexRow.getRowArray()[ourIndexRow.getRowArray().length - 1];
				// GemStone changes BEGIN
				// changed to use valueOf()
				args[1] = Long.valueOf(indexCID);
				/* (original code)
				args[1] = new Long(indexCID);
				*/
				// GemStone changes END

				Monitor.getStream().println(MessageService.getCompleteMessage(
					SQLState.LANG_IGNORE_MISSING_INDEX_ROW_DURING_DELETE,
					args));

				// just return indicating the row has been deleted.
				return;
			}
		}
		indexSC.delete();
	}

	/**
	  Insert a row into our index.

	  <P>This opens our index ConglomerateController the first time it
	  is called.

	  @exception StandardException Thrown on error
	  */
	private void doInsert()
		throws StandardException
	{
		insertAndCheckDups(ourIndexRow);
	}

	/**
	  Insert a row into the temporary conglomerate

	  <P>This opens our deferred ConglomerateController the first time it
	  is called.

	  @exception StandardException Thrown on error
	  */
	private void doDeferredInsert()
		throws StandardException
	{
		if (rowHolder == null)
		{
			Properties properties = new Properties();

			// Get the properties on the index
			openIndexCC().getInternalTablePropertySet(properties);

			/*
			** Create our row holder.  it is ok to skip passing
			** in the result description because if we don't already
			** have a row holder, then we are the only user of the
			** row holder (the description is needed when the row
			** holder is going to be handed to users for triggers).
			*/
			rowHolder = new TemporaryRowHolderImpl(activation, properties);
		}

		/*
		** If the user of the IndexChanger already
		** had a row holder, then we don't need to
		** bother saving deferred inserts -- they
		** have already done so.
		*/
		if (!rowHolderPassedIn)
		{
			rowHolder.insert(ourIndexRow);
		}
	}

	/**
	 * Insert the given row into the given conglomerate and check for duplicate
	 * key error.
	 *
	 * @param row	The row to insert
	 *
	 * @exception StandardException		Thrown on duplicate key error
	 */
	private void insertAndCheckDups(ExecIndexRow row)
		throws StandardException
	{
		openIndexCC();

		int insertStatus = indexCC.insert(row.getRowArray());

		if (insertStatus == ConglomerateController.ROWISDUPLICATE)
		{
			/*
			** We have a duplicate key error.
			*/
			String indexOrConstraintName = indexName;
			// now get table name, and constraint name if needed
			LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
			DataDictionary dd = lcc.getDataDictionary();
			//get the descriptors
			ConglomerateDescriptor cd = dd.getConglomerateDescriptor(indexCID);

			UUID tableID = cd.getTableID();
			TableDescriptor td = dd.getTableDescriptor(tableID);
			String tableName = td.getName();

			if (indexOrConstraintName == null) // no index name passed in
			{
				// Resolve the constraint backing this index so the error
				// message names the constraint rather than the conglomerate.
				ConstraintDescriptor conDesc = dd.getConstraintDescriptor(td,
					cd.getUUID());
				indexOrConstraintName = conDesc.getConstraintName();
			}

			StandardException se =
				StandardException.newException(
					SQLState.LANG_DUPLICATE_KEY_CONSTRAINT, indexOrConstraintName, tableName);
			throw se;
		}
		if (SanityManager.DEBUG)
		{
			if (insertStatus != 0)
			{
				SanityManager.THROWASSERT("Unknown insert status " + insertStatus);
			}
		}
	}

	/**
	 * Open the ConglomerateController for this index if it isn't open yet.
	 *
	 * @return The ConglomerateController for this index.
	 *
	 * @exception StandardException		Thrown on error
	 */
	private ConglomerateController openIndexCC()
		throws StandardException
	{
		if (indexCC == null)
		{
			/* DataDictionary doesn't have compiled info */
			if (indexSCOCI == null)
			{
				indexCC =
					tc.openConglomerate(
						indexCID,
						false,
						(TransactionController.OPENMODE_FORUPDATE |
						 TransactionController.OPENMODE_BASEROW_INSERT_LOCKED),
						lockMode,
						isolationLevel);
			}
			else
			{
				indexCC =
					tc.openCompiledConglomerate(
						false,
						(TransactionController.OPENMODE_FORUPDATE |
						 TransactionController.OPENMODE_BASEROW_INSERT_LOCKED),
						lockMode,
						isolationLevel,
						indexSCOCI,
						indexDCOCI);
			}
		}

		return indexCC;
	}

	/**
	  Open this IndexChanger.

	  @exception StandardException Thrown on error
	  */
	public void open()
		throws StandardException
	{
	}

	/**
	  Perform index maintenance to support a delete of a base table row.

	  @param baseRow the base table row.
	  @param baseRowLocation the base table row's location.

	  @exception StandardException Thrown on error
	  */
	public void delete(ExecRow baseRow, RowLocation baseRowLocation)
		throws StandardException
	{
		setOurIndexRow(baseRow, baseRowLocation);
		setScan();
		doDelete();
	}

	/**
	  Perform index maintenance to support an update of a base table row.

	  @param oldBaseRow the old image of the base table row.
	  @param newBaseRow the new image of the base table row.
	  @param baseRowLocation the base table row's location.

	  @exception StandardException Thrown on error
	  */
	public void update(ExecRow oldBaseRow,
					   ExecRow newBaseRow,
					   RowLocation baseRowLocation
					   )
		throws StandardException
	{
		setOurIndexRow(oldBaseRow, baseRowLocation);
		setOurUpdatedIndexRow(newBaseRow, baseRowLocation);

		/* We skip the update in the degenerate case
		 * where none of the key columns changed.
		 * (From an actual customer case.)
		 */
		if (indexRowChanged())
		{
			setScan();
			doDelete();
			insertForUpdate(newBaseRow, baseRowLocation);
		}
	}

	/**
	  Perform index maintenance to support an insert of a base table row.

	  @param newRow the base table row.
	  @param baseRowLocation the base table row's location.

	  @exception StandardException Thrown on error
	  */
	public void insert(ExecRow newRow, RowLocation baseRowLocation)
		throws StandardException
	{
		setOurIndexRow(newRow, baseRowLocation);
		doInsert();
	}

	/**
	  If we're updating a unique index, the inserts have to be
	  deferred.  This is to avoid uniqueness violations that are only
	  temporary.  If we do all the deletes first, only "true" uniqueness
	  violations can happen.  We do this here, rather than in open(),
	  because this is the only operation that requires deferred inserts,
	  and we only want to create the conglomerate if necessary.

	  @param newRow the base table row.
	  @param baseRowLocation the base table row's location.

	  @exception StandardException Thrown on error
	*/
	void insertForUpdate(ExecRow newRow, RowLocation baseRowLocation)
		throws StandardException
	{
		setOurIndexRow(newRow, baseRowLocation);
		//defer inserts if its on unique or UniqueWhereNotNull index
		if (irg.isUnique() || irg.isUniqueWithDuplicateNulls())
		{
			doDeferredInsert();
		}
		else
		{
			doInsert();
		}
	}

	/**
	  Finish doing the changes for this index.  This is intended for deferred
	  inserts for unique indexes.  It has no effect unless we are doing an
	  update of a unique index.

	  @exception StandardException Thrown on error
	  */
	public void finish()
		throws StandardException
	{
		ExecRow	deferredRow;

		/* Deferred processing only necessary for unique indexes */
		if (rowHolder != null)
		{
			CursorResultSet rs = rowHolder.getResultSet();
			try
			{
				rs.open();
				while ((deferredRow = rs.getNextRow()) != null)
				{
					if (SanityManager.DEBUG)
					{
						if (!(deferredRow instanceof ExecIndexRow))
						{
							SanityManager.THROWASSERT("deferredRow isn't an instance "+
								"of ExecIndexRow as expected. "+
								"It is an "+deferredRow.getClass().getName());
						}
					}
					insertAndCheckDups((ExecIndexRow)deferredRow);
				}
			}
			finally
			{
				rs.close(false);

				/*
				** If row holder was passed in, let the
				** client of this method clean it up.
				*/
				if (!rowHolderPassedIn)
				{
					rowHolder.close();
				}
			}
		}
	}

	/**
	  Close this IndexChanger.

	  @exception StandardException Thrown on error
	  */
	public void close()
		throws StandardException
	{
		closeIndexCC();
		closeIndexSC();
		if (rowHolder != null && !rowHolderPassedIn)
		{
			rowHolder.close();
		}
		baseCC = null;
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.planner.logical; import java.math.BigDecimal; import java.util.GregorianCalendar; import java.util.LinkedList; import java.util.List; import com.google.common.base.Preconditions; import org.apache.drill.common.exceptions.UserException; import org.apache.drill.common.expression.ExpressionPosition; import org.apache.drill.common.expression.FieldReference; import org.apache.drill.common.expression.FunctionCallFactory; import org.apache.drill.common.expression.IfExpression; import org.apache.drill.common.expression.IfExpression.IfCondition; import org.apache.drill.common.expression.LogicalExpression; import org.apache.drill.common.expression.NullExpression; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.common.expression.TypedNullConstant; import org.apache.drill.common.expression.ValueExpressions; import org.apache.drill.common.expression.ValueExpressions.QuotedString; import org.apache.drill.common.types.TypeProtos; import org.apache.drill.common.types.TypeProtos.MajorType; import org.apache.drill.common.types.TypeProtos.MinorType; import org.apache.drill.common.types.Types; import org.apache.drill.exec.planner.StarColumnHelper; import 
org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexCorrelVariable;
import org.apache.calcite.rex.RexDynamicParam;
import org.apache.calcite.rex.RexFieldAccess;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexLocalRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexOver;
import org.apache.calcite.rex.RexRangeRef;
import org.apache.calcite.rex.RexVisitorImpl;
import org.apache.calcite.sql.SqlSyntax;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.util.NlsString;

import com.google.common.collect.Lists;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.exec.work.ExecErrorConstants;

/**
 * Utilities for Drill's planner.
 */
public class DrillOptiq {
  public static final String UNSUPPORTED_REX_NODE_ERROR = "Cannot convert RexNode to equivalent Drill expression. ";
  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillOptiq.class);

  /**
   * Converts a tree of {@link RexNode} operators into a scalar expression in Drill syntax using one input.
   *
   * @param context parse context which contains planner settings
   * @param input data input
   * @param expr expression to be converted
   * @return converted expression
   */
  public static LogicalExpression toDrill(DrillParseContext context, RelNode input, RexNode expr) {
    return toDrill(context, Lists.newArrayList(input), expr);
  }

  /**
   * Converts a tree of {@link RexNode} operators into a scalar expression in Drill syntax using multiple inputs.
   *
   * @param context parse context which contains planner settings
   * @param inputs multiple data inputs
   * @param expr expression to be converted
   * @return converted expression
   */
  public static LogicalExpression toDrill(DrillParseContext context, List<RelNode> inputs, RexNode expr) {
    final RexToDrill visitor = new RexToDrill(context, inputs);
    return expr.accept(visitor);
  }

  /**
   * Visitor that walks a Calcite {@link RexNode} tree and produces the
   * equivalent Drill {@link LogicalExpression}.
   */
  private static class RexToDrill extends RexVisitorImpl<LogicalExpression> {
    private final List<RelNode> inputs;
    private final DrillParseContext context;
    // Merged field list of all inputs; indexed by RexInputRef index.
    private final List<RelDataTypeField> fieldList;

    RexToDrill(DrillParseContext context, List<RelNode> inputs) {
      super(true);
      this.context = context;
      this.inputs = inputs;
      this.fieldList = Lists.newArrayList();
      /*
         Fields are enumerated by their presence order in input. Details {@link org.apache.calcite.rex.RexInputRef}.
         Thus we can merge field list from several inputs by adding them into the list in order of appearance.
         Each field index in the list will match field index in the RexInputRef instance which will allow us
         to retrieve field from field list by index in {@link #visitInputRef(RexInputRef)} method. Example:

         Query: select t1.c1, t2.c1, t2.c2 from t1 inner join t2 on t1.c1 between t2.c1 and t2.c2

         Input 1: $0
         Input 2: $1, $2

         Result: $0, $1, $2
       */
      for (RelNode input : inputs) {
        if (input != null) {
          fieldList.addAll(input.getRowType().getFieldList());
        }
      }
    }

    @Override
    public LogicalExpression visitInputRef(RexInputRef inputRef) {
      final int index = inputRef.getIndex();
      final RelDataTypeField field = fieldList.get(index);
      Preconditions.checkNotNull(field, "Unable to find field using input reference");
      return FieldReference.getWithQuotedRef(field.getName());
    }

    @Override
    public LogicalExpression visitCall(RexCall call) {
//      logger.debug("RexCall {}, {}", call);
      final SqlSyntax syntax = call.getOperator().getSyntax();
      switch (syntax) {
      case BINARY:
        logger.debug("Binary");
        final String funcName = call.getOperator().getName().toLowerCase();
        return doFunction(call, funcName);
      case FUNCTION:
      case FUNCTION_ID:
        logger.debug("Function");
        return getDrillFunctionFromOptiqCall(call);
      case POSTFIX:
        logger.debug("Postfix");
        switch(call.getKind()){
        case IS_NOT_NULL:
        case IS_NOT_TRUE:
        case IS_NOT_FALSE:
        case IS_NULL:
        case IS_TRUE:
        case IS_FALSE:
        case OTHER:
          return FunctionCallFactory.createExpression(call.getOperator().getName().toLowerCase(),
              ExpressionPosition.UNKNOWN, call.getOperands().get(0).accept(this));
        }
        throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
      case PREFIX:
        logger.debug("Prefix");
        LogicalExpression arg = call.getOperands().get(0).accept(this);
        switch(call.getKind()){
        case NOT:
          return FunctionCallFactory.createExpression(call.getOperator().getName().toLowerCase(),
              ExpressionPosition.UNKNOWN, arg);
        case MINUS_PREFIX:
          // Rewrite unary minus as multiplication by -1.
          final RexBuilder builder = inputs.get(0).getCluster().getRexBuilder();
          final List<RexNode> operands = Lists.newArrayList();
          operands.add(builder.makeExactLiteral(new BigDecimal(-1)));
          operands.add(call.getOperands().get(0));
          return visitCall((RexCall) builder.makeCall(SqlStdOperatorTable.MULTIPLY, operands));
        }
        throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
      case SPECIAL:
        logger.debug("Special");
        switch(call.getKind()){
        case CAST:
          return getDrillCastFunctionFromOptiq(call);
        case LIKE:
        case SIMILAR:
          return getDrillFunctionFromOptiqCall(call);
        case CASE:
          List<LogicalExpression> caseArgs = Lists.newArrayList();
          for(RexNode r : call.getOperands()){
            caseArgs.add(r.accept(this));
          }
          // Build the nested IF from the innermost else outwards, so the
          // argument list is reversed first.
          caseArgs = Lists.reverse(caseArgs);
          // number of arguements are always going to be odd, because
          // Optiq adds "null" for the missing else expression at the end
          assert caseArgs.size()%2 == 1;
          LogicalExpression elseExpression = caseArgs.get(0);
          for (int i=1; i<caseArgs.size(); i=i+2) {
            elseExpression = IfExpression.newBuilder()
              .setElse(elseExpression)
              .setIfCondition(new IfCondition(caseArgs.get(i + 1), caseArgs.get(i))).build();
          }
          return elseExpression;
        }

        if (call.getOperator() == SqlStdOperatorTable.ITEM) {
          SchemaPath left = (SchemaPath) call.getOperands().get(0).accept(this);

          // Convert expr of item[*, 'abc'] into column expression 'abc'
          String rootSegName = left.getRootSegment().getPath();
          if (StarColumnHelper.isStarColumn(rootSegName)) {
            rootSegName = rootSegName.substring(0, rootSegName.indexOf("*"));
            final RexLiteral literal = (RexLiteral) call.getOperands().get(1);
            return SchemaPath.getSimplePath(rootSegName + literal.getValue2().toString());
          }

          final RexLiteral literal = (RexLiteral) call.getOperands().get(1);
          switch(literal.getTypeName()){
          case DECIMAL:
          case INTEGER:
            // Numeric subscript: array-style access by index.
            return left.getChild(((BigDecimal)literal.getValue()).intValue());
          case CHAR:
            // String subscript: map-style access by key.
            return left.getChild(literal.getValue2().toString());
          default:
            // fall through
          }
        }

        if (call.getOperator() == SqlStdOperatorTable.DATETIME_PLUS) {
          return doFunction(call, "+");
        }

        // fall through
      default:
        throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
      }
    }

    /**
     * Converts a binary operator call into a Drill expression; n-ary
     * non-boolean calls are folded into a right-nested chain of binary calls.
     */
    private LogicalExpression doFunction(RexCall call, String funcName) {
      List<LogicalExpression> args = Lists.newArrayList();
      for(RexNode r : call.getOperands()){
        args.add(r.accept(this));
      }

      if (FunctionCallFactory.isBooleanOperator(funcName)) {
        LogicalExpression func = FunctionCallFactory.createBooleanOperator(funcName, args);
        return func;
      } else {
        args = Lists.reverse(args);
        LogicalExpression lastArg = args.get(0);
        for(int i = 1; i < args.size(); i++){
          lastArg = FunctionCallFactory.createExpression(funcName, Lists.newArrayList(args.get(i), lastArg));
        }

        return lastArg;
      }
    }

    // Common failure path for RexNode kinds Drill cannot translate.
    private LogicalExpression doUnknown(RexNode o){
      // raise an error
      throw UserException.planError().message(UNSUPPORTED_REX_NODE_ERROR +
          "RexNode Class: %s, RexNode Digest: %s", o.getClass().getName(), o.toString()).build(logger);
    }

    @Override
    public LogicalExpression visitLocalRef(RexLocalRef localRef) {
      return doUnknown(localRef);
    }

    @Override
    public LogicalExpression visitOver(RexOver over) {
      return doUnknown(over);
    }

    @Override
    public LogicalExpression visitCorrelVariable(RexCorrelVariable correlVariable) {
      return doUnknown(correlVariable);
    }

    @Override
    public LogicalExpression visitDynamicParam(RexDynamicParam dynamicParam) {
      return doUnknown(dynamicParam);
    }

    @Override
    public LogicalExpression visitRangeRef(RexRangeRef rangeRef) {
      return doUnknown(rangeRef);
    }

    @Override
    public LogicalExpression visitFieldAccess(RexFieldAccess fieldAccess) {
      return super.visitFieldAccess(fieldAccess);
    }

    /**
     * Translates a SQL CAST call into a Drill cast expression, mapping the
     * Calcite SQL type name to the corresponding Drill {@link MajorType}.
     */
    private LogicalExpression getDrillCastFunctionFromOptiq(RexCall call){
      LogicalExpression arg = call.getOperands().get(0).accept(this);
      MajorType castType;

      switch(call.getType().getSqlTypeName().getName()){
      case "VARCHAR":
      case "CHAR":
        castType = Types.required(MinorType.VARCHAR).toBuilder().setPrecision(call.getType().getPrecision()).build();
        break;

      case "INTEGER": castType = Types.required(MinorType.INT); break;
      case "FLOAT": castType = Types.required(MinorType.FLOAT4); break;
      case "DOUBLE": castType = Types.required(MinorType.FLOAT8); break;
      case "DECIMAL":
        if (!context.getPlannerSettings().getOptions().getOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY).bool_val) {
          throw UserException
              .unsupportedError()
              .message(ExecErrorConstants.DECIMAL_DISABLE_ERR_MSG)
              .build(logger);
        }

        // Pick the narrowest Drill decimal type that fits the precision.
        int precision = call.getType().getPrecision();
        int scale = call.getType().getScale();

        if (precision <= 9) {
          castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL9).setPrecision(precision).setScale(scale).build();
        } else if (precision <= 18) {
          castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL18).setPrecision(precision).setScale(scale).build();
        } else if (precision <= 28) {
          // Inject a cast to SPARSE before casting to the dense type.
          castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL28SPARSE).setPrecision(precision).setScale(scale).build();
        } else if (precision <= 38) {
          castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL38SPARSE).setPrecision(precision).setScale(scale).build();
        } else {
          throw new UnsupportedOperationException("Only Decimal types with precision range 0 - 38 is supported");
        }
        break;

      case "INTERVAL_YEAR_MONTH": castType = Types.required(MinorType.INTERVALYEAR); break;
      case "INTERVAL_DAY_TIME": castType = Types.required(MinorType.INTERVALDAY); break;
      case "BOOLEAN": castType = Types.required(MinorType.BIT); break;
      case "BINARY": castType = Types.required(MinorType.VARBINARY); break;
      case "ANY": return arg; // Type will be same as argument.
      default: castType = Types.required(MinorType.valueOf(call.getType().getSqlTypeName().getName()));
      }
      return FunctionCallFactory.createCast(castType, ExpressionPosition.UNKNOWN, arg);
    }

    /**
     * Translates a generic SQL function call into a Drill expression, applying
     * special rewrites for extract, trim, date_part, concat, length,
     * convert_from/convert_to and date_trunc.
     */
    private LogicalExpression getDrillFunctionFromOptiqCall(RexCall call) {
      List<LogicalExpression> args = Lists.newArrayList();
      for(RexNode n : call.getOperands()){
        args.add(n.accept(this));
      }

      int argsSize = args.size();
      String functionName = call.getOperator().getName().toLowerCase();

      // TODO: once we have more function rewrites and a patter emerges from different rewrites, factor this out in a better fashion
      /* Rewrite extract functions in the following manner
       * extract(year, date '2008-2-23') ---> extractYear(date '2008-2-23')
       */
      if (functionName.equals("extract")) {

        // Assert that the first argument to extract is a QuotedString
        assert args.get(0) instanceof ValueExpressions.QuotedString;

        // Get the unit of time to be extracted
        String timeUnitStr = ((ValueExpressions.QuotedString)args.get(0)).value;

        switch (timeUnitStr){
        case ("YEAR"):
        case ("MONTH"):
        case ("DAY"):
        case ("HOUR"):
        case ("MINUTE"):
        case ("SECOND"):
          String functionPostfix = timeUnitStr.substring(0, 1).toUpperCase() + timeUnitStr.substring(1).toLowerCase();
          functionName += functionPostfix;
          return FunctionCallFactory.createExpression(functionName, args.subList(1, 2));
        default:
          throw new UnsupportedOperationException("extract function supports the following time units: YEAR, MONTH, DAY, HOUR, MINUTE, SECOND");
        }
      } else if (functionName.equals("trim")) {
        String trimFunc = null;
        List<LogicalExpression> trimArgs = Lists.newArrayList();

        assert args.get(0) instanceof ValueExpressions.QuotedString;
        switch (((ValueExpressions.QuotedString)args.get(0)).value.toUpperCase()) {
        case "LEADING":
          trimFunc = "ltrim";
          break;
        case "TRAILING":
          trimFunc = "rtrim";
          break;
        case "BOTH":
          trimFunc = "btrim";
          break;
        default:
          // NOTE(review): in non-assert builds this leaves trimFunc == null and
          // the createExpression call below would get a null name — consider
          // throwing UnsupportedOperationException here instead.
          assert 1 == 0;
        }

        trimArgs.add(args.get(2));
        trimArgs.add(args.get(1));

        return FunctionCallFactory.createExpression(trimFunc, trimArgs);
      } else if (functionName.equals("date_part")) {
        // Rewrite DATE_PART functions as extract functions
        // assert that the function has exactly two arguments
        assert argsSize == 2;

        /* Based on the first input to the date_part function we rewrite the function as the
         * appropriate extract function. For example
         * date_part('year', date '2008-2-23') ------> extractYear(date '2008-2-23')
         */
        assert args.get(0) instanceof QuotedString;
        QuotedString extractString = (QuotedString) args.get(0);
        String functionPostfix = extractString.value.substring(0, 1).toUpperCase() + extractString.value.substring(1).toLowerCase();
        return FunctionCallFactory.createExpression("extract" + functionPostfix, args.subList(1, 2));
      } else if (functionName.equals("concat")) {

        if (argsSize == 1) {
          /*
           * We treat concat with one argument as a special case. Since we don't have a function
           * implementation of concat that accepts one argument. We simply add another dummy argument
           * (empty string literal) to the list of arguments.
           */
          List<LogicalExpression> concatArgs = new LinkedList<>(args);
          concatArgs.add(QuotedString.EMPTY_STRING);

          return FunctionCallFactory.createExpression(functionName, concatArgs);

        } else if (argsSize > 2) {
          List<LogicalExpression> concatArgs = Lists.newArrayList();

          /* stack concat functions on top of each other if we have more than two arguments
           * Eg: concat(col1, col2, col3) => concat(concat(col1, col2), col3)
           */
          concatArgs.add(args.get(0));
          concatArgs.add(args.get(1));

          LogicalExpression first = FunctionCallFactory.createExpression(functionName, concatArgs);

          for (int i = 2; i < argsSize; i++) {
            concatArgs = Lists.newArrayList();
            concatArgs.add(first);
            concatArgs.add(args.get(i));
            first = FunctionCallFactory.createExpression(functionName, concatArgs);
          }

          return first;
        }
      } else if (functionName.equals("length")) {

          if (argsSize == 2) {

              // Second argument should always be a literal specifying the encoding format
              assert args.get(1) instanceof ValueExpressions.QuotedString;

              String encodingType = ((ValueExpressions.QuotedString) args.get(1)).value;
              functionName += encodingType.substring(0, 1).toUpperCase() + encodingType.substring(1).toLowerCase();

              return FunctionCallFactory.createExpression(functionName, args.subList(0, 1));
          }
      } else if ((functionName.equals("convert_from") || functionName.equals("convert_to"))
                    && args.get(1) instanceof QuotedString) {
        return FunctionCallFactory.createConvert(functionName, ((QuotedString)args.get(1)).value, args.get(0), ExpressionPosition.UNKNOWN);
      } else if (functionName.equals("date_trunc")) {
        return handleDateTruncFunction(args);
      }

      // Default: pass the function through unchanged.
      return FunctionCallFactory.createExpression(functionName, args);
    }

    private LogicalExpression handleDateTruncFunction(final List<LogicalExpression> args) {
      // Assert that the first argument to extract is a QuotedString
      assert args.get(0) instanceof ValueExpressions.QuotedString;

      // Get the unit of time to be extracted
      String timeUnitStr = ((ValueExpressions.QuotedString)args.get(0)).value.toUpperCase();

      switch
(timeUnitStr){ case ("YEAR"): case ("MONTH"): case ("DAY"): case ("HOUR"): case ("MINUTE"): case ("SECOND"): case ("WEEK"): case ("QUARTER"): case ("DECADE"): case ("CENTURY"): case ("MILLENNIUM"): final String functionPostfix = timeUnitStr.substring(0, 1).toUpperCase() + timeUnitStr.substring(1).toLowerCase(); return FunctionCallFactory.createExpression("date_trunc_" + functionPostfix, args.subList(1, 2)); } throw new UnsupportedOperationException("date_trunc function supports the following time units: " + "YEAR, MONTH, DAY, HOUR, MINUTE, SECOND, WEEK, QUARTER, DECADE, CENTURY, MILLENNIUM"); } @Override public LogicalExpression visitLiteral(RexLiteral literal) { switch(literal.getType().getSqlTypeName()){ case BIGINT: if (isLiteralNull(literal)) { return createNullExpr(MinorType.BIGINT); } long l = (((BigDecimal) literal.getValue()).setScale(0, BigDecimal.ROUND_HALF_UP)).longValue(); return ValueExpressions.getBigInt(l); case BOOLEAN: if (isLiteralNull(literal)) { return createNullExpr(MinorType.BIT); } return ValueExpressions.getBit(((Boolean) literal.getValue())); case CHAR: if (isLiteralNull(literal)) { return createStringNullExpr(literal.getType().getPrecision()); } return ValueExpressions.getChar(((NlsString)literal.getValue()).getValue(), literal.getType().getPrecision()); case DOUBLE: if (isLiteralNull(literal)){ return createNullExpr(MinorType.FLOAT8); } double d = ((BigDecimal) literal.getValue()).doubleValue(); return ValueExpressions.getFloat8(d); case FLOAT: if (isLiteralNull(literal)) { return createNullExpr(MinorType.FLOAT4); } float f = ((BigDecimal) literal.getValue()).floatValue(); return ValueExpressions.getFloat4(f); case INTEGER: if (isLiteralNull(literal)) { return createNullExpr(MinorType.INT); } int a = (((BigDecimal) literal.getValue()).setScale(0, BigDecimal.ROUND_HALF_UP)).intValue(); return ValueExpressions.getInt(a); case DECIMAL: /* TODO: Enable using Decimal literals once we have more functions implemented for Decimal * For now 
continue using Double instead of decimals int precision = ((BigDecimal) literal.getValue()).precision(); if (precision <= 9) { return ValueExpressions.getDecimal9((BigDecimal)literal.getValue()); } else if (precision <= 18) { return ValueExpressions.getDecimal18((BigDecimal)literal.getValue()); } else if (precision <= 28) { return ValueExpressions.getDecimal28((BigDecimal)literal.getValue()); } else if (precision <= 38) { return ValueExpressions.getDecimal38((BigDecimal)literal.getValue()); } */ if (isLiteralNull(literal)) { return createNullExpr(MinorType.FLOAT8); } double dbl = ((BigDecimal) literal.getValue()).doubleValue(); logger.warn("Converting exact decimal into approximate decimal. Should be fixed once decimal is implemented."); return ValueExpressions.getFloat8(dbl); case VARCHAR: if (isLiteralNull(literal)) { return createStringNullExpr(literal.getType().getPrecision()); } return ValueExpressions.getChar(((NlsString)literal.getValue()).getValue(), literal.getType().getPrecision()); case SYMBOL: if (isLiteralNull(literal)) { return createStringNullExpr(literal.getType().getPrecision()); } return ValueExpressions.getChar(literal.getValue().toString(), literal.getType().getPrecision()); case DATE: if (isLiteralNull(literal)) { return createNullExpr(MinorType.DATE); } return (ValueExpressions.getDate((GregorianCalendar)literal.getValue())); case TIME: if (isLiteralNull(literal)) { return createNullExpr(MinorType.TIME); } return (ValueExpressions.getTime((GregorianCalendar)literal.getValue())); case TIMESTAMP: if (isLiteralNull(literal)) { return createNullExpr(MinorType.TIMESTAMP); } return (ValueExpressions.getTimeStamp((GregorianCalendar) literal.getValue())); case INTERVAL_YEAR_MONTH: if (isLiteralNull(literal)) { return createNullExpr(MinorType.INTERVALYEAR); } return (ValueExpressions.getIntervalYear(((BigDecimal) (literal.getValue())).intValue())); case INTERVAL_DAY_TIME: if (isLiteralNull(literal)) { return createNullExpr(MinorType.INTERVALDAY); } 
return (ValueExpressions.getIntervalDay(((BigDecimal) (literal.getValue())).longValue())); case NULL: return NullExpression.INSTANCE; case ANY: if (isLiteralNull(literal)) { return NullExpression.INSTANCE; } default: throw new UnsupportedOperationException(String.format("Unable to convert the value of %s and type %s to a Drill constant expression.", literal, literal.getType().getSqlTypeName())); } } /** * Create nullable major type using given minor type * and wraps it in typed null constant. * * @param type minor type * @return typed null constant instance */ private TypedNullConstant createNullExpr(MinorType type) { return new TypedNullConstant(Types.optional(type)); } /** * Create nullable varchar major type with given precision * and wraps it in typed null constant. * * @param precision precision value * @return typed null constant instance */ private TypedNullConstant createStringNullExpr(int precision) { return new TypedNullConstant(Types.withPrecision(MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL, precision)); } } public static boolean isLiteralNull(RexLiteral literal) { return literal.getTypeName().getName().equals("NULL"); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.api.operators; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.common.typeutils.TypeSerializerSchemaCompatibility; import org.apache.flink.runtime.state.InternalPriorityQueue; import org.apache.flink.runtime.state.KeyGroupRange; import org.apache.flink.runtime.state.KeyGroupedInternalPriorityQueue; import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback; import org.apache.flink.streaming.runtime.tasks.ProcessingTimeService; import org.apache.flink.util.CloseableIterator; import org.apache.flink.util.FlinkRuntimeException; import org.apache.flink.util.Preconditions; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.concurrent.ScheduledFuture; import static org.apache.flink.util.Preconditions.checkArgument; import static org.apache.flink.util.Preconditions.checkNotNull; /** * {@link InternalTimerService} that stores timers on the Java heap. 
 */
public class InternalTimerServiceImpl<K, N> implements InternalTimerService<N>, ProcessingTimeCallback {

	private final ProcessingTimeService processingTimeService;

	private final KeyContext keyContext;

	/**
	 * Processing time timers that are currently in-flight.
	 */
	private final KeyGroupedInternalPriorityQueue<TimerHeapInternalTimer<K, N>> processingTimeTimersQueue;

	/**
	 * Event time timers that are currently in-flight.
	 */
	private final KeyGroupedInternalPriorityQueue<TimerHeapInternalTimer<K, N>> eventTimeTimersQueue;

	/**
	 * Information concerning the local key-group range.
	 */
	private final KeyGroupRange localKeyGroupRange;

	// Smallest key-group index in the local range; used as the offset when
	// partitioning timers per key-group.
	private final int localKeyGroupRangeStartIdx;

	/**
	 * The local event time, as denoted by the last received
	 * {@link org.apache.flink.streaming.api.watermark.Watermark Watermark}.
	 */
	private long currentWatermark = Long.MIN_VALUE;

	/**
	 * The one and only Future (if any) registered to execute the
	 * next {@link Triggerable} action, when its (processing) time arrives.
	 * */
	private ScheduledFuture<?> nextTimer;

	// Variables to be set when the service is started.

	private TypeSerializer<K> keySerializer;

	private TypeSerializer<N> namespaceSerializer;

	private Triggerable<K, N> triggerTarget;

	// volatile: flipped once by startTimerService and read by later calls.
	private volatile boolean isInitialized;

	// Serializers used to read a restored snapshot; cleared on initialization.
	private TypeSerializer<K> keyDeserializer;

	private TypeSerializer<N> namespaceDeserializer;

	/** The restored timers snapshot, if any. */
	private InternalTimersSnapshot<K, N> restoredTimersSnapshot;

	InternalTimerServiceImpl(
		KeyGroupRange localKeyGroupRange,
		KeyContext keyContext,
		ProcessingTimeService processingTimeService,
		KeyGroupedInternalPriorityQueue<TimerHeapInternalTimer<K, N>> processingTimeTimersQueue,
		KeyGroupedInternalPriorityQueue<TimerHeapInternalTimer<K, N>> eventTimeTimersQueue) {

		this.keyContext = checkNotNull(keyContext);
		this.processingTimeService = checkNotNull(processingTimeService);
		this.localKeyGroupRange = checkNotNull(localKeyGroupRange);
		this.processingTimeTimersQueue = checkNotNull(processingTimeTimersQueue);
		this.eventTimeTimersQueue = checkNotNull(eventTimeTimersQueue);

		// find the starting index of the local key-group range
		int startIdx = Integer.MAX_VALUE;
		for (Integer keyGroupIdx : localKeyGroupRange) {
			startIdx = Math.min(keyGroupIdx, startIdx);
		}
		this.localKeyGroupRangeStartIdx = startIdx;
	}

	/**
	 * Starts the local {@link InternalTimerServiceImpl} by:
	 * <ol>
	 *     <li>Setting the {@code keySerialized} and {@code namespaceSerializer} for the timers it will contain.</li>
	 *     <li>Setting the {@code triggerTarget} which contains the action to be performed when a timer fires.</li>
	 *     <li>Re-registering timers that were retrieved after recovering from a node failure, if any.</li>
	 * </ol>
	 * This method can be called multiple times, as long as it is called with the same serializers.
	 */
	public void startTimerService(
			TypeSerializer<K> keySerializer,
			TypeSerializer<N> namespaceSerializer,
			Triggerable<K, N> triggerTarget) {

		if (!isInitialized) {

			if (keySerializer == null || namespaceSerializer == null) {
				throw new IllegalArgumentException("The TimersService serializers cannot be null.");
			}

			if (this.keySerializer != null || this.namespaceSerializer != null || this.triggerTarget != null) {
				throw new IllegalStateException("The TimerService has already been initialized.");
			}

			// the following is the case where we restore
			if (restoredTimersSnapshot != null) {
				// Reject serializers that would require state migration; restored timer
				// state must be readable as-is by the new serializers.
				TypeSerializerSchemaCompatibility<K> keySerializerCompatibility =
					restoredTimersSnapshot.getKeySerializerSnapshot().resolveSchemaCompatibility(keySerializer);

				if (keySerializerCompatibility.isIncompatible() || keySerializerCompatibility.isCompatibleAfterMigration()) {
					throw new IllegalStateException(
						"Tried to initialize restored TimerService with new key serializer that requires migration or is incompatible.");
				}

				TypeSerializerSchemaCompatibility<N> namespaceSerializerCompatibility =
					restoredTimersSnapshot.getNamespaceSerializerSnapshot().resolveSchemaCompatibility(namespaceSerializer);

				if (namespaceSerializerCompatibility.isIncompatible() || namespaceSerializerCompatibility.isCompatibleAfterMigration()) {
					throw new IllegalStateException(
						"Tried to initialize restored TimerService with new namespace serializer that requires migration or is incompatible.");
				}

				// Prefer the reconfigured serializer when the schema is compatible only
				// after reconfiguration.
				this.keySerializer = keySerializerCompatibility.isCompatibleAsIs()
					? keySerializer : keySerializerCompatibility.getReconfiguredSerializer();
				this.namespaceSerializer = namespaceSerializerCompatibility.isCompatibleAsIs()
					? namespaceSerializer : namespaceSerializerCompatibility.getReconfiguredSerializer();
			} else {
				this.keySerializer = keySerializer;
				this.namespaceSerializer = namespaceSerializer;
			}

			// Restore-time deserializers are no longer needed once initialized.
			this.keyDeserializer = null;
			this.namespaceDeserializer = null;

			this.triggerTarget = Preconditions.checkNotNull(triggerTarget);

			// re-register the restored timers (if any)
			final InternalTimer<K, N> headTimer = processingTimeTimersQueue.peek();
			if (headTimer != null) {
				nextTimer = processingTimeService.registerTimer(headTimer.getTimestamp(), this);
			}
			this.isInitialized = true;
		} else {
			if (!(this.keySerializer.equals(keySerializer) && this.namespaceSerializer.equals(namespaceSerializer))) {
				throw new IllegalArgumentException("Already initialized Timer Service " +
					"tried to be initialized with different key and namespace serializers.");
			}
		}
	}

	@Override
	public long currentProcessingTime() {
		return processingTimeService.getCurrentProcessingTime();
	}

	@Override
	public long currentWatermark() {
		return currentWatermark;
	}

	@Override
	public void registerProcessingTimeTimer(N namespace, long time) {
		// Remember the current head so we can tell whether the new timer became the
		// earliest one and the physical timer must be re-scheduled.
		InternalTimer<K, N> oldHead = processingTimeTimersQueue.peek();
		if (processingTimeTimersQueue.add(new TimerHeapInternalTimer<>(time, (K) keyContext.getCurrentKey(), namespace))) {
			long nextTriggerTime = oldHead != null ? oldHead.getTimestamp() : Long.MAX_VALUE;
			// check if we need to re-schedule our timer to earlier
			if (time < nextTriggerTime) {
				if (nextTimer != null) {
					nextTimer.cancel(false);
				}
				nextTimer = processingTimeService.registerTimer(time, this);
			}
		}
	}

	@Override
	public void registerEventTimeTimer(N namespace, long time) {
		// Event-time timers fire from advanceWatermark(); no physical timer needed.
		eventTimeTimersQueue.add(new TimerHeapInternalTimer<>(time, (K) keyContext.getCurrentKey(), namespace));
	}

	@Override
	public void deleteProcessingTimeTimer(N namespace, long time) {
		processingTimeTimersQueue.remove(new TimerHeapInternalTimer<>(time, (K) keyContext.getCurrentKey(), namespace));
	}

	@Override
	public void deleteEventTimeTimer(N namespace, long time) {
		eventTimeTimersQueue.remove(new TimerHeapInternalTimer<>(time, (K) keyContext.getCurrentKey(), namespace));
	}

	@Override
	public void onProcessingTime(long time) throws Exception {
		// null out the timer in case the Triggerable calls registerProcessingTimeTimer()
		// inside the callback.
		nextTimer = null;

		// Fire every processing-time timer that is due at or before 'time'.
		InternalTimer<K, N> timer;

		while ((timer = processingTimeTimersQueue.peek()) != null && timer.getTimestamp() <= time) {
			processingTimeTimersQueue.poll();
			keyContext.setCurrentKey(timer.getKey());
			triggerTarget.onProcessingTime(timer);
		}

		// Schedule the physical timer for the next pending entry, unless the callback
		// already registered one.
		if (timer != null && nextTimer == null) {
			nextTimer = processingTimeService.registerTimer(timer.getTimestamp(), this);
		}
	}

	/**
	 * Advances the local event time to the given watermark and fires every
	 * event-time timer with a timestamp at or before it.
	 *
	 * @param time the new watermark
	 * @throws Exception exceptions thrown by the trigger target are propagated
	 */
	public void advanceWatermark(long time) throws Exception {
		currentWatermark = time;

		InternalTimer<K, N> timer;

		while ((timer = eventTimeTimersQueue.peek()) != null && timer.getTimestamp() <= time) {
			eventTimeTimersQueue.poll();
			keyContext.setCurrentKey(timer.getKey());
			triggerTarget.onEventTime(timer);
		}
	}

	/**
	 * Snapshots the timers (both processing and event time ones) for a given {@code keyGroupIdx}.
	 *
	 * @param keyGroupIdx the id of the key-group to be put in the snapshot.
	 * @return a snapshot containing the timers for the given key-group, and the serializers for them
	 */
	public InternalTimersSnapshot<K, N> snapshotTimersForKeyGroup(int keyGroupIdx) {
		return new InternalTimersSnapshot<>(
			keySerializer,
			namespaceSerializer,
			eventTimeTimersQueue.getSubsetForKeyGroup(keyGroupIdx),
			processingTimeTimersQueue.getSubsetForKeyGroup(keyGroupIdx));
	}

	public TypeSerializer<K> getKeySerializer() {
		return keySerializer;
	}

	public TypeSerializer<N> getNamespaceSerializer() {
		return namespaceSerializer;
	}

	/**
	 * Restore the timers (both processing and event time ones) for a given {@code keyGroupIdx}.
	 *
	 * @param restoredSnapshot the restored snapshot containing the key-group's timers,
	 *                       and the serializers that were used to write them
	 * @param keyGroupIdx the id of the key-group to be put in the snapshot.
	 */
	@SuppressWarnings("unchecked")
	public void restoreTimersForKeyGroup(InternalTimersSnapshot<?, ?> restoredSnapshot, int keyGroupIdx) {
		this.restoredTimersSnapshot = (InternalTimersSnapshot<K, N>) restoredSnapshot;

		// All key-groups of one service must have been written with the same serializers.
		TypeSerializer<K> restoredKeySerializer = restoredTimersSnapshot.getKeySerializerSnapshot().restoreSerializer();
		if (this.keyDeserializer != null && !this.keyDeserializer.equals(restoredKeySerializer)) {
			throw new IllegalArgumentException("Tried to restore timers for the same service with different key serializers.");
		}
		this.keyDeserializer = restoredKeySerializer;

		TypeSerializer<N> restoredNamespaceSerializer = restoredTimersSnapshot.getNamespaceSerializerSnapshot().restoreSerializer();
		if (this.namespaceDeserializer != null && !this.namespaceDeserializer.equals(restoredNamespaceSerializer)) {
			throw new IllegalArgumentException("Tried to restore timers for the same service with different namespace serializers.");
		}
		this.namespaceDeserializer = restoredNamespaceSerializer;

		checkArgument(localKeyGroupRange.contains(keyGroupIdx),
			"Key Group " + keyGroupIdx + " does not belong to the local range.");

		// restore the event time timers
		eventTimeTimersQueue.addAll(restoredTimersSnapshot.getEventTimeTimers());

		// restore the processing time timers
		processingTimeTimersQueue.addAll(restoredTimersSnapshot.getProcessingTimeTimers());
	}

	@VisibleForTesting
	public int numProcessingTimeTimers() {
		return this.processingTimeTimersQueue.size();
	}

	@VisibleForTesting
	public int numEventTimeTimers() {
		return this.eventTimeTimersQueue.size();
	}

	@VisibleForTesting
	public int numProcessingTimeTimers(N namespace) {
		return countTimersInNamespaceInternal(namespace, processingTimeTimersQueue);
	}

	@VisibleForTesting
	public int numEventTimeTimers(N namespace) {
		return countTimersInNamespaceInternal(namespace, eventTimeTimersQueue);
	}

	// Counts the timers in the given queue whose namespace equals the given one.
	private int countTimersInNamespaceInternal(N namespace, InternalPriorityQueue<TimerHeapInternalTimer<K, N>> queue) {
		int count = 0;
		try (final CloseableIterator<TimerHeapInternalTimer<K, N>> iterator = queue.iterator()) {
			while (iterator.hasNext()) {
				final TimerHeapInternalTimer<K, N> timer = iterator.next();
				if (timer.getNamespace().equals(namespace)) {
					count++;
				}
			}
		} catch (Exception e) {
			throw new FlinkRuntimeException("Exception when closing iterator.", e);
		}
		return count;
	}

	@VisibleForTesting
	int getLocalKeyGroupRangeStartIdx() {
		return this.localKeyGroupRangeStartIdx;
	}

	@VisibleForTesting
	List<Set<TimerHeapInternalTimer<K, N>>> getEventTimeTimersPerKeyGroup() {
		return partitionElementsByKeyGroup(eventTimeTimersQueue);
	}

	@VisibleForTesting
	List<Set<TimerHeapInternalTimer<K, N>>> getProcessingTimeTimersPerKeyGroup() {
		return partitionElementsByKeyGroup(processingTimeTimersQueue);
	}

	// Splits the queue's contents into one unmodifiable set per local key-group.
	private <T> List<Set<T>> partitionElementsByKeyGroup(KeyGroupedInternalPriorityQueue<T> keyGroupedQueue) {
		List<Set<T>> result = new ArrayList<>(localKeyGroupRange.getNumberOfKeyGroups());
		for (int keyGroup : localKeyGroupRange) {
			result.add(Collections.unmodifiableSet(keyGroupedQueue.getSubsetForKeyGroup(keyGroup)));
		}
		return result;
	}
}
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.internal.serialization.impl.portable.portablereader; import com.hazelcast.config.SerializationConfig; import com.hazelcast.internal.serialization.Data; import com.hazelcast.internal.serialization.InternalSerializationService; import com.hazelcast.internal.serialization.impl.DefaultSerializationServiceBuilder; import com.hazelcast.internal.serialization.impl.GenericRecordQueryReader; import com.hazelcast.nio.serialization.Portable; import com.hazelcast.query.impl.getters.MultiResult; import com.hazelcast.test.HazelcastTestSupport; import com.hazelcast.test.annotation.ParallelJVMTest; import com.hazelcast.test.annotation.SlowTest; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import java.io.IOException; import java.lang.reflect.Array; import java.util.ArrayList; import java.util.Collection; import java.util.List; import static com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.GroupPortable; import static com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.NestedGroupPortable; import static 
com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.PrimitiveFields; import static com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.PrimitiveFields.getPrimitiveArrays; import static com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.PrimitiveFields.getPrimitives; import static com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.PrimitivePortable; import static com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.PrimitivePortable.Init.FULL; import static com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.PrimitivePortable.Init.NONE; import static com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.PrimitivePortable.Init.NULL; import static com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.TestPortableFactory; import static com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.group; import static com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.nested; import static com.hazelcast.internal.serialization.impl.portable.portablereader.DefaultPortableReaderTestStructure.prim; import static java.util.Arrays.asList; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isA; import static org.junit.Assert.assertThat; /** * Tests that verifies the behavior of the DefaultPortableReader. * All tests cases are generated, since there's a lot of possible cases due to the long lists of read* method on the reader. * <p> * The test is parametrised with 4 parameters * Each test execution runs one read operation on the reader. 
 * <p>
 * The rationale behind these tests is to cover all possible combinations of reads using nested paths and quantifiers
 * (number or any) with all possible portable types. It's impossible to do it manually, since there's 20 supported
 * types and a read method for each one of them.
 * <p>
 * Each test case is documented, plus each test outputs its scenario in a readable way, so it's easy to follow
 * the test case while you run it. Also each test case shows in which method it is generated.
 * <p>
 * IF YOU SEE A FAILURE HERE:
 * - check the test output - analyse the test scenario
 * - check in which method the scenario is generated - narrow down the scope of the tests run
 */
@RunWith(Parameterized.class)
@Category({SlowTest.class, ParallelJVMTest.class})
public class DefaultPortableReaderSpecTest extends HazelcastTestSupport {

    private static final PrimitivePortable P_NON_EMPTY = new PrimitivePortable(0, PrimitivePortable.Init.FULL);
    private static final GroupPortable G_NON_EMPTY = group(FULL);
    private static final NestedGroupPortable N_NON_EMPTY = nested(new Portable[]{G_NON_EMPTY, G_NON_EMPTY});

    @Rule
    public ExpectedException expected = ExpectedException.none();

    // input object
    private Portable inputObject;
    // object or exception
    private Object expectedResult;
    // e.g. body.brain.iq
    private String pathToRead;
    // parent method of this test to identify it in case of failures
    private String parent;

    @Parameters(name = "{index}: {0}, read{2}, {3}")
    public static Collection<Object[]> parametrisationData() {
        // Each generator method below appends its scenarios to the shared list.
        List<Object[]> result = new ArrayList<>();

        directPrimitiveScenarios(result);
        fromPortableToPrimitiveScenarios(result);
        fromPortableArrayToPrimitiveScenarios(result);
        fromPortableToPortableToPrimitiveScenarios(result);
        fromPortableToPortableArrayToPrimitiveScenarios(result);
        fromPortableArrayToPortableArrayToPrimitiveArrayAnyScenarios(result);
        fromPortableArrayAnyToPortableArrayAnyToPrimitiveScenarios(result);
        fromPortableArrayToPortableArrayToPrimitiveScenarios(result);
        fromPortableArrayToPortableArrayAnyScenarios(result);

        return result;
    }

    public DefaultPortableReaderSpecTest(Portable inputObject, Object expectedResult, String pathToRead, String parent) {
        this.inputObject = inputObject;
        this.expectedResult = expectedResult;
        this.pathToRead = pathToRead;
        this.parent = parent;
    }

    @Test
    @SuppressWarnings("unchecked")
    public void executeTestScenario() throws Exception {
        // handle result
        Object resultToMatch = expectedResult;
        if (expectedResult instanceof Class) {
            // expected exception case
            expected.expect(isA((Class) expectedResult));
        } else if (expectedResult instanceof List) {
            // just convenience -> if result is a list it will be compared to an array, so it has to be converted
            resultToMatch = ((List) resultToMatch).toArray();
        }

        // print test scenario for debug purposes
        // it makes debugging easier since all scenarios are generated
        printlnScenarioDescription(resultToMatch);

        // assert the condition
        Object result = reader(inputObject).read(pathToRead);
        if (result instanceof MultiResult) {
            MultiResult multiResult = (MultiResult) result;
            if (multiResult.getResults().size() == 1
                    && multiResult.getResults().get(0) == null && multiResult.isNullEmptyTarget()) {
                // explode null in case of a single multi-result target result
                result = null;
            } else {
                // in case of multi result while invoking generic "read" method deal with the multi results
                result = ((MultiResult) result).getResults().toArray();
            }
            assertThat(result, equalTo(resultToMatch));
        } else {
            assertThat(result, equalTo(resultToMatch));
        }
    }

    // Prints the full scenario (parent generator, path, expected result, input) so a
    // failing generated case can be located and reproduced.
    private void printlnScenarioDescription(Object resultToMatch) {
        String desc = "Running test case:\n";
        desc += "parent:\t" + parent + "\n";
        desc += "path:\t" + pathToRead + "\n";
        desc += "result:\t" + resultToMatch + "\n";
        desc += "input:\t" + inputObject + "\n";
        System.out.println(desc);
    }

    /**
     * Expands test cases for primitive non-array data types.
     * Word "primitive_" from the pathToExplode is replaced by each primitive type and the scenario is expanded to:
     * <ul>
     * <li>scenario(input, result.byte_, adjustedPath + "byte_"),</li>
     * <li>scenario(input, result.short_, adjustedPath + "short_"),</li>
     * <li>scenario(input, result.int_, adjustedPath + "int_"),</li>
     * <li>scenario(input, result.long_, adjustedPath + "long_"),</li>
     * <li>scenario(input, result.float_, adjustedPath + "float_"),</li>
     * <li>scenario(input, result.double_, adjustedPath + "double_"),</li>
     * <li>scenario(input, result.boolean_, adjustedPath + "boolean_"),</li>
     * <li>scenario(input, result.char_, adjustedPath + "char_"),</li>
     * <li>scenario(input, result.string_, adjustedPath + "string_"),</li>
     * </ul>
     */
    private static Collection<Object[]> expandPrimitiveScenario(Portable input, Object result,
                                                                String pathToExplode, String parent) {
        List<Object[]> scenarios = new ArrayList<>();
        Object adjustedResult;
        for (PrimitiveFields primitiveFields : getPrimitives()) {
            // When the expected result is a PrimitivePortable, read the matching field
            // from it; otherwise (e.g. an expected exception class) pass it through.
            if (result instanceof PrimitivePortable) {
                adjustedResult = ((PrimitivePortable) result).getPrimitive(primitiveFields);
            } else {
                adjustedResult = result;
            }
            // generic method case
            scenarios.add(scenario(input, adjustedResult, pathToExplode.replace("primitive_", primitiveFields.field), parent));
        }
        return scenarios;
    }

    /**
     * Expands test cases for primitive array data types.
     * Word "primitiveArray" is replaced by each primitive array type and the scenario is expanded to for each type:
     * <p>
     * group A:
     * <ul>
     * <li>scenario(prim(FULL), prim(FULL).bytes, ByteArray, "bytes"),</li>
     * <li>scenario(prim(NONE), prim(NONE).bytes, ByteArray, "bytes"),</li>
     * <li>scenario(prim(NULL), prim(NULL).bytes, ByteArray, "bytes"),</li>
     * <p>
     * <li>scenario(prim(FULL), prim(FULL).bytes, ByteArray, "bytes[any]"),</li>
     * <li>scenario(prim(NONE), prim(NONE).bytes, ByteArray, "bytes[any]"),</li>
     * <li>scenario(prim(NULL), prim(NULL).bytes, ByteArray, "bytes[any]"),</li>
     * </ul>
     * <p>
     * group B:
     * <ul>
     * <li>scenario(prim(FULL), prim(FULL).bytes[0], Byte, "bytes[0]"),</li>
     * <li>scenario(prim(FULL), prim(FULL).bytes[1], Byte, "bytes[1]"),</li>
     * <li>scenario(prim(FULL), prim(FULL).bytes[2], Byte, "bytes[2]"),</li>
     * <p>
     * <li>for all primitives <ul>
     * <li>scenario(prim(NONE), null, Byte, "bytes[0]"),</li>
     * <li>scenario(prim(NULL), null, Byte, "bytes[1]"),</li>
     * </ul></li>
     * <p>
     * </ul>
     */
    private static Collection<Object[]> expandPrimitiveArrayScenario(Portable input, PrimitivePortable result,
                                                                     String pathToExplode, String parent) {
        List<Object[]> scenarios = new ArrayList<>();
        // group A:
        for (PrimitiveFields primitiveFields : getPrimitiveArrays()) {
            String path = pathToExplode.replace("primitiveArray", primitiveFields.field);
            Object resultToMatch = result != null ? result.getPrimitiveArray(primitiveFields) : null;
            // [any] over an empty array yields null rather than an empty array.
            Object resultToMatchAny = resultToMatch;
            if (resultToMatchAny != null && Array.getLength(resultToMatchAny) == 0) {
                resultToMatchAny = null;
            }

            scenarios.addAll(asList(
                    scenario(input, resultToMatch, path, parent),
                    scenario(input, resultToMatchAny, path + "[any]", parent)
            ));
        }

        // group B:
        for (PrimitiveFields primitiveFields : getPrimitives()) {
            // e.g. "byte_" -> "bytes": index into the pluralised array field.
            String path = pathToExplode.replace("primitiveArray", primitiveFields.field).replace("_", "s");
            if (result == null || result.getPrimitiveArray(primitiveFields) == null
                    || Array.getLength(result.getPrimitiveArray(primitiveFields)) == 0) {
                scenarios.add(scenario(input, null, path + "[0]", parent));
            } else {
                scenarios.addAll(asList(
                        scenario(input, Array.get(result.getPrimitiveArray(primitiveFields), 0), path + "[0]", parent),
                        scenario(input, Array.get(result.getPrimitiveArray(primitiveFields), 1), path + "[1]", parent),
                        scenario(input, Array.get(result.getPrimitiveArray(primitiveFields), 2), path + "[2]", parent)
                ));
            }
        }
        return scenarios;
    }

    /**
     * Expands test cases that navigate from portable array to a primitive field.
     * Word portableArray is replaced to: portables[0], portables[1], portables[2], portables[any]
     * Word "primitive_" is replaced by each primitive type and the scenario is expanded to for each type:
     * <p>
     * A.) The contract is that input should somewhere on the path contain an array of Portable[] which contains objects of type
     * PrimitivePortable. For example: "portableArray.primitive_" will be expanded two-fold, the portable array and primitive
     * types will be expanded as follows:
     * <ul>
     * <li>portables[0].byte, portables[0].short, portables[0].char, ... for all primitive types</li>
     * <li>portables[1].byte, portables[1].short, portables[1].char, ...</li>
     * <li>portables[2].byte, portables[2].short, portables[2].char, ...</li>
     * </ul>
     * <p>
     * B.) Then the [any] case will be expanded too:
     * <ul>
     * <li>portables[any].byte, portables[any].short, portables[any].char, ... for all primitive types</li>
     * </ul>
     * <p>
     * The expected result should be the object that contains the portable array - that's the general contract.
     * The result for assertion will be automatically calculated
     */
    @SuppressWarnings({"unchecked"})
    private static Collection<Object[]> expandPortableArrayPrimitiveScenario(Portable input, GroupPortable result,
                                                                             String pathToExplode, String parent) {
        List<Object[]> scenarios = new ArrayList<>();
        // expansion of the portable array using the following quantifiers
        for (String token : asList("0", "1", "2", "any")) {

            String path = pathToExplode.replace("portableArray", "portables[" + token + "]");
            if (token.equals("any")) {
                // B. case with [any] operator on portable array
                // expansion of the primitive fields
                for (PrimitiveFields primitiveFields : getPrimitives()) {
                    List resultToMatch = new ArrayList();
                    int portableCount = 0;
                    // NPE deliberately swallowed: a null result/portables simply means
                    // no portables to collect from.
                    try {
                        portableCount = result.portables.length;
                    } catch (NullPointerException ignored) {
                    }
                    for (int i = 0; i < portableCount; i++) {
                        PrimitivePortable portable = (PrimitivePortable) result.portables[i];
                        resultToMatch.add(portable.getPrimitive(primitiveFields));
                    }
                    // [any] over a null/empty array is expected to read as null.
                    if (result == null || result.portables == null || result.portables.length == 0) {
                        resultToMatch = null;
                    }

                    scenarios.add(scenario(input, resultToMatch, path.replace("primitive_", primitiveFields.field), parent));
                }
            } else {
                // A. case with [0], [1], [2] operator on portable array
                // expansion of the primitive fields
                for (PrimitiveFields primitiveFields : getPrimitives()) {
                    Object resultToMatch = null;
                    // Out-of-range index or null array simply yields a null expected value.
                    try {
                        PrimitivePortable portable = (PrimitivePortable) result.portables[Integer.parseInt(token)];
                        resultToMatch = portable.getPrimitive(primitiveFields);
                    } catch (NullPointerException ignored) {
                    } catch (IndexOutOfBoundsException ignored) {
                    }

                    if (result == null || result.portables == null || result.portables.length == 0) {
                        resultToMatch = null;
                    }
                    scenarios.add(scenario(input, resultToMatch, path.replace("primitive_", primitiveFields.field), parent));
                }
            }
        }
        return scenarios;
    }

    // ----------------------------------------------------------------------------------------------------------
    // DIRECT primitive and primitive-array access
    // ----------------------------------------------------------------------------------------------------------

    private static void directPrimitiveScenarios(List<Object[]> result) {
        String parent = "directPrimitiveScenarios";
        // FULLy initialised primitive objects accessed directly
        result.addAll(expandPrimitiveScenario(prim(FULL), prim(FULL), "primitive_", parent));

        // primitive arrays accessed directly (arrays are fully initialised, empty and null)
        result.addAll(expandPrimitiveArrayScenario(prim(FULL), prim(FULL), "primitiveArray", parent));
        result.addAll(expandPrimitiveArrayScenario(prim(NONE), prim(NONE), "primitiveArray", parent));
        result.addAll(expandPrimitiveArrayScenario(prim(NULL), prim(NULL), "primitiveArray", parent));
    }

    // ----------------------------------------------------------------------------------------------------------
    // from PORTABLE to primitive and primitive-array access
    // ----------------------------------------------------------------------------------------------------------

    private static void fromPortableToPrimitiveScenarios(List<Object[]> result) {
        String parent = "directPrimitiveScenariosWrongMethodType";
        // FULLy initialised primitive
objects accessed from portable result.addAll(expandPrimitiveScenario(group(prim(FULL)), prim(FULL), "portable.primitive_", parent)); // primitive arrays accessed from portable (arrays are fully initialised, empty and null) result.addAll(expandPrimitiveArrayScenario(group(prim(FULL)), prim(FULL), "portable.primitiveArray", parent)); result.addAll(expandPrimitiveArrayScenario(group(prim(NONE)), prim(NONE), "portable.primitiveArray", parent)); result.addAll(expandPrimitiveArrayScenario(group(prim(NULL)), prim(NULL), "portable.primitiveArray", parent)); } // ---------------------------------------------------------------------------------------------------------- // from PORTABLE-ARRAY to primitive and primitive-array access // ---------------------------------------------------------------------------------------------------------- private static void fromPortableArrayToPrimitiveScenarios(List<Object[]> result) { String p = "fromPortableArrayToPrimitiveScenarios"; // FULLy initialised primitive objects accessed from portable stored in array GroupPortable fullGroupVarious = group(prim(1, FULL), prim(10, FULL), prim(100, FULL)); result.addAll(expandPortableArrayPrimitiveScenario(fullGroupVarious, fullGroupVarious, "portableArray.primitive_", p)); GroupPortable fullEmptyNullGroup = group(prim(1, FULL), prim(10, NONE), prim(100, NULL)); result.addAll(expandPortableArrayPrimitiveScenario(fullEmptyNullGroup, fullEmptyNullGroup, "portableArray.primitive_", p)); // empty or null portable array de-referenced further GroupPortable nullArrayGroup = new GroupPortable((Portable[]) null); result.addAll(expandPortableArrayPrimitiveScenario(nullArrayGroup, nullArrayGroup, "portableArray.primitive_", p)); GroupPortable emptyArrayGroup = new GroupPortable(new Portable[0]); result.addAll(expandPortableArrayPrimitiveScenario(emptyArrayGroup, emptyArrayGroup, "portableArray.primitive_", p)); // FULLy initialised primitive arrays accessed from portable stored in array GroupPortable 
fullGroup = group(prim(FULL), prim(FULL), prim(FULL)); result.addAll(expandPrimitiveArrayScenario(fullGroup, prim(FULL), "portables[0].primitiveArray", p)); result.addAll(expandPrimitiveArrayScenario(fullGroup, prim(FULL), "portables[1].primitiveArray", p)); result.addAll(expandPrimitiveArrayScenario(fullGroup, prim(FULL), "portables[2].primitiveArray", p)); // EMPTY primitive arrays accessed from portable stored in array GroupPortable noneGroup = group(prim(NONE), prim(NONE), prim(NONE)); result.addAll(expandPrimitiveArrayScenario(noneGroup, prim(NONE), "portables[0].primitiveArray", p)); result.addAll(expandPrimitiveArrayScenario(noneGroup, prim(NONE), "portables[1].primitiveArray", p)); result.addAll(expandPrimitiveArrayScenario(noneGroup, prim(NONE), "portables[2].primitiveArray", p)); // NULL primitive arrays accessed from portable stored in array GroupPortable nullGroup = group(prim(NULL), prim(NULL), prim(NULL)); result.addAll(expandPrimitiveArrayScenario(nullGroup, prim(NULL), "portables[0].primitiveArray", p)); result.addAll(expandPrimitiveArrayScenario(nullGroup, prim(NULL), "portables[1].primitiveArray", p)); result.addAll(expandPrimitiveArrayScenario(nullGroup, prim(NULL), "portables[2].primitiveArray", p)); // EMPTY portable array -> de-referenced further for primitive access result.addAll(expandPrimitiveScenario(emptyArrayGroup, null, "portables[0].primitive_", p)); result.addAll(expandPrimitiveScenario(emptyArrayGroup, null, "portables[1].primitive_", p)); result.addAll(expandPrimitiveScenario(emptyArrayGroup, null, "portables[2].primitive_", p)); result.add(scenario(emptyArrayGroup, null, "portables[0].string_", p)); result.add(scenario(emptyArrayGroup, null, "portables[1].string_", p)); // EMPTY portable array -> de-referenced further for array access result.addAll(expandPrimitiveArrayScenario(emptyArrayGroup, null, "portables[0].primitiveArray", p)); result.addAll(expandPrimitiveArrayScenario(emptyArrayGroup, null, "portables[1].primitiveArray", 
p)); result.addAll(expandPrimitiveArrayScenario(emptyArrayGroup, null, "portables[2].primitiveArray", p)); // NULL portable array -> de-referenced further for primitive access result.addAll(expandPrimitiveScenario(nullArrayGroup, null, "portables[0].primitive_", p)); result.addAll(expandPrimitiveScenario(nullArrayGroup, null, "portables[1].primitive_", p)); result.addAll(expandPrimitiveScenario(nullArrayGroup, null, "portables[2].primitive_", p)); result.add(scenario(nullArrayGroup, null, "portables[0].string_", p)); result.add(scenario(nullArrayGroup, null, "portables[1].string_", p)); // EMPTY portable array -> de-referenced further for array access result.addAll(expandPrimitiveArrayScenario(nullArrayGroup, null, "portables[0].primitiveArray", p)); result.addAll(expandPrimitiveArrayScenario(nullArrayGroup, null, "portables[1].primitiveArray", p)); result.addAll(expandPrimitiveArrayScenario(nullArrayGroup, null, "portables[2].primitiveArray", p)); } // ---------------------------------------------------------------------------------------------------------- // from PORTABLE via PORTABLE to further access // ---------------------------------------------------------------------------------------------------------- private static void fromPortableToPortableToPrimitiveScenarios(List<Object[]> result) { String p = "fromPortableToPortableToPrimitiveScenarios"; // FULLy initialised primitive objects accessed from portable stored in array NestedGroupPortable nestedFullGroup = nested(group(prim(1, FULL), prim(10, FULL), prim(100, FULL))); result.addAll(asList( scenario(nestedFullGroup, (nestedFullGroup.portable), "portable", p), scenario(nestedFullGroup, ((GroupPortable) (nestedFullGroup.portable)).portable, "portable.portable", p) )); result.addAll(expandPrimitiveScenario(nestedFullGroup, ((GroupPortable) nestedFullGroup.portable).portable, "portable.portable.primitive_", p)); result.addAll(expandPrimitiveArrayScenario(nestedFullGroup, (PrimitivePortable) ((GroupPortable) 
nestedFullGroup.portable).portable, "portable.portable.primitiveArray", p)); NestedGroupPortable nestedFullEmptyNullGroup = nested(group(prim(1, FULL), prim(10, NONE), prim(100, NULL))); result.addAll(expandPrimitiveScenario(nestedFullEmptyNullGroup, ((GroupPortable) nestedFullEmptyNullGroup.portable).portable, "portable.portable.primitive_", p)); result.addAll(expandPrimitiveArrayScenario(nestedFullEmptyNullGroup, (PrimitivePortable) ((GroupPortable) nestedFullEmptyNullGroup.portable).portable, "portable.portable.primitiveArray", p)); // empty or null portable array de-referenced further NestedGroupPortable nestedNullArrayGroup = nested(new GroupPortable((Portable[]) null)); result.addAll(asList( scenario(nestedNullArrayGroup, (nestedNullArrayGroup.portable), "portable", p), scenario(nestedNullArrayGroup, null, "portable.portable", p) )); result.addAll(expandPrimitiveScenario(nestedNullArrayGroup, null, "portable.portable.primitive_", p)); result.addAll(expandPrimitiveArrayScenario(nestedNullArrayGroup, null, "portable.portable.primitiveArray", p)); NestedGroupPortable nestedNull = nested(new Portable[0]); result.addAll(asList( scenario(nestedNull, null, "portable", p), scenario(nestedNull, null, "portable.portable", p) )); result.addAll(expandPrimitiveScenario(nestedNull, null, "portable.portable.primitive_", p)); result.addAll(expandPrimitiveArrayScenario(nestedNull, null, "portable.portable.primitiveArray", p)); } // ---------------------------------------------------------------------------------------------------------- // from PORTABLE via PORTABLE_ARRAY to further access // ---------------------------------------------------------------------------------------------------------- private static void fromPortableToPortableArrayToPrimitiveScenarios(List<Object[]> result) { // FULLy initialised primitive objects accessed from portable stored in array String p = "fromPortableToPortableToPrimitiveScenarios"; NestedGroupPortable nestedFullGroup = 
nested(group(prim(1, FULL), prim(10, FULL), prim(100, FULL))); result.addAll(asList( scenario(nestedFullGroup, ((GroupPortable) (nestedFullGroup.portable)).portables, "portable.portables", p), scenario(nestedFullGroup, ((GroupPortable) (nestedFullGroup.portable)).portables, "portable.portables[any]", p), scenario(nestedFullGroup, ((GroupPortable) (nestedFullGroup.portable)).portables[0], "portable.portables[0]", p), scenario(nestedFullGroup, ((GroupPortable) (nestedFullGroup.portable)).portables[1], "portable.portables[1]", p), scenario(nestedFullGroup, ((GroupPortable) (nestedFullGroup.portable)).portables[2], "portable.portables[2]", p), scenario(nestedFullGroup, null, "portable.portables[12]", p) )); result.addAll(expandPortableArrayPrimitiveScenario(nestedFullGroup, (GroupPortable) nestedFullGroup.portable, "portable.portableArray.primitive_", p) ); NestedGroupPortable nestedFullEmptyNullGroup = nested(group(prim(1, FULL), prim(10, NONE), prim(100, NULL))); result.addAll(expandPortableArrayPrimitiveScenario(nestedFullEmptyNullGroup, (GroupPortable) nestedFullEmptyNullGroup.portable, "portable.portableArray.primitive_", p) ); // empty or null portable array de-referenced further NestedGroupPortable nestedNullArrayGroup = nested(new GroupPortable((Portable[]) null)); result.addAll(asList( scenario(nestedNullArrayGroup, null, "portable.portables", p), scenario(nestedNullArrayGroup, null, "portable.portables[any]", p), scenario(nestedNullArrayGroup, null, "portable.portables[0]", p), scenario(nestedNullArrayGroup, null, "portable.portables[1]", p), scenario(nestedNullArrayGroup, null, "portable.portables[2]", p) )); result.addAll(expandPortableArrayPrimitiveScenario(nestedNullArrayGroup, (GroupPortable) nestedNullArrayGroup.portable, "portable.portableArray.primitive_", p) ); NestedGroupPortable nestedEmptyArrayGroup = nested(new GroupPortable(new Portable[0])); result.addAll(asList( scenario(nestedEmptyArrayGroup, new Portable[0], "portable.portables", p), 
scenario(nestedEmptyArrayGroup, null, "portable.portables[any]", p), scenario(nestedEmptyArrayGroup, null, "portable.portables[0]", p), scenario(nestedEmptyArrayGroup, null, "portable.portables[1]", p), scenario(nestedEmptyArrayGroup, null, "portable.portables[2]", p) )); result.addAll(expandPortableArrayPrimitiveScenario(nestedEmptyArrayGroup, (GroupPortable) nestedEmptyArrayGroup.portable, "portable.portableArray.primitive_", p) ); NestedGroupPortable nestedEmpty = nested(new GroupPortable[0]); result.addAll(asList( scenario(nestedEmpty, null, "portable.portables", p), scenario(nestedEmpty, null, "portable.portables[any]", p), scenario(nestedEmpty, null, "portable.portables[0]", p), scenario(nestedEmpty, null, "portable.portables[1]", p), scenario(nestedEmpty, null, "portable.portables[2]", p) )); result.addAll(expandPortableArrayPrimitiveScenario(nestedEmpty, (GroupPortable) nestedEmpty.portable, "portable.portableArray.primitive_", p) ); NestedGroupPortable nestedNull = nested((GroupPortable[]) null); result.addAll(asList( scenario(nestedNull, null, "portable.portables", p), scenario(nestedNull, null, "portable.portables[any]", p), scenario(nestedNull, null, "portable.portables[0]", p), scenario(nestedNull, null, "portable.portables[1]", p), scenario(nestedNull, null, "portable.portables[2]", p) )); result.addAll(expandPortableArrayPrimitiveScenario(nestedNull, (GroupPortable) nestedNull.portable, "portable.portableArray.primitive_", p) ); } // ---------------------------------------------------------------------------------------------------------- // from PORTABLE_ARRAY[any] via PORTABLE_ARRAY[any] to further PRIMITIVE access // ---------------------------------------------------------------------------------------------------------- private static void fromPortableArrayAnyToPortableArrayAnyToPrimitiveScenarios(List<Object[]> result) { String p = "fromPortableArrayAnyToPortableArrayAnyToPrimitiveScenarios"; // ============================================= // 
INPUT mixed // ============================================= PrimitivePortable p1 = prim(1, NONE); PrimitivePortable p10 = prim(10, FULL); PrimitivePortable p20 = prim(20, FULL); NestedGroupPortable input = nested( new Portable[]{ new GroupPortable(new Portable[0]), group(p1, p10), new GroupPortable((Portable[]) null), group(new PrimitivePortable[]{p20}), } ); result.addAll(asList( scenario(input, list(null, p1.byte_, p10.byte_, p20.byte_), "portables[any].portables[any].byte_", p), scenario(input, list(null, p1.short_, p10.short_, p20.short_), "portables[any].portables[any].short_", p), scenario(input, list(null, p1.int_, p10.int_, p20.int_), "portables[any].portables[any].int_", p), scenario(input, list(null, p1.long_, p10.long_, p20.long_), "portables[any].portables[any].long_", p), scenario(input, list(null, p1.char_, p10.char_, p20.char_), "portables[any].portables[any].char_", p), scenario(input, list(null, p1.float_, p10.float_, p20.float_), "portables[any].portables[any].float_", p), scenario(input, list(null, p1.double_, p10.double_, p20.double_), "portables[any].portables[any].double_", p), scenario(input, list(null, p1.boolean_, p10.boolean_, p20.boolean_), "portables[any].portables[any].boolean_", p), scenario(input, list(null, p1.string_, p10.string_, p20.string_), "portables[any].portables[any].string_", p) )); // ============================================= // INPUT empty // ============================================= NestedGroupPortable inputEmpty = nested( new Portable[0] ); result.addAll(asList( scenario(inputEmpty, null, "portables[any].portables[any].byte_", p), scenario(inputEmpty, null, "portables[any].portables[any].short_", p), scenario(inputEmpty, null, "portables[any].portables[any].int_", p), scenario(inputEmpty, null, "portables[any].portables[any].long_", p), scenario(inputEmpty, null, "portables[any].portables[any].char_", p), scenario(inputEmpty, null, "portables[any].portables[any].float_", p), scenario(inputEmpty, null, 
"portables[any].portables[any].double_", p), scenario(inputEmpty, null, "portables[any].portables[any].boolean_", p), scenario(inputEmpty, null, "portables[any].portables[any].string_", p) )); // ============================================= // INPUT null // ============================================= NestedGroupPortable inputNull = nested((Portable[]) null); result.addAll(asList( scenario(inputNull, null, "portables[any].portables[any].byte_", p), scenario(inputNull, null, "portables[any].portables[any].short_", p), scenario(inputNull, null, "portables[any].portables[any].int_", p), scenario(inputNull, null, "portables[any].portables[any].long_", p), scenario(inputNull, null, "portables[any].portables[any].char_", p), scenario(inputNull, null, "portables[any].portables[any].float_", p), scenario(inputNull, null, "portables[any].portables[any].double_", p), scenario(inputNull, null, "portables[any].portables[any].boolean_", p), scenario(inputNull, null, "portables[any].portables[any].string_", p) )); } // ---------------------------------------------------------------------------------------------------------- // from PORTABLE_ARRAY[any] via PORTABLE_ARRAY[any] to further PRIMITIVE_ARRAY[any] access // ---------------------------------------------------------------------------------------------------------- private static void fromPortableArrayToPortableArrayToPrimitiveArrayAnyScenarios(List<Object[]> result) { String method = "fromPortableArrayToPortableArrayToPrimitiveArrayAnyScenarios"; String p = method + " mixed"; // ============================================= // INPUT mixed // ============================================= NestedGroupPortable input = nested( new Portable[]{ new GroupPortable(new Portable[0]), group(prim(1, NONE), prim(10, FULL), prim(50, NULL)), new GroupPortable((Portable[]) null), group(prim(20, FULL), prim(70, NULL)), } ); PrimitivePortable p10 = prim(10, FULL); PrimitivePortable p20 = prim(20, FULL); result.addAll(asList( 
scenario(input, list(null, p10.bytes, p20.bytes), "portables[any].portables[any].bytes[any]", p), scenario(input, list(null, p10.shorts, p20.shorts), "portables[any].portables[any].shorts[any]", p), scenario(input, list(null, p10.ints, p20.ints), "portables[any].portables[any].ints[any]", p), scenario(input, list(null, p10.longs, p20.longs), "portables[any].portables[any].longs[any]", p), scenario(input, list(null, p10.chars, p20.chars), "portables[any].portables[any].chars[any]", p), scenario(input, list(null, p10.floats, p20.floats), "portables[any].portables[any].floats[any]", p), scenario(input, list(null, p10.doubles, p20.doubles), "portables[any].portables[any].doubles[any]", p), scenario(input, list(null, p10.booleans, p20.booleans), "portables[any].portables[any].booleans[any]", p), scenario(input, list(null, p10.strings, p20.strings), "portables[any].portables[any].strings[any]", p) )); // ============================================= // INPUT empty // ============================================= p = method + " empty"; NestedGroupPortable inputEmpty = nested( new Portable[0] ); result.addAll(asList( scenario(inputEmpty, null, "portables[any].portables[any].bytes[any]", p), scenario(inputEmpty, null, "portables[any].portables[any].shorts[any]", p), scenario(inputEmpty, null, "portables[any].portables[any].ints[any]", p), scenario(inputEmpty, null, "portables[any].portables[any].longs[any]", p), scenario(inputEmpty, null, "portables[any].portables[any].chars[any]", p), scenario(inputEmpty, null, "portables[any].portables[any].floats[any]", p), scenario(inputEmpty, null, "portables[any].portables[any].doubles[any]", p), scenario(inputEmpty, null, "portables[any].portables[any].booleans[any]", p), scenario(inputEmpty, null, "portables[any].portables[any].strings[any]", p) )); // ============================================= // INPUT null // ============================================= p = method + " null"; NestedGroupPortable inputNull = nested((Portable[]) 
null); result.addAll(asList( scenario(inputNull, null, "portables[any].portables[any].bytes[any]", p), scenario(inputNull, null, "portables[any].portables[any].shorts[any]", p), scenario(inputNull, null, "portables[any].portables[any].ints[any]", p), scenario(inputNull, null, "portables[any].portables[any].longs[any]", p), scenario(inputNull, null, "portables[any].portables[any].chars[any]", p), scenario(inputNull, null, "portables[any].portables[any].floats[any]", p), scenario(inputNull, null, "portables[any].portables[any].doubles[any]", p), scenario(inputNull, null, "portables[any].portables[any].booleans[any]", p), scenario(inputNull, null, "portables[any].portables[any].strings[any]", p) )); } // ---------------------------------------------------------------------------------------------------------- // from PORTABLE_ARRAY to PORTABLE_ARRAY access + further // ---------------------------------------------------------------------------------------------------------- private static void fromPortableArrayToPortableArrayToPrimitiveScenarios(List<Object[]> result) { String p = "fromPortableArrayToPortableArrayToPrimitiveScenarios"; // FULLy initialised primitive objects accessed from portable stored in array NestedGroupPortable nestedFullGroup = nested(group(prim(1, FULL), prim(10, FULL), prim(100, FULL))); result.addAll(asList( scenario(nestedFullGroup, ((GroupPortable) (nestedFullGroup.portables[0])).portables, "portables[0].portables", p), scenario(nestedFullGroup, ((GroupPortable) (nestedFullGroup.portables[0])).portables, "portables[0].portables[any]", p), scenario(nestedFullGroup, prim(1, FULL), "portables[any].portables[0]", p), scenario(nestedFullGroup, ((GroupPortable) (nestedFullGroup.portables[0])).portables, "portables[any].portables[any]", p), scenario(nestedFullGroup, ((GroupPortable) (nestedFullGroup.portables[0])).portables[0], "portables[0].portables[0]", p), scenario(nestedFullGroup, ((GroupPortable) (nestedFullGroup.portables[0])).portables[1], 
"portables[0].portables[1]", p), scenario(nestedFullGroup, ((GroupPortable) (nestedFullGroup.portables[0])).portables[2], "portables[0].portables[2]", p), scenario(nestedFullGroup, null, "portables[0].portables[12]", p) )); result.addAll(expandPortableArrayPrimitiveScenario(nestedFullGroup, (GroupPortable) nestedFullGroup.portable, "portables[0].portableArray.primitive_", p) ); NestedGroupPortable anyGroup = nested(new Portable[]{ group(prim(1, FULL), prim(10, NONE), prim(50, NULL)), group(prim(2, FULL), prim(20, NONE), prim(80, NULL)), }); result.addAll(expandPortableArrayPrimitiveScenario(anyGroup, (GroupPortable) anyGroup.portables[0], "portables[0].portableArray.primitive_", p) ); // empty or null portable array de-referenced further NestedGroupPortable nestedNullArrayGroup = nested(new GroupPortable((Portable[]) null)); result.addAll(asList( scenario(nestedNullArrayGroup, null, "portables[0].portables", p), scenario(nestedNullArrayGroup, null, "portables[0].portables[any]", p), scenario(nestedNullArrayGroup, null, "portables[any].portables[0]", p), scenario(nestedNullArrayGroup, null, "portables[any].portables[any]", p), scenario(nestedNullArrayGroup, null, "portables[0].portables[0]", p), scenario(nestedNullArrayGroup, null, "portables[0].portables[1]", p), scenario(nestedNullArrayGroup, null, "portables[0].portables[2]", p) )); result.addAll(expandPortableArrayPrimitiveScenario(nestedNullArrayGroup, (GroupPortable) nestedNullArrayGroup.portable, "portables[0].portableArray.primitive_", p) ); NestedGroupPortable nestedEmptyArrayGroup = nested(new GroupPortable(new Portable[0])); result.addAll(asList( scenario(nestedEmptyArrayGroup, new Portable[0], "portables[0].portables", p), scenario(nestedEmptyArrayGroup, null, "portables[0].portables[any]", p), scenario(nestedEmptyArrayGroup, null, "portables[any].portables[0]", p), scenario(nestedEmptyArrayGroup, null, "portables[any].portables[any]", p), scenario(nestedEmptyArrayGroup, null, 
"portables[0].portables[0]", p), scenario(nestedEmptyArrayGroup, null, "portables[0].portables[1]", p), scenario(nestedEmptyArrayGroup, null, "portables[0].portables[2]", p) )); result.addAll(expandPortableArrayPrimitiveScenario(nestedEmptyArrayGroup, (GroupPortable) nestedEmptyArrayGroup.portable, "portables[0].portableArray.primitive_", p) ); NestedGroupPortable nestedEmpty = nested(new GroupPortable[0]); result.addAll(asList( scenario(nestedEmpty, null, "portables[0].portables", p), scenario(nestedEmpty, null, "portables[0].portables[any]", p), scenario(nestedEmpty, null, "portables[any].portables[0]", p), scenario(nestedEmpty, null, "portables[any].portables[any]", p), scenario(nestedEmpty, null, "portables[0].portables[0]", p), scenario(nestedEmpty, null, "portables[0].portables[1]", p), scenario(nestedEmpty, null, "portables[0].portables[2]", p) )); result.addAll(expandPortableArrayPrimitiveScenario(nestedEmpty, (GroupPortable) nestedEmpty.portable, "portables[0].portableArray.primitive_", p) ); NestedGroupPortable nestedNull = nested((GroupPortable[]) null); result.addAll(asList( scenario(nestedNull, null, "portables[0].portables", p), scenario(nestedNull, null, "portables[0].portables[any]", p), scenario(nestedNull, null, "portables[any].portables[0]", p), scenario(nestedNull, null, "portables[any].portables[any]", p), scenario(nestedNull, null, "portables[0].portables[0]", p), scenario(nestedNull, null, "portables[0].portables[1]", p), scenario(nestedNull, null, "portables[0].portables[2]", p) )); result.addAll(expandPortableArrayPrimitiveScenario(nestedNull, (GroupPortable) nestedNull.portable, "portables[0].portableArray.primitive_", p) ); } // ---------------------------------------------------------------------------------------------------------- // from PORTABLE_ARRAY to PORTABLE_ARRAY access + further // ---------------------------------------------------------------------------------------------------------- private static void 
fromPortableArrayToPortableArrayAnyScenarios(List<Object[]> result) { String p = "fromPortableArrayToPortableArrayAnyScenarios"; NestedGroupPortable anyGroup = nested(new Portable[]{ group(prim(1, FULL), prim(10, NONE), prim(50, NULL)), group(prim(2, FULL), prim(20, NONE), prim(80, NULL)), }); result.addAll(asList( scenario(anyGroup, ((GroupPortable) (anyGroup.portables[0])).portables, "portables[0].portables[any]", p), scenario(anyGroup, new Portable[]{prim(1, FULL), prim(2, FULL)}, "portables[any].portables[0]", p), scenario(anyGroup, new Portable[]{prim(10, FULL), prim(20, FULL)}, "portables[any].portables[1]", p), scenario(anyGroup, new Portable[]{prim(50, FULL), prim(80, FULL)}, "portables[any].portables[2]", p), scenario(anyGroup, new Portable[]{prim(1, FULL), prim(10, FULL), prim(50, FULL), prim(2, FULL), prim(20, FULL), prim(80, FULL), }, "portables[any].portables[any]", p) )); NestedGroupPortable nestedEmptyArrayGroup = nested(new Portable[]{new GroupPortable(new Portable[0]), group(prim(1, FULL), prim(10, NONE), prim(50, NULL)), }); result.addAll(asList( scenario(nestedEmptyArrayGroup, null, "portables[0].portables[any]", p), scenario(nestedEmptyArrayGroup, new Portable[]{null, prim(1, FULL)}, "portables[any].portables[0]", p), scenario(nestedEmptyArrayGroup, new Portable[]{null, prim(10, FULL)}, "portables[any].portables[1]", p), scenario(nestedEmptyArrayGroup, new Portable[]{null, prim(50, FULL)}, "portables[any].portables[2]", p), scenario(nestedEmptyArrayGroup, new Portable[]{null, prim(1, FULL), prim(10, FULL), prim(50, FULL)}, "portables[any].portables[any]", p) )); NestedGroupPortable nestedNullArrayGroup = nested(new Portable[]{new GroupPortable((Portable[]) null), group(prim(1, FULL), prim(10, NONE), prim(50, NULL)), }); result.addAll(asList( scenario(nestedNullArrayGroup, null, "portables[0].portables[any]", p), scenario(nestedNullArrayGroup, new Portable[]{null, prim(1, FULL)}, "portables[any].portables[0]", p), scenario(nestedNullArrayGroup, 
new Portable[]{null, prim(10, FULL)}, "portables[any].portables[1]", p), scenario(nestedNullArrayGroup, new Portable[]{null, prim(50, FULL)}, "portables[any].portables[2]", p), scenario(nestedNullArrayGroup, new Portable[]{null, prim(1, FULL), prim(10, FULL), prim(50, FULL)}, "portables[any].portables[any]", p) )); } // // Test data structure utilities // private static Object[] scenario(Portable input, Object result, String path, String parent) { return new Object[]{input, result, path, parent}; } /** * Unwraps input objects if they are arrays or lists and adds all to an output list. */ @SuppressWarnings("unchecked") private static <T> List<T> list(T... objects) { List<T> result = new ArrayList<>(); for (T object : objects) { if (object == null) { //noinspection ConstantConditions result.add(object); } else if (object.getClass().isArray()) { int length = Array.getLength(object); for (int i = 0; i < length; i++) { result.add((T) Array.get(object, i)); } } else if (object instanceof Collection) { result.addAll((Collection<T>) object); } else { result.add(object); } } return result; } // // Hazelcast init Utilities // public GenericRecordQueryReader reader(Portable portable) throws IOException { SerializationConfig serializationConfig = new SerializationConfig(); serializationConfig.addPortableFactory(TestPortableFactory.ID, new TestPortableFactory()); InternalSerializationService ss = new DefaultSerializationServiceBuilder().setConfig(serializationConfig).build(); // put fully initialised object to a map // avoid the case where there's no class definition ss.toData(P_NON_EMPTY); ss.toData(G_NON_EMPTY); ss.toData(N_NON_EMPTY); Data data = ss.toData(portable); return new GenericRecordQueryReader(ss.readAsInternalGenericRecord(data)); } }
package util.misc; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.Serializable; import java.util.Comparator; /** * [Borrowed from StanfordNLP] * * Pair is a Class for holding mutable pairs of objects. * * <i>Implementation note:</i> * on a 32-bit JVM uses ~ 8 (this) + 4 (first) + 4 (second) = 16 bytes. * on a 64-bit JVM uses ~ 16 (this) + 8 (first) + 8 (second) = 32 bytes. * * Many applications use a lot of Pairs so it's good to keep this * number small. * * @author Dan Klein * @author Christopher Manning (added stuff from Kristina's, rounded out) * @version 2002/08/25 */ public class Pair <T1,T2> implements Comparable<Pair<T1,T2>>, Serializable { /** * Direct access is deprecated. Use first(). * * @serial */ public T1 first; /** * Direct access is deprecated. Use second(). * * @serial */ public T2 second; public Pair() { // first = null; second = null; -- default initialization } public Pair(T1 first, T2 second) { this.first = first; this.second = second; } public T1 first() { return first; } public T2 second() { return second; } public void setFirst(T1 o) { first = o; } public void setSecond(T2 o) { second = o; } @Override public String toString() { return "(" + first + "," + second + ")"; } @Override public boolean equals(Object o) { if (o instanceof Pair) { @SuppressWarnings("rawtypes") Pair p = (Pair) o; return (first == null ? p.first() == null : first.equals(p.first())) && (second == null ? p.second() == null : second.equals(p.second())); } else { return false; } } @Override public int hashCode() { int firstHash = (first == null ? 0 : first.hashCode()); int secondHash = (second == null ? 0 : second.hashCode()); return firstHash*31 + secondHash; } // public List<Object> asList() { // return CollectionUtils.makeList(first, second); // } /** * Read a string representation of a Pair from a DataStream. * This might not work correctly unless the pair of objects are of type * <code>String</code>. 
*/ public static Pair<String, String> readStringPair(DataInputStream in) { Pair<String, String> p = new Pair<String, String>(); try { p.first = in.readUTF(); p.second = in.readUTF(); } catch (Exception e) { e.printStackTrace(); } return p; } /** * Returns a Pair constructed from X and Y. Convenience method; the * compiler will disambiguate the classes used for you so that you * don't have to write out potentially long class names. */ public static <X, Y> Pair<X, Y> makePair(X x, Y y) { return new Pair<X, Y>(x, y); } /** * Write a string representation of a Pair to a DataStream. * The <code>toString()</code> method is called on each of the pair * of objects and a <code>String</code> representation is written. * This might not allow one to recover the pair of objects unless they * are of type <code>String</code>. */ public void save(DataOutputStream out) { try { out.writeUTF(first.toString()); out.writeUTF(second.toString()); } catch (Exception e) { e.printStackTrace(); } } /** * Compares this <code>Pair</code> to another object. * If the object is a <code>Pair</code>, this function will work providing * the elements of the <code>Pair</code> are themselves comparable. * It will then return a value based on the pair of objects, where * <code>p &gt; q iff p.first() &gt; q.first() || * (p.first().equals(q.first()) && p.second() &gt; q.second())</code>. * If the other object is not a <code>Pair</code>, it throws a * <code>ClassCastException</code>. * * @param another the <code>Object</code> to be compared. * @return the value <code>0</code> if the argument is a * <code>Pair</code> equal to this <code>Pair</code>; a value less than * <code>0</code> if the argument is a <code>Pair</code> * greater than this <code>Pair</code>; and a value * greater than <code>0</code> if the argument is a * <code>Pair</code> less than this <code>Pair</code>. * @throws ClassCastException if the argument is not a * <code>Pair</code>. 
* @see java.lang.Comparable */ @SuppressWarnings("unchecked") public int compareTo(Pair<T1,T2> another) { int comp = ((Comparable<T1>) first()).compareTo(another.first()); if (comp != 0) { return comp; } else { return ((Comparable<T2>) second()).compareTo(another.second()); } } /** * If first and second are Strings, then this returns an MutableInternedPair * where the Strings have been interned, and if this Pair is serialized * and then deserialized, first and second are interned upon * deserialization. * * @param p A pair of Strings * @return MutableInternedPair, with same first and second as this. */ public static Pair<String, String> stringIntern(Pair<String, String> p) { return new MutableInternedPair(p); } /** * Returns an MutableInternedPair where the Strings have been interned. * This is a factory method for creating an * MutableInternedPair. It requires the arguments to be Strings. * If this Pair is serialized * and then deserialized, first and second are interned upon * deserialization. * <p><i>Note:</i> I put this in thinking that its use might be * faster than calling <code>x = new Pair(a, b).stringIntern()</code> * but it's not really clear whether this is true. 
* * @param first The first object * @param second The second object * @return An MutableInternedPair, with given first and second */ public static Pair<String, String> internedStringPair(String first, String second) { return new MutableInternedPair(first, second); } /** * use serialVersionUID for cross version serialization compatibility */ private static final long serialVersionUID = 1360822168806852921L; static class MutableInternedPair extends Pair<String, String> { private MutableInternedPair(Pair<String, String> p) { super(p.first, p.second); internStrings(); } private MutableInternedPair(String first, String second) { super(first, second); internStrings(); } protected Object readResolve() { internStrings(); return this; } private void internStrings() { if (first != null) { first = first.intern(); } if (second != null) { second = second.intern(); } } // use serialVersionUID for cross version serialization compatibility private static final long serialVersionUID = 1360822168806852922L; } // /** // * {@inheritDoc} // */ // public void prettyLog(RedwoodChannels channels, String description) { // PrettyLogger.log(channels, description, this.asList()); // } /** * Compares a <code>Pair</code> to another <code>Pair</code> according to the first object of the pair only * This function will work providing * the first element of the <code>Pair</code> is comparable, otherwise will throw a * <code>ClassCastException</code> * @author jonathanberant * * @param <T1> * @param <T2> */ public static class ByFirstPairComparator<T1,T2> implements Comparator<Pair<T1,T2>> { @SuppressWarnings("unchecked") @Override public int compare(Pair<T1, T2> pair1, Pair<T1, T2> pair2) { return ((Comparable<T1>) pair1.first()).compareTo(pair2.first()); } } /** * Compares a <code>Pair</code> to another <code>Pair</code> according to the first object of the pair only in decreasing order * This function will work providing * the first element of the <code>Pair</code> is comparable, otherwise will 
throw a * <code>ClassCastException</code> * @author jonathanberant * * @param <T1> * @param <T2> */ public static class ByFirstReversePairComparator<T1,T2> implements Comparator<Pair<T1,T2>> { @SuppressWarnings("unchecked") @Override public int compare(Pair<T1, T2> pair1, Pair<T1, T2> pair2) { return -((Comparable<T1>) pair1.first()).compareTo(pair2.first()); } } /** * Compares a <code>Pair</code> to another <code>Pair</code> according to the second object of the pair only * This function will work providing * the first element of the <code>Pair</code> is comparable, otherwise will throw a * <code>ClassCastException</code> * @author jonathanberant * * @param <T1> * @param <T2> */ public static class BySecondPairComparator<T1,T2> implements Comparator<Pair<T1,T2>> { @SuppressWarnings("unchecked") @Override public int compare(Pair<T1, T2> pair1, Pair<T1, T2> pair2) { return ((Comparable<T2>) pair1.second()).compareTo(pair2.second()); } } /** * Compares a <code>Pair</code> to another <code>Pair</code> according to the second object of the pair only in decreasing order * This function will work providing * the first element of the <code>Pair</code> is comparable, otherwise will throw a * <code>ClassCastException</code> * @author jonathanberant * * @param <T1> * @param <T2> */ public static class BySecondReversePairComparator<T1,T2> implements Comparator<Pair<T1,T2>> { @SuppressWarnings("unchecked") @Override public int compare(Pair<T1, T2> pair1, Pair<T1, T2> pair2) { return -((Comparable<T2>) pair1.second()).compareTo(pair2.second()); } } }
/* * Copyright 2002-2007 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.orm.hibernate3; import java.util.ArrayList; import java.util.List; import javax.transaction.RollbackException; import javax.transaction.Status; import javax.transaction.Synchronization; import javax.transaction.SystemException; import javax.transaction.TransactionManager; import javax.transaction.UserTransaction; import junit.framework.TestCase; import org.easymock.MockControl; import org.hibernate.FlushMode; import org.hibernate.HibernateException; import org.hibernate.Query; import org.hibernate.SessionFactory; import org.hibernate.classic.Session; import org.hibernate.engine.SessionFactoryImplementor; import org.hibernate.engine.SessionImplementor; import org.springframework.dao.DataAccessException; import org.springframework.transaction.MockJtaTransaction; import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.TransactionException; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.UnexpectedRollbackException; import org.springframework.transaction.jta.JtaTransactionManager; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionCallbackWithoutResult; import org.springframework.transaction.support.TransactionSynchronizationManager; import 
org.springframework.transaction.support.TransactionTemplate; /** * @author Juergen Hoeller * @since 05.03.2005 */ public class HibernateJtaTransactionTests extends TestCase { public void testJtaTransactionCommit() throws Exception { doTestJtaTransactionCommit(Status.STATUS_NO_TRANSACTION, false); } public void testJtaTransactionCommitWithReadOnly() throws Exception { doTestJtaTransactionCommit(Status.STATUS_NO_TRANSACTION, true); } public void testJtaTransactionCommitWithExisting() throws Exception { doTestJtaTransactionCommit(Status.STATUS_ACTIVE, false); } public void testJtaTransactionCommitWithExistingAndReadOnly() throws Exception { doTestJtaTransactionCommit(Status.STATUS_ACTIVE, true); } private void doTestJtaTransactionCommit(int status, final boolean readOnly) throws Exception { MockControl utControl = MockControl.createControl(UserTransaction.class); UserTransaction ut = (UserTransaction) utControl.getMock(); MockControl sfControl = MockControl.createControl(SessionFactory.class); final SessionFactory sf = (SessionFactory) sfControl.getMock(); final MockControl sessionControl = MockControl.createControl(Session.class); final Session session = (Session) sessionControl.getMock(); final MockControl queryControl = MockControl.createControl(Query.class); Query query = (Query) queryControl.getMock(); ut.getStatus(); utControl.setReturnValue(status, 1); if (status == Status.STATUS_NO_TRANSACTION) { ut.begin(); utControl.setVoidCallable(1); ut.getStatus(); utControl.setReturnValue(Status.STATUS_ACTIVE, 2); ut.commit(); utControl.setVoidCallable(1); } else { ut.getStatus(); utControl.setReturnValue(status, 1); } final List list = new ArrayList(); list.add("test"); sf.openSession(); sfControl.setReturnValue(session, 1); session.getSessionFactory(); sessionControl.setReturnValue(sf, 1); session.isOpen(); sessionControl.setReturnValue(true, 1); session.createQuery("some query string"); sessionControl.setReturnValue(query, 1); if (readOnly) { 
session.setFlushMode(FlushMode.NEVER); sessionControl.setVoidCallable(1); } query.list(); queryControl.setReturnValue(list, 1); utControl.replay(); sfControl.replay(); sessionControl.replay(); queryControl.replay(); JtaTransactionManager ptm = new JtaTransactionManager(ut); TransactionTemplate tt = new TransactionTemplate(ptm); tt.setReadOnly(readOnly); assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); Object result = tt.execute(new TransactionCallback() { public Object doInTransaction(TransactionStatus status) { try { assertTrue("JTA synchronizations active", TransactionSynchronizationManager.isSynchronizationActive()); assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); HibernateTemplate ht = new HibernateTemplate(sf); ht.setExposeNativeSession(true); ht.executeFind(new HibernateCallback() { public Object doInHibernate(org.hibernate.Session sess) { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); assertEquals(session, sess); return null; } }); ht = new HibernateTemplate(sf); List htl = ht.executeFind(new HibernateCallback() { public Object doInHibernate(org.hibernate.Session sess) { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); return sess.createQuery("some query string").list(); } }); assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); sessionControl.verify(); queryControl.verify(); sessionControl.reset(); if (!readOnly) { session.getFlushMode(); sessionControl.setReturnValue(FlushMode.AUTO, 1); session.flush(); sessionControl.setVoidCallable(1); } session.close(); sessionControl.setReturnValue(null, 1); sessionControl.replay(); return htl; } catch (Error err) { err.printStackTrace(); throw err; } } }); assertTrue("Correct result list", result == list); assertTrue("Hasn't 
thread session", !TransactionSynchronizationManager.hasResource(sf)); assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); utControl.verify(); sfControl.verify(); sessionControl.verify(); } public void testJtaTransactionCommitWithJtaTm() throws Exception { doTestJtaTransactionCommitWithJtaTm(Status.STATUS_NO_TRANSACTION); } public void testJtaTransactionCommitWithJtaTmAndExisting() throws Exception { doTestJtaTransactionCommitWithJtaTm(Status.STATUS_ACTIVE); } private void doTestJtaTransactionCommitWithJtaTm(int status) throws Exception { MockControl utControl = MockControl.createControl(UserTransaction.class); UserTransaction ut = (UserTransaction) utControl.getMock(); ut.getStatus(); utControl.setReturnValue(status, 2); if (status == Status.STATUS_NO_TRANSACTION) { ut.begin(); utControl.setVoidCallable(1); ut.getStatus(); utControl.setReturnValue(Status.STATUS_ACTIVE, 1); ut.commit(); utControl.setVoidCallable(1); } MockControl tmControl = MockControl.createControl(TransactionManager.class); TransactionManager tm = (TransactionManager) tmControl.getMock(); MockJtaTransaction transaction = new MockJtaTransaction(); tm.getTransaction(); tmControl.setReturnValue(transaction, 6); MockControl sfControl = MockControl.createControl(SessionFactoryImplementor.class); final SessionFactoryImplementor sf = (SessionFactoryImplementor) sfControl.getMock(); final MockControl sessionControl = MockControl.createControl(Session.class); final Session session = (Session) sessionControl.getMock(); sf.getTransactionManager(); sfControl.setReturnValue(tm, 1); sf.openSession(); sfControl.setReturnValue(session, 1); session.isOpen(); sessionControl.setReturnValue(true, 1); utControl.replay(); tmControl.replay(); sfControl.replay(); sessionControl.replay(); JtaTransactionManager ptm = new JtaTransactionManager(ut); TransactionTemplate tt = new TransactionTemplate(ptm); final List l = new ArrayList(); l.add("test"); assertTrue("JTA 
synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); Object result = tt.execute(new TransactionCallback() { public Object doInTransaction(TransactionStatus status) { try { assertTrue("JTA synchronizations active", TransactionSynchronizationManager.isSynchronizationActive()); assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); HibernateTemplate ht = new HibernateTemplate(sf); ht.setExposeNativeSession(true); List htl = ht.executeFind(new HibernateCallback() { public Object doInHibernate(org.hibernate.Session sess) { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); assertEquals(session, sess); return l; } }); ht = new HibernateTemplate(sf); ht.setExposeNativeSession(true); htl = ht.executeFind(new HibernateCallback() { public Object doInHibernate(org.hibernate.Session sess) { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); assertEquals(session, sess); return l; } }); assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); sessionControl.verify(); sessionControl.reset(); session.getFlushMode(); sessionControl.setReturnValue(FlushMode.AUTO, 1); session.flush(); sessionControl.setVoidCallable(1); session.close(); sessionControl.setReturnValue(null, 1); sessionControl.replay(); return htl; } catch (Error err) { err.printStackTrace(); throw err; } } }); assertTrue("Correct result list", result == l); assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); utControl.verify(); sfControl.verify(); sessionControl.verify(); } public void testJtaTransactionWithFlushFailure() throws Exception { MockControl utControl = MockControl.createControl(UserTransaction.class); 
UserTransaction ut = (UserTransaction) utControl.getMock(); ut.getStatus(); utControl.setReturnValue(Status.STATUS_NO_TRANSACTION, 1); ut.begin(); utControl.setVoidCallable(1); ut.getStatus(); utControl.setReturnValue(Status.STATUS_ACTIVE, 1); ut.rollback(); utControl.setVoidCallable(1); utControl.replay(); MockControl sfControl = MockControl.createControl(SessionFactory.class); final SessionFactory sf = (SessionFactory) sfControl.getMock(); final MockControl sessionControl = MockControl.createControl(Session.class); final Session session = (Session) sessionControl.getMock(); sf.openSession(); sfControl.setReturnValue(session, 1); session.getSessionFactory(); sessionControl.setReturnValue(sf, 1); session.isOpen(); sessionControl.setReturnValue(true, 1); sfControl.replay(); sessionControl.replay(); JtaTransactionManager ptm = new JtaTransactionManager(ut); TransactionTemplate tt = new TransactionTemplate(ptm); final List l = new ArrayList(); l.add("test"); final HibernateException flushEx = new HibernateException("flush failure"); assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); try { tt.execute(new TransactionCallback() { public Object doInTransaction(TransactionStatus status) { try { assertTrue("JTA synchronizations active", TransactionSynchronizationManager.isSynchronizationActive()); assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); HibernateTemplate ht = new HibernateTemplate(sf); ht.setExposeNativeSession(true); List htl = ht.executeFind(new HibernateCallback() { public Object doInHibernate(org.hibernate.Session sess) { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); assertEquals(session, sess); return l; } }); ht = new HibernateTemplate(sf); ht.setExposeNativeSession(true); htl = ht.executeFind(new HibernateCallback() { public Object 
doInHibernate(org.hibernate.Session sess) { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); assertEquals(session, sess); return l; } }); assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); sessionControl.verify(); sessionControl.reset(); session.getFlushMode(); sessionControl.setReturnValue(FlushMode.AUTO, 1); session.flush(); sessionControl.setThrowable(flushEx); session.close(); sessionControl.setReturnValue(null, 1); sessionControl.replay(); return htl; } catch (Error err) { err.printStackTrace(); throw err; } } }); } catch (DataAccessException ex) { // expected assertTrue(flushEx == ex.getCause()); } assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); utControl.verify(); sfControl.verify(); sessionControl.verify(); } public void testJtaTransactionRollback() throws Exception { MockControl utControl = MockControl.createControl(UserTransaction.class); UserTransaction ut = (UserTransaction) utControl.getMock(); ut.getStatus(); utControl.setReturnValue(Status.STATUS_NO_TRANSACTION, 1); ut.begin(); utControl.setVoidCallable(1); ut.getStatus(); utControl.setReturnValue(Status.STATUS_ACTIVE, 1); ut.rollback(); utControl.setVoidCallable(1); utControl.replay(); MockControl sfControl = MockControl.createControl(SessionFactory.class); final SessionFactory sf = (SessionFactory) sfControl.getMock(); final MockControl sessionControl = MockControl.createControl(Session.class); final Session session = (Session) sessionControl.getMock(); sf.openSession(); sfControl.setReturnValue(session, 1); session.getSessionFactory(); sessionControl.setReturnValue(sf, 1); sfControl.replay(); sessionControl.replay(); JtaTransactionManager ptm = new JtaTransactionManager(ut); TransactionTemplate tt = new TransactionTemplate(ptm); final List l = new ArrayList(); l.add("test"); 
assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); Object result = tt.execute(new TransactionCallback() { public Object doInTransaction(TransactionStatus status) { try { assertTrue("JTA synchronizations active", TransactionSynchronizationManager.isSynchronizationActive()); HibernateTemplate ht = new HibernateTemplate(sf); List htl = ht.executeFind(new HibernateCallback() { public Object doInHibernate(org.hibernate.Session session) { return l; } }); status.setRollbackOnly(); sessionControl.verify(); sessionControl.reset(); session.close(); sessionControl.setReturnValue(null, 1); sessionControl.replay(); return htl; } catch (Error err) { err.printStackTrace(); throw err; } } }); assertTrue("Correct result list", result == l); assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); utControl.verify(); sfControl.verify(); sessionControl.verify(); } public void testJtaTransactionCommitWithPreBound() throws Exception { doTestJtaTransactionCommitWithPreBound(false, false, false); } public void testJtaTransactionCommitWithPreBoundAndReadOnly() throws Exception { doTestJtaTransactionCommitWithPreBound(false, false, true); } public void testJtaTransactionCommitWithPreBoundAndFlushModeNever() throws Exception { doTestJtaTransactionCommitWithPreBound(false, true, false); } public void testJtaTransactionCommitWithPreBoundAndFlushModeNeverAndReadOnly() throws Exception { doTestJtaTransactionCommitWithPreBound(false, true, true); } public void testJtaTransactionCommitWithJtaTmAndPreBound() throws Exception { doTestJtaTransactionCommitWithPreBound(true, false, false); } public void testJtaTransactionCommitWithJtaTmAndPreBoundAndReadOnly() throws Exception { doTestJtaTransactionCommitWithPreBound(true, false, true); } public void testJtaTransactionCommitWithJtaTmAndPreBoundAndFlushModeNever() throws Exception { doTestJtaTransactionCommitWithPreBound(true, true, false); } 
public void testJtaTransactionCommitWithJtaTmAndPreBoundAndFlushModeNeverAndReadOnly() throws Exception { doTestJtaTransactionCommitWithPreBound(true, true, true); } protected void doTestJtaTransactionCommitWithPreBound( boolean jtaTm, final boolean flushNever, final boolean readOnly) throws Exception { MockControl utControl = MockControl.createControl(UserTransaction.class); UserTransaction ut = (UserTransaction) utControl.getMock(); ut.getStatus(); utControl.setReturnValue(Status.STATUS_NO_TRANSACTION, 1); ut.getStatus(); utControl.setReturnValue(Status.STATUS_ACTIVE, 2); ut.begin(); utControl.setVoidCallable(1); ut.commit(); utControl.setVoidCallable(1); MockControl tmControl = MockControl.createControl(TransactionManager.class); TransactionManager tm = (TransactionManager) tmControl.getMock(); if (jtaTm) { MockJtaTransaction transaction = new MockJtaTransaction(); tm.getTransaction(); tmControl.setReturnValue(transaction, 1); } MockControl sfControl = MockControl.createControl(SessionFactoryImplementor.class); final SessionFactoryImplementor sf = (SessionFactoryImplementor) sfControl.getMock(); final MockControl sessionControl = MockControl.createControl(ExtendedSession.class); final ExtendedSession session = (ExtendedSession) sessionControl.getMock(); sf.getTransactionManager(); sfControl.setReturnValue((jtaTm ? 
tm : null), 1); session.isOpen(); sessionControl.setReturnValue(true, 5); session.getFlushMode(); if (flushNever) { sessionControl.setReturnValue(FlushMode.NEVER, 1); if (!readOnly) { session.setFlushMode(FlushMode.AUTO); sessionControl.setVoidCallable(1); } } else { sessionControl.setReturnValue(FlushMode.AUTO, 1); } utControl.replay(); tmControl.replay(); sfControl.replay(); sessionControl.replay(); TransactionSynchronizationManager.bindResource(sf, new SessionHolder(session)); try { JtaTransactionManager ptm = new JtaTransactionManager(ut); TransactionTemplate tt = new TransactionTemplate(ptm); tt.setReadOnly(readOnly); final List l = new ArrayList(); l.add("test"); assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); Object result = tt.execute(new TransactionCallback() { public Object doInTransaction(TransactionStatus status) { try { assertTrue("JTA synchronizations active", TransactionSynchronizationManager.isSynchronizationActive()); assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); HibernateTemplate ht = new HibernateTemplate(sf); ht.setExposeNativeSession(true); List htl = null; for (int i = 0; i < 5; i++) { htl = ht.executeFind(new HibernateCallback() { public Object doInHibernate(org.hibernate.Session sess) { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); assertEquals(session, sess); return l; } }); assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); } sessionControl.verify(); sessionControl.reset(); if (!readOnly) { session.getFlushMode(); sessionControl.setReturnValue(FlushMode.AUTO, 1); session.flush(); sessionControl.setVoidCallable(1); if (flushNever) { session.setFlushMode(FlushMode.NEVER); sessionControl.setVoidCallable(1); } } session.afterTransactionCompletion(true, null); 
sessionControl.setVoidCallable(1); session.disconnect(); sessionControl.setReturnValue(null, 1); sessionControl.replay(); return htl; } catch (Error err) { err.printStackTrace(); throw err; } } }); assertTrue("Correct result list", result == l); assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); } finally { TransactionSynchronizationManager.unbindResource(sf); } utControl.verify(); tmControl.verify(); sfControl.verify(); sessionControl.verify(); } public void testJtaTransactionRollbackWithPreBound() throws Exception { MockControl utControl = MockControl.createControl(UserTransaction.class); UserTransaction ut = (UserTransaction) utControl.getMock(); ut.getStatus(); utControl.setReturnValue(Status.STATUS_NO_TRANSACTION, 1); ut.getStatus(); utControl.setReturnValue(Status.STATUS_ACTIVE, 2); ut.getStatus(); utControl.setReturnValue(Status.STATUS_MARKED_ROLLBACK, 2); ut.begin(); utControl.setVoidCallable(1); ut.setRollbackOnly(); utControl.setVoidCallable(1); RollbackException rex = new RollbackException(); ut.commit(); utControl.setThrowable(rex, 1); utControl.replay(); MockControl sfControl = MockControl.createControl(SessionFactory.class); final SessionFactory sf = (SessionFactory) sfControl.getMock(); final MockControl sessionControl = MockControl.createControl(Session.class); final Session session = (Session) sessionControl.getMock(); session.getSessionFactory(); sessionControl.setReturnValue(sf, 1); session.isOpen(); sessionControl.setReturnValue(true, 5); session.getFlushMode(); sessionControl.setReturnValue(FlushMode.AUTO, 1); sfControl.replay(); sessionControl.replay(); TransactionSynchronizationManager.bindResource(sf, new SessionHolder(session)); try { JtaTransactionManager ptm = new JtaTransactionManager(ut); final TransactionTemplate tt = new TransactionTemplate(ptm); assertTrue("JTA synchronizations not active", 
!TransactionSynchronizationManager.isSynchronizationActive()); assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); tt.execute(new TransactionCallbackWithoutResult() { public void doInTransactionWithoutResult(TransactionStatus status) { tt.execute(new TransactionCallbackWithoutResult() { public void doInTransactionWithoutResult(TransactionStatus status) { status.setRollbackOnly(); try { assertTrue("JTA synchronizations active", TransactionSynchronizationManager.isSynchronizationActive()); assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); HibernateTemplate ht = new HibernateTemplate(sf); ht.setExposeNativeSession(true); for (int i = 0; i < 5; i++) { ht.execute(new HibernateCallback() { public Object doInHibernate(org.hibernate.Session sess) { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); assertEquals(session, sess); return null; } }); assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); } sessionControl.verify(); sessionControl.reset(); session.getFlushMode(); sessionControl.setReturnValue(FlushMode.AUTO, 1); session.flush(); sessionControl.setVoidCallable(1); session.disconnect(); sessionControl.setReturnValue(null, 1); session.clear(); sessionControl.setVoidCallable(1); sessionControl.replay(); } catch (Error err) { err.printStackTrace(); throw err; } } }); } }); fail("Should have thrown UnexpectedRollbackException"); } catch (UnexpectedRollbackException ex) { // expected assertEquals(rex, ex.getCause()); } finally { TransactionSynchronizationManager.unbindResource(sf); } utControl.verify(); sfControl.verify(); sessionControl.verify(); } public void testJtaTransactionCommitWithRequiresNew() throws Exception { doTestJtaTransactionWithRequiresNew(false); } public void testJtaTransactionRollbackWithRequiresNew() throws Exception { doTestJtaTransactionWithRequiresNew(true); } protected void doTestJtaTransactionWithRequiresNew(final 
boolean rollback) throws Exception { MockControl utControl = MockControl.createControl(UserTransaction.class); UserTransaction ut = (UserTransaction) utControl.getMock(); MockControl tmControl = MockControl.createControl(TransactionManager.class); TransactionManager tm = (TransactionManager) tmControl.getMock(); MockControl tx1Control = MockControl.createControl(javax.transaction.Transaction.class); javax.transaction.Transaction tx1 = (javax.transaction.Transaction) tx1Control.getMock(); MockControl sfControl = MockControl.createControl(SessionFactory.class); final SessionFactory sf = (SessionFactory) sfControl.getMock(); MockControl session1Control = MockControl.createControl(Session.class); final Session session1 = (Session) session1Control.getMock(); MockControl session2Control = MockControl.createControl(Session.class); final Session session2 = (Session) session2Control.getMock(); ut.getStatus(); utControl.setReturnValue(Status.STATUS_NO_TRANSACTION, 1); ut.getStatus(); utControl.setReturnValue(Status.STATUS_ACTIVE, 3); ut.begin(); utControl.setVoidCallable(2); tm.suspend(); tmControl.setReturnValue(tx1, 1); tm.resume(tx1); tmControl.setVoidCallable(1); if (rollback) { ut.rollback(); } else { ut.getStatus(); utControl.setReturnValue(Status.STATUS_ACTIVE, 2); ut.commit(); } utControl.setVoidCallable(2); sf.openSession(); sfControl.setReturnValue(session1, 1); sf.openSession(); sfControl.setReturnValue(session2, 1); session1.getSessionFactory(); session1Control.setReturnValue(sf, 1); session2.getSessionFactory(); session2Control.setReturnValue(sf, 1); session1.isOpen(); session1Control.setReturnValue(true, 1); session2.isOpen(); session2Control.setReturnValue(true, 1); session2.getFlushMode(); session2Control.setReturnValue(FlushMode.AUTO, 1); if (!rollback) { session1.getFlushMode(); session1Control.setReturnValue(FlushMode.AUTO, 1); session2.getFlushMode(); session2Control.setReturnValue(FlushMode.AUTO, 1); session1.flush(); session1Control.setVoidCallable(1); 
session2.flush(); session2Control.setVoidCallable(2); } session1.disconnect(); session1Control.setReturnValue(null, 1); session1.close(); session1Control.setReturnValue(null, 1); session2.close(); session2Control.setReturnValue(null, 1); utControl.replay(); tmControl.replay(); sfControl.replay(); session1Control.replay(); session2Control.replay(); JtaTransactionManager ptm = new JtaTransactionManager(); ptm.setUserTransaction(ut); ptm.setTransactionManager(tm); final TransactionTemplate tt = new TransactionTemplate(ptm); tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); try { tt.execute(new TransactionCallback() { public Object doInTransaction(TransactionStatus status) { org.hibernate.Session outerSession = SessionFactoryUtils.getSession(sf, false); assertSame(session1, outerSession); SessionHolder holder = (SessionHolder) TransactionSynchronizationManager.getResource(sf); assertTrue("Has thread session", holder != null); try { tt.execute(new TransactionCallback() { public Object doInTransaction(TransactionStatus status) { org.hibernate.Session innerSession = SessionFactoryUtils.getSession(sf, false); assertSame(session2, innerSession); HibernateTemplate ht = new HibernateTemplate(sf); ht.setFlushMode(HibernateTemplate.FLUSH_EAGER); return ht.executeFind(new HibernateCallback() { public Object doInHibernate(org.hibernate.Session innerSession) { if (rollback) { throw new HibernateException(""); } return null; } }); } }); return null; } finally { assertTrue("Same thread session as before", outerSession == SessionFactoryUtils.getSession(sf, false)); } } }); if (rollback) { fail("Should have thrown DataAccessException"); } } catch (DataAccessException ex) { if (!rollback) { throw ex; } } finally { assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); } utControl.verify(); tmControl.verify(); sfControl.verify(); 
session1Control.verify();
session2Control.verify();
}

/** REQUIRES_NEW propagation where suspending the outer JTA transaction fails. */
public void testJtaTransactionWithRequiresNewAndSuspendException() throws Exception {
    doTestJtaTransactionWithRequiresNewAndException(true);
}

/** REQUIRES_NEW propagation where beginning the inner JTA transaction fails. */
public void testJtaTransactionWithRequiresNewAndBeginException() throws Exception {
    doTestJtaTransactionWithRequiresNewAndException(false);
}

/**
 * Drives a REQUIRES_NEW inner transaction whose suspend (or inner begin) throws a
 * SystemException, and verifies that the Hibernate session is disconnected/closed
 * and the outer JTA transaction is rolled back.
 *
 * @param suspendException true to fail on TransactionManager.suspend(),
 *                         false to fail on the inner UserTransaction.begin()
 *                         (after which the outer tx is resumed)
 */
protected void doTestJtaTransactionWithRequiresNewAndException(boolean suspendException) throws Exception {
    // --- mock setup -------------------------------------------------------
    MockControl utControl = MockControl.createControl(UserTransaction.class);
    UserTransaction ut = (UserTransaction) utControl.getMock();
    MockControl tmControl = MockControl.createControl(TransactionManager.class);
    TransactionManager tm = (TransactionManager) tmControl.getMock();
    MockControl txControl = MockControl.createControl(javax.transaction.Transaction.class);
    javax.transaction.Transaction tx = (javax.transaction.Transaction) txControl.getMock();
    MockControl sfControl = MockControl.createControl(SessionFactory.class);
    final SessionFactory sf = (SessionFactory) sfControl.getMock();
    MockControl session1Control = MockControl.createControl(Session.class);
    final Session session1 = (Session) session1Control.getMock();
    // --- recorded expectations: outer tx begins, then suspend/begin fails -
    ut.getStatus();
    utControl.setReturnValue(Status.STATUS_NO_TRANSACTION, 1);
    ut.getStatus();
    utControl.setReturnValue(Status.STATUS_ACTIVE, 2);
    ut.begin();
    utControl.setVoidCallable(1);
    if (suspendException) {
        // failure mode 1: suspend() itself blows up
        tm.suspend();
        tmControl.setThrowable(new SystemException(), 1);
    }
    else {
        // failure mode 2: suspend succeeds, inner begin() blows up,
        // so the suspended outer tx must be resumed
        tm.suspend();
        tmControl.setReturnValue(tx, 1);
        ut.begin();
        utControl.setThrowable(new SystemException(), 1);
        tm.resume(tx);
        tmControl.setVoidCallable(1);
    }
    // outer transaction is rolled back after the failure
    ut.rollback();
    utControl.setVoidCallable(1);
    // the single outer session is opened, disconnected and closed
    sf.openSession();
    sfControl.setReturnValue(session1, 1);
    session1.getSessionFactory();
    session1Control.setReturnValue(sf, 1);
    session1.disconnect();
    session1Control.setReturnValue(null, 1);
    session1.close();
    session1Control.setReturnValue(null, 1);
    utControl.replay();
    tmControl.replay();
    sfControl.replay();
    session1Control.replay();
    // --- exercise ---------------------------------------------------------
    JtaTransactionManager ptm = new JtaTransactionManager();
    ptm.setUserTransaction(ut);
    ptm.setTransactionManager(tm);
    final TransactionTemplate tt = new TransactionTemplate(ptm);
    tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
    assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
    try {
        tt.execute(new TransactionCallback() {
            public Object doInTransaction(TransactionStatus status) {
                org.hibernate.Session outerSession = SessionFactoryUtils.getSession(sf, false);
                assertSame(session1, outerSession);
                SessionHolder holder = (SessionHolder) TransactionSynchronizationManager.getResource(sf);
                assertTrue("Has thread session", holder != null);
                // inner REQUIRES_NEW transaction: this is where suspend/begin fails
                tt.execute(new TransactionCallback() {
                    public Object doInTransaction(TransactionStatus status) {
                        return null;
                    }
                });
                return null;
            }
        });
        fail("Should have thrown TransactionException");
    }
    catch (TransactionException ex) {
        // expected
    }
    finally {
        assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
    }
    utControl.verify();
    tmControl.verify();
    sfControl.verify();
    session1Control.verify();
}

/** REQUIRES_NEW commit path when the SessionFactory exposes a JTA TransactionManager. */
public void testJtaTransactionCommitWithRequiresNewAndJtaTm() throws Exception {
    doTestJtaTransactionWithRequiresNewAndJtaTm(false);
}

/** REQUIRES_NEW rollback path when the SessionFactory exposes a JTA TransactionManager. */
public void testJtaTransactionRollbackWithRequiresNewAndJtaTm() throws Exception {
    doTestJtaTransactionWithRequiresNewAndJtaTm(true);
}

/**
 * Drives an outer plus inner (REQUIRES_NEW) JTA transaction against a
 * SessionFactoryImplementor that exposes the JTA TransactionManager,
 * committing or rolling back depending on the flag.
 *
 * @param rollback true to throw from the inner Hibernate callback and roll back
 */
protected void doTestJtaTransactionWithRequiresNewAndJtaTm(final boolean rollback) throws Exception {
    // --- mock setup -------------------------------------------------------
    MockControl utControl = MockControl.createControl(UserTransaction.class);
    UserTransaction ut = (UserTransaction) utControl.getMock();
    MockControl tmControl = MockControl.createControl(TransactionManager.class);
    TransactionManager tm = (TransactionManager) tmControl.getMock();
    MockControl tx1Control = MockControl.createControl(javax.transaction.Transaction.class);
    javax.transaction.Transaction tx1 = (javax.transaction.Transaction) tx1Control.getMock();
    MockControl sfControl =
MockControl.createControl(SessionFactoryImplementor.class);
final SessionFactoryImplementor sf = (SessionFactoryImplementor) sfControl.getMock();
MockControl session1Control = MockControl.createControl(Session.class);
final Session session1 = (Session) session1Control.getMock();
MockControl session2Control = MockControl.createControl(Session.class);
final Session session2 = (Session) session2Control.getMock();
MockJtaTransaction transaction1 = new MockJtaTransaction();
MockJtaTransaction transaction2 = new MockJtaTransaction();
// --- recorded expectations: outer begin, suspend, inner begin, resume ----
ut.getStatus();
utControl.setReturnValue(Status.STATUS_NO_TRANSACTION, 1);
ut.getStatus();
utControl.setReturnValue(Status.STATUS_ACTIVE, 3);
ut.begin();
utControl.setVoidCallable(2);
tm.getTransaction();
tmControl.setReturnValue(transaction1, 1);
tm.suspend();
tmControl.setReturnValue(tx1, 1);
tm.getTransaction();
tmControl.setReturnValue(transaction2, 1);
tm.resume(tx1);
tmControl.setVoidCallable(1);
if (rollback) {
    ut.rollback();
}
else {
    ut.getStatus();
    utControl.setReturnValue(Status.STATUS_ACTIVE, 2);
    ut.commit();
}
// both outer and inner tx end the same way (rollback or commit)
utControl.setVoidCallable(2);
sf.getTransactionManager();
sfControl.setReturnValue(tm, 2);
sf.openSession();
sfControl.setReturnValue(session1, 1);
sf.openSession();
sfControl.setReturnValue(session2, 1);
session1.isOpen();
session1Control.setReturnValue(true, 1);
session2.isOpen();
session2Control.setReturnValue(true, 1);
session2.getFlushMode();
session2Control.setReturnValue(FlushMode.AUTO, 1);
if (!rollback) {
    // flushes only happen on the commit path
    session1.getFlushMode();
    session1Control.setReturnValue(FlushMode.AUTO, 1);
    session2.getFlushMode();
    session2Control.setReturnValue(FlushMode.AUTO, 1);
    session1.flush();
    session1Control.setVoidCallable(1);
    session2.flush();
    session2Control.setVoidCallable(2);
}
session1.disconnect();
session1Control.setReturnValue(null, 1);
session1.close();
session1Control.setReturnValue(null, 1);
session2.close();
session2Control.setReturnValue(null, 1);
utControl.replay();
tmControl.replay();
sfControl.replay();
session1Control.replay();
session2Control.replay();
// --- exercise: outer tx with nested REQUIRES_NEW tx ----------------------
JtaTransactionManager ptm = new JtaTransactionManager();
ptm.setUserTransaction(ut);
ptm.setTransactionManager(tm);
final TransactionTemplate tt = new TransactionTemplate(ptm);
tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
try {
    tt.execute(new TransactionCallback() {
        public Object doInTransaction(TransactionStatus status) {
            org.hibernate.Session outerSession = SessionFactoryUtils.getSession(sf, false);
            assertSame(session1, outerSession);
            SessionHolder holder = (SessionHolder) TransactionSynchronizationManager.getResource(sf);
            assertTrue("Has thread session", holder != null);
            try {
                tt.execute(new TransactionCallback() {
                    public Object doInTransaction(TransactionStatus status) {
                        // inner tx gets its own session
                        org.hibernate.Session innerSession = SessionFactoryUtils.getSession(sf, false);
                        assertSame(session2, innerSession);
                        HibernateTemplate ht = new HibernateTemplate(sf);
                        ht.setFlushMode(HibernateTemplate.FLUSH_EAGER);
                        return ht.executeFind(new HibernateCallback() {
                            public Object doInHibernate(org.hibernate.Session innerSession) {
                                if (rollback) {
                                    throw new HibernateException("");
                                }
                                return null;
                            }
                        });
                    }
                });
                return null;
            }
            finally {
                // after the inner tx completes, the outer session must be bound again
                assertTrue("Same thread session as before", outerSession == SessionFactoryUtils.getSession(sf, false));
            }
        }
    });
    if (rollback) {
        fail("Should have thrown DataAccessException");
    }
}
catch (DataAccessException ex) {
    if (!rollback) {
        throw ex;
    }
}
finally {
    assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
}
utControl.verify();
tmControl.verify();
sfControl.verify();
session1Control.verify();
session2Control.verify();
}

/**
 * PROPAGATION_SUPPORTS without an existing transaction: no session is bound
 * at callback entry; FLUSH_EAGER flushes explicitly and restores FlushMode.NEVER.
 */
public void testTransactionWithPropagationSupports() throws Exception {
    // --- mock setup and recorded expectations -----------------------------
    MockControl utControl = MockControl.createControl(UserTransaction.class);
    UserTransaction ut = (UserTransaction) utControl.getMock();
    ut.getStatus();
utControl.setReturnValue(Status.STATUS_NO_TRANSACTION, 1);
ut.begin();
utControl.setVoidCallable(1);
ut.getStatus();
utControl.setReturnValue(Status.STATUS_ACTIVE, 1);
ut.rollback();
utControl.setVoidCallable(1);
utControl.replay();
MockControl sfControl = MockControl.createControl(SessionFactory.class);
final SessionFactory sf = (SessionFactory) sfControl.getMock();
MockControl sessionControl = MockControl.createControl(Session.class);
final Session session = (Session) sessionControl.getMock();
sf.openSession();
sfControl.setReturnValue(session, 1);
session.getSessionFactory();
sessionControl.setReturnValue(sf, 1);
// FLUSH_EAGER temporarily switches NEVER -> AUTO, flushes, then restores NEVER
session.getFlushMode();
sessionControl.setReturnValue(FlushMode.NEVER, 1);
session.setFlushMode(FlushMode.AUTO);
sessionControl.setVoidCallable(1);
session.flush();
sessionControl.setVoidCallable(1);
session.setFlushMode(FlushMode.NEVER);
sessionControl.setVoidCallable(1);
session.getFlushMode();
sessionControl.setReturnValue(FlushMode.NEVER, 1);
session.close();
sessionControl.setReturnValue(null, 1);
sfControl.replay();
sessionControl.replay();
// --- exercise: SUPPORTS means no Spring-managed tx is started -------------
JtaTransactionManager tm = new JtaTransactionManager(ut);
TransactionTemplate tt = new TransactionTemplate(tm);
tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_SUPPORTS);
assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
tt.execute(new TransactionCallback() {
    public Object doInTransaction(TransactionStatus status) {
        assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
        assertTrue("Is not new transaction", !status.isNewTransaction());
        assertFalse(TransactionSynchronizationManager.isCurrentTransactionReadOnly());
        assertFalse(TransactionSynchronizationManager.isActualTransactionActive());
        HibernateTemplate ht = new HibernateTemplate(sf);
        ht.setFlushMode(HibernateTemplate.FLUSH_EAGER);
        ht.execute(new HibernateCallback() {
            public Object doInHibernate(org.hibernate.Session session) {
                return null;
            }
        });
        // first Hibernate access binds the session to the thread
        assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
        return null;
    }
});
assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
sfControl.verify();
sessionControl.verify();
}

/**
 * PROPAGATION_SUPPORTS outer scope with a PROPAGATION_REQUIRED inner transaction:
 * the session bound during the SUPPORTS scope is reused by the inner tx.
 */
public void testTransactionWithPropagationSupportsAndInnerTransaction() throws Exception {
    // --- mock setup and recorded expectations -----------------------------
    MockControl utControl = MockControl.createControl(UserTransaction.class);
    UserTransaction ut = (UserTransaction) utControl.getMock();
    ut.getStatus();
    utControl.setReturnValue(Status.STATUS_NO_TRANSACTION, 1);
    ut.begin();
    utControl.setVoidCallable(1);
    ut.getStatus();
    utControl.setReturnValue(Status.STATUS_ACTIVE, 1);
    ut.rollback();
    utControl.setVoidCallable(1);
    utControl.replay();
    MockControl sfControl = MockControl.createControl(SessionFactory.class);
    final SessionFactory sf = (SessionFactory) sfControl.getMock();
    MockControl sessionControl = MockControl.createControl(Session.class);
    final Session session = (Session) sessionControl.getMock();
    sf.openSession();
    sfControl.setReturnValue(session, 1);
    session.getSessionFactory();
    sessionControl.setReturnValue(sf, 1);
    session.isOpen();
    sessionControl.setReturnValue(true, 1);
    // three eager flushes: outer template, inner template, inner tx commit
    session.getFlushMode();
    sessionControl.setReturnValue(FlushMode.AUTO, 3);
    session.flush();
    sessionControl.setVoidCallable(3);
    session.close();
    sessionControl.setReturnValue(null, 1);
    sfControl.replay();
    sessionControl.replay();
    // --- exercise ---------------------------------------------------------
    JtaTransactionManager tm = new JtaTransactionManager(ut);
    TransactionTemplate tt = new TransactionTemplate(tm);
    tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_SUPPORTS);
    final TransactionTemplate tt2 = new TransactionTemplate(tm);
    tt2.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
    assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
    tt.execute(new TransactionCallback() {
        public Object doInTransaction(TransactionStatus status) {
            assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
            assertTrue("Is not new transaction", !status.isNewTransaction());
            assertFalse(TransactionSynchronizationManager.isCurrentTransactionReadOnly());
            assertFalse(TransactionSynchronizationManager.isActualTransactionActive());
            HibernateTemplate ht = new HibernateTemplate(sf);
            ht.setFlushMode(HibernateTemplate.FLUSH_EAGER);
            ht.execute(new HibernateCallback() {
                public Object doInHibernate(org.hibernate.Session session) {
                    return null;
                }
            });
            assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
            // inner REQUIRED transaction reuses the already-bound session
            tt2.execute(new TransactionCallback() {
                public Object doInTransaction(TransactionStatus status) {
                    HibernateTemplate ht = new HibernateTemplate(sf);
                    ht.setFlushMode(HibernateTemplate.FLUSH_EAGER);
                    return ht.executeFind(new HibernateCallback() {
                        public Object doInHibernate(org.hibernate.Session session) {
                            assertFalse(TransactionSynchronizationManager.isCurrentTransactionReadOnly());
                            //assertTrue(TransactionSynchronizationManager.isActualTransactionActive());
                            return null;
                        }
                    });
                }
            });
            assertFalse(TransactionSynchronizationManager.isCurrentTransactionReadOnly());
            assertFalse(TransactionSynchronizationManager.isActualTransactionActive());
            return null;
        }
    });
    assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
    sfControl.verify();
    sessionControl.verify();
}

/**
 * Session opened outside Spring tx management registers a JTA Synchronization;
 * repeated template calls reuse one session until the JTA tx completes.
 */
public void testJtaSessionSynchronization() throws Exception {
    // --- mock setup and recorded expectations -----------------------------
    MockControl tmControl = MockControl.createControl(TransactionManager.class);
    TransactionManager tm = (TransactionManager) tmControl.getMock();
    MockJtaTransaction transaction = new MockJtaTransaction();
    tm.getTransaction();
    tmControl.setReturnValue(transaction, 6);
    MockControl sfControl = MockControl.createControl(SessionFactoryImplementor.class);
    final SessionFactoryImplementor sf = (SessionFactoryImplementor) sfControl.getMock();
    final MockControl sessionControl = MockControl.createControl(Session.class);
    final Session session = (Session) sessionControl.getMock();
    sf.openSession();
    sfControl.setReturnValue(session, 1);
    sf.getTransactionManager();
sfControl.setReturnValue(tm, 6);
session.isOpen();
sessionControl.setReturnValue(true, 4);
session.getFlushMode();
sessionControl.setReturnValue(FlushMode.AUTO, 1);
session.flush();
sessionControl.setVoidCallable(1);
session.close();
sessionControl.setReturnValue(null, 1);
tmControl.replay();
sfControl.replay();
sessionControl.replay();
// --- exercise: five template calls share the one JTA-registered session ---
assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
HibernateTemplate ht = new HibernateTemplate(sf);
ht.setExposeNativeSession(true);
for (int i = 0; i < 5; i++) {
    ht.executeFind(new HibernateCallback() {
        public Object doInHibernate(org.hibernate.Session sess) {
            assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
            assertEquals(session, sess);
            return null;
        }
    });
}
// driving the registered JTA synchronization to commit unbinds the session
Synchronization synchronization = transaction.getSynchronization();
assertTrue("JTA synchronization registered", synchronization != null);
synchronization.beforeCompletion();
synchronization.afterCompletion(Status.STATUS_COMMITTED);
assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive());
tmControl.verify();
sfControl.verify();
sessionControl.verify();
}

/**
 * Same as testJtaSessionSynchronization, but the JTA tx rolls back:
 * no flush is expected (note: no getFlushMode()/flush() recorded below).
 */
public void testJtaSessionSynchronizationWithRollback() throws Exception {
    // --- mock setup and recorded expectations -----------------------------
    MockControl tmControl = MockControl.createControl(TransactionManager.class);
    TransactionManager tm = (TransactionManager) tmControl.getMock();
    MockJtaTransaction transaction = new MockJtaTransaction();
    tm.getTransaction();
    tmControl.setReturnValue(transaction, 6);
    MockControl sfControl = MockControl.createControl(SessionFactoryImplementor.class);
    final SessionFactoryImplementor sf = (SessionFactoryImplementor) sfControl.getMock();
    final MockControl sessionControl = MockControl.createControl(Session.class);
    final Session session = (Session) sessionControl.getMock();
    sf.openSession();
    sfControl.setReturnValue(session, 1);
    sf.getTransactionManager();
    sfControl.setReturnValue(tm, 6);
    session.isOpen();
    sessionControl.setReturnValue(true, 4);
    session.close();
    sessionControl.setReturnValue(null, 1);
    tmControl.replay();
    sfControl.replay();
    sessionControl.replay();
    // --- exercise ---------------------------------------------------------
    assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
    HibernateTemplate ht = new HibernateTemplate(sf);
    ht.setExposeNativeSession(true);
    for (int i = 0; i < 5; i++) {
        ht.executeFind(new HibernateCallback() {
            public Object doInHibernate(org.hibernate.Session sess) {
                assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
                assertEquals(session, sess);
                return null;
            }
        });
    }
    // rollback completion only (no beforeCompletion -> no flush)
    Synchronization synchronization = transaction.getSynchronization();
    assertTrue("JTA synchronization registered", synchronization != null);
    synchronization.afterCompletion(Status.STATUS_ROLLEDBACK);
    assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
    assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive());
    tmControl.verify();
    sfControl.verify();
    sessionControl.verify();
}

/**
 * JTA synchronization completed by a DIFFERENT thread: the session stays bound
 * to the original thread and remains usable afterwards in a read-only scope.
 */
public void testJtaSessionSynchronizationWithRollbackByOtherThread() throws Exception {
    // --- mock setup and recorded expectations -----------------------------
    MockControl tmControl = MockControl.createControl(TransactionManager.class);
    TransactionManager tm = (TransactionManager) tmControl.getMock();
    MockJtaTransaction transaction = new MockJtaTransaction();
    tm.getTransaction();
    tmControl.setReturnValue(transaction, 7);
    tm.getStatus();
    tmControl.setReturnValue(Status.STATUS_NO_TRANSACTION, 1);
    MockControl sfControl = MockControl.createControl(SessionFactoryImplementor.class);
    final SessionFactoryImplementor sf = (SessionFactoryImplementor) sfControl.getMock();
    final MockControl sessionControl = MockControl.createControl(Session.class);
    final Session session = (Session) sessionControl.getMock();
    sf.openSession();
    sfControl.setReturnValue(session, 2);
    sf.getTransactionManager();
    sfControl.setReturnValue(tm, 7);
session.isOpen();
sessionControl.setReturnValue(true, 8);
session.setFlushMode(FlushMode.NEVER);
sessionControl.setVoidCallable(1);
session.close();
sessionControl.setReturnValue(null, 2);
tmControl.replay();
sfControl.replay();
sessionControl.replay();
// --- exercise: bind a session via five template calls ---------------------
assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
final HibernateTemplate ht = new HibernateTemplate(sf);
ht.setExposeNativeSession(true);
for (int i = 0; i < 5; i++) {
    ht.executeFind(new HibernateCallback() {
        public Object doInHibernate(org.hibernate.Session sess) {
            assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
            assertEquals(session, sess);
            return null;
        }
    });
}
final Synchronization synchronization = transaction.getSynchronization();
assertTrue("JTA synchronization registered", synchronization != null);
// complete the JTA tx from another thread, simulating a foreign coordinator
Thread thread = new Thread() {
    public void run() {
        synchronization.afterCompletion(Status.STATUS_ROLLEDBACK);
    }
};
thread.start();
thread.join();
// session stays bound to THIS thread despite the foreign-thread completion
assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive());
// the still-bound session remains usable in a read-only SUPPORTS scope
TransactionTemplate tt = new TransactionTemplate(new JtaTransactionManager(tm));
tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_SUPPORTS);
tt.setReadOnly(true);
tt.execute(new TransactionCallbackWithoutResult() {
    protected void doInTransactionWithoutResult(TransactionStatus status) {
        assertTrue("JTA synchronizations active", TransactionSynchronizationManager.isSynchronizationActive());
        for (int i = 0; i < 5; i++) {
            ht.executeFind(new HibernateCallback() {
                public Object doInHibernate(org.hibernate.Session sess) {
                    assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
                    assertEquals(session, sess);
                    return null;
                }
            });
        }
    }
});
assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
assertTrue("JTA synchronizations not active",
!TransactionSynchronizationManager.isSynchronizationActive());
tmControl.verify();
sfControl.verify();
sessionControl.verify();
}

/**
 * A flush failure in beforeCompletion() must surface as HibernateSystemException
 * (preserving the cause) and mark the JTA transaction rollback-only.
 */
public void testJtaSessionSynchronizationWithFlushFailure() throws Exception {
    // --- mock setup and recorded expectations -----------------------------
    MockControl tmControl = MockControl.createControl(TransactionManager.class);
    TransactionManager tm = (TransactionManager) tmControl.getMock();
    MockJtaTransaction transaction = new MockJtaTransaction();
    tm.getTransaction();
    tmControl.setReturnValue(transaction, 6);
    // flush failure forces rollback-only on the JTA transaction
    tm.setRollbackOnly();
    tmControl.setVoidCallable(1);
    final HibernateException flushEx = new HibernateException("flush failure");
    MockControl sfControl = MockControl.createControl(SessionFactoryImplementor.class);
    final SessionFactoryImplementor sf = (SessionFactoryImplementor) sfControl.getMock();
    final MockControl sessionControl = MockControl.createControl(Session.class);
    final Session session = (Session) sessionControl.getMock();
    sf.openSession();
    sfControl.setReturnValue(session, 1);
    sf.getTransactionManager();
    sfControl.setReturnValue(tm, 6);
    session.isOpen();
    sessionControl.setReturnValue(true, 4);
    session.getFlushMode();
    sessionControl.setReturnValue(FlushMode.AUTO, 1);
    session.flush();
    sessionControl.setThrowable(flushEx, 1);
    session.close();
    sessionControl.setReturnValue(null, 1);
    tmControl.replay();
    sfControl.replay();
    sessionControl.replay();
    // --- exercise ---------------------------------------------------------
    assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
    HibernateTemplate ht = new HibernateTemplate(sf);
    ht.setExposeNativeSession(true);
    for (int i = 0; i < 5; i++) {
        ht.executeFind(new HibernateCallback() {
            public Object doInHibernate(org.hibernate.Session sess) {
                assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
                assertEquals(session, sess);
                return null;
            }
        });
    }
    Synchronization synchronization = transaction.getSynchronization();
    assertTrue("JTA synchronization registered", synchronization != null);
    try {
        synchronization.beforeCompletion();
        fail("Should have thrown HibernateSystemException");
    }
    catch (HibernateSystemException ex) {
        // original flush exception must be preserved as the cause
        assertSame(flushEx, ex.getCause());
    }
    synchronization.afterCompletion(Status.STATUS_ROLLEDBACK);
    assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
    assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive());
    tmControl.verify();
    sfControl.verify();
    sessionControl.verify();
}

/**
 * Two distinct JTA transactions observed in sequence (tm.getTransaction()
 * returns transaction1 then transaction2): each gets its own session and its
 * own registered Synchronization, completed in reverse order.
 */
public void testJtaSessionSynchronizationWithSuspendedTransaction() throws Exception {
    // --- mock setup and recorded expectations -----------------------------
    MockControl tmControl = MockControl.createControl(TransactionManager.class);
    TransactionManager tm = (TransactionManager) tmControl.getMock();
    MockJtaTransaction transaction1 = new MockJtaTransaction();
    MockJtaTransaction transaction2 = new MockJtaTransaction();
    tm.getTransaction();
    tmControl.setReturnValue(transaction1, 2);
    tm.getTransaction();
    tmControl.setReturnValue(transaction2, 3);
    MockControl sfControl = MockControl.createControl(SessionFactoryImplementor.class);
    final SessionFactoryImplementor sf = (SessionFactoryImplementor) sfControl.getMock();
    final MockControl session1Control = MockControl.createControl(Session.class);
    final Session session1 = (Session) session1Control.getMock();
    final MockControl session2Control = MockControl.createControl(Session.class);
    final Session session2 = (Session) session2Control.getMock();
    sf.openSession();
    sfControl.setReturnValue(session1, 1);
    sf.openSession();
    sfControl.setReturnValue(session2, 1);
    sf.getTransactionManager();
    sfControl.setReturnValue(tm, 5);
    session1.getFlushMode();
    session1Control.setReturnValue(FlushMode.AUTO, 1);
    session2.getFlushMode();
    session2Control.setReturnValue(FlushMode.AUTO, 1);
    session1.flush();
    session1Control.setVoidCallable(1);
    session2.flush();
    session2Control.setVoidCallable(1);
    session1.close();
    session1Control.setReturnValue(null, 1);
    session2.close();
    session2Control.setReturnValue(null, 1);
    tmControl.replay();
    sfControl.replay();
    session1Control.replay();
session2Control.replay();
// --- exercise: one call per JTA tx, then complete in reverse order --------
assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
HibernateTemplate ht = new HibernateTemplate(sf);
ht.setExposeNativeSession(true);
ht.executeFind(new HibernateCallback() {
    public Object doInHibernate(org.hibernate.Session sess) {
        assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
        assertEquals(session1, sess);
        return null;
    }
});
ht.executeFind(new HibernateCallback() {
    public Object doInHibernate(org.hibernate.Session sess) {
        assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
        assertEquals(session2, sess);
        return null;
    }
});
// complete the second (inner) transaction first, then the first
Synchronization synchronization2 = transaction2.getSynchronization();
assertTrue("JTA synchronization registered", synchronization2 != null);
synchronization2.beforeCompletion();
synchronization2.afterCompletion(Status.STATUS_COMMITTED);
Synchronization synchronization1 = transaction1.getSynchronization();
assertTrue("JTA synchronization registered", synchronization1 != null);
synchronization1.beforeCompletion();
synchronization1.afterCompletion(Status.STATUS_COMMITTED);
assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive());
tmControl.verify();
sfControl.verify();
session1Control.verify();
session2Control.verify();
}

/**
 * SessionFactory proxy that is NOT a SessionFactoryImplementor: the JTA
 * TransactionManager is looked up via session.getSessionFactory() instead.
 */
public void testJtaSessionSynchronizationWithNonSessionFactoryImplementor() throws Exception {
    // --- mock setup and recorded expectations -----------------------------
    MockControl tmControl = MockControl.createControl(TransactionManager.class);
    TransactionManager tm = (TransactionManager) tmControl.getMock();
    MockJtaTransaction transaction = new MockJtaTransaction();
    tm.getTransaction();
    tmControl.setReturnValue(transaction, 6);
    // plain SessionFactory mock (not SessionFactoryImplementor)
    MockControl sfControl = MockControl.createControl(SessionFactory.class);
    final SessionFactory sf = (SessionFactory) sfControl.getMock();
    final MockControl sessionControl = MockControl.createControl(Session.class);
    final Session session = (Session) sessionControl.getMock();
    // the session reports a SessionFactoryImplementor that carries the JTA TM
    MockControl sfiControl = MockControl.createControl(SessionFactoryImplementor.class);
    final SessionFactoryImplementor sfi = (SessionFactoryImplementor) sfiControl.getMock();
    sf.openSession();
    sfControl.setReturnValue(session, 1);
    session.getSessionFactory();
    sessionControl.setReturnValue(sfi, 6);
    sfi.getTransactionManager();
    sfiControl.setReturnValue(tm, 6);
    session.isOpen();
    sessionControl.setReturnValue(true, 4);
    session.getFlushMode();
    sessionControl.setReturnValue(FlushMode.AUTO, 1);
    session.flush();
    sessionControl.setVoidCallable(1);
    session.close();
    sessionControl.setReturnValue(null, 1);
    tmControl.replay();
    sfControl.replay();
    sessionControl.replay();
    sfiControl.replay();
    // --- exercise ---------------------------------------------------------
    assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
    HibernateTemplate ht = new HibernateTemplate(sf);
    ht.setExposeNativeSession(true);
    for (int i = 0; i < 5; i++) {
        ht.executeFind(new HibernateCallback() {
            public Object doInHibernate(org.hibernate.Session sess) {
                assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
                assertEquals(session, sess);
                return null;
            }
        });
    }
    Synchronization synchronization = transaction.getSynchronization();
    assertTrue("JTA Synchronization registered", synchronization != null);
    synchronization.beforeCompletion();
    synchronization.afterCompletion(Status.STATUS_COMMITTED);
    assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
    assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive());
    tmControl.verify();
    sfControl.verify();
    sessionControl.verify();
    sfiControl.verify();
}

/**
 * Session first used outside any Spring transaction scope, then inside a
 * Spring-managed JTA transaction started later on: the same session is reused.
 */
public void testJtaSessionSynchronizationWithSpringTransactionLaterOn() throws Exception {
    // --- mock setup and recorded expectations -----------------------------
    MockControl utControl = MockControl.createControl(UserTransaction.class);
    UserTransaction ut = (UserTransaction) utControl.getMock();
    MockControl tmControl =
MockControl.createControl(TransactionManager.class);
TransactionManager tm = (TransactionManager) tmControl.getMock();
MockJtaTransaction transaction = new MockJtaTransaction();
ut.getStatus();
utControl.setReturnValue(Status.STATUS_ACTIVE, 2);
tm.getTransaction();
tmControl.setReturnValue(transaction, 6);
MockControl sfControl = MockControl.createControl(SessionFactoryImplementor.class);
final SessionFactoryImplementor sf = (SessionFactoryImplementor) sfControl.getMock();
final MockControl sessionControl = MockControl.createControl(Session.class);
final Session session = (Session) sessionControl.getMock();
sf.openSession();
sfControl.setReturnValue(session, 1);
sf.getTransactionManager();
sfControl.setReturnValue(tm, 6);
session.isOpen();
sessionControl.setReturnValue(true, 4);
session.getFlushMode();
sessionControl.setReturnValue(FlushMode.AUTO, 1);
session.flush();
sessionControl.setVoidCallable(1);
session.close();
sessionControl.setReturnValue(null, 1);
utControl.replay();
tmControl.replay();
sfControl.replay();
sessionControl.replay();
// --- exercise: two plain calls, then three inside a Spring JTA tx ---------
assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
final HibernateTemplate ht = new HibernateTemplate(sf);
ht.setExposeNativeSession(true);
for (int i = 0; i < 2; i++) {
    ht.executeFind(new HibernateCallback() {
        public Object doInHibernate(org.hibernate.Session sess) {
            assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
            assertEquals(session, sess);
            return null;
        }
    });
}
TransactionTemplate tt = new TransactionTemplate(new JtaTransactionManager(ut));
tt.execute(new TransactionCallbackWithoutResult() {
    protected void doInTransactionWithoutResult(TransactionStatus status) {
        // the Spring tx participates in the existing JTA tx and session
        for (int i = 2; i < 5; i++) {
            ht.executeFind(new HibernateCallback() {
                public Object doInHibernate(org.hibernate.Session sess) {
                    assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf));
                    assertEquals(session, sess);
                    return null;
                }
            });
        }
    }
});
Synchronization synchronization = transaction.getSynchronization();
assertTrue("JTA synchronization registered", synchronization != null);
synchronization.beforeCompletion();
synchronization.afterCompletion(Status.STATUS_COMMITTED);
assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive());
utControl.verify();
tmControl.verify();
sfControl.verify();
sessionControl.verify();
}

/** Pre-bound session with default flush mode. */
public void testJtaSessionSynchronizationWithPreBound() throws Exception {
    doTestJtaSessionSynchronizationWithPreBound(false);
}

/** Pre-bound session that starts in FlushMode.NEVER. */
public void testJtaJtaSessionSynchronizationWithPreBoundAndFlushNever() throws Exception {
    doTestJtaSessionSynchronizationWithPreBound(true);
}

/**
 * Session pre-bound to the thread (e.g. by OpenSessionInViewFilter): JTA
 * completion must flush/disconnect but keep the session bound, and restore
 * FlushMode.NEVER if it was temporarily switched to AUTO for the flush.
 *
 * @param flushNever true if the pre-bound session starts in FlushMode.NEVER
 */
private void doTestJtaSessionSynchronizationWithPreBound(boolean flushNever) throws Exception {
    // --- mock setup and recorded expectations -----------------------------
    MockControl tmControl = MockControl.createControl(TransactionManager.class);
    TransactionManager tm = (TransactionManager) tmControl.getMock();
    MockJtaTransaction transaction = new MockJtaTransaction();
    tm.getTransaction();
    tmControl.setReturnValue(transaction, 6);
    MockControl sfControl = MockControl.createControl(SessionFactoryImplementor.class);
    final SessionFactoryImplementor sf = (SessionFactoryImplementor) sfControl.getMock();
    final MockControl sessionControl = MockControl.createControl(Session.class);
    final Session session = (Session) sessionControl.getMock();
    sf.getTransactionManager();
    sfControl.setReturnValue(tm, 6);
    session.isOpen();
    sessionControl.setReturnValue(true, 5);
    session.getFlushMode();
    if (flushNever) {
        // NEVER is switched to AUTO so the JTA tx can flush
        sessionControl.setReturnValue(FlushMode.NEVER, 1);
        session.setFlushMode(FlushMode.AUTO);
        sessionControl.setVoidCallable(1);
    }
    else {
        sessionControl.setReturnValue(FlushMode.AUTO, 1);
    }
    tmControl.replay();
    sfControl.replay();
    sessionControl.replay();
    assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf));
    // pre-bind the session, as an OpenSessionInView-style setup would
    TransactionSynchronizationManager.bindResource(sf, new
SessionHolder(session)); try { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); HibernateTemplate ht = new HibernateTemplate(sf); ht.setExposeNativeSession(true); for (int i = 0; i < 5; i++) { ht.executeFind(new HibernateCallback() { public Object doInHibernate(org.hibernate.Session sess) { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); assertEquals(session, sess); return null; } }); } sessionControl.verify(); sessionControl.reset(); session.getFlushMode(); sessionControl.setReturnValue(FlushMode.AUTO, 1); session.flush(); sessionControl.setVoidCallable(1); if (flushNever) { session.setFlushMode(FlushMode.NEVER); sessionControl.setVoidCallable(1); } session.disconnect(); sessionControl.setReturnValue(null, 1); sessionControl.replay(); Synchronization synchronization = transaction.getSynchronization(); assertTrue("JTA synchronization registered", synchronization != null); synchronization.beforeCompletion(); synchronization.afterCompletion(Status.STATUS_COMMITTED); assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); } finally { TransactionSynchronizationManager.unbindResource(sf); } assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); tmControl.verify(); sfControl.verify(); sessionControl.verify(); } public void testJtaSessionSynchronizationWithRemoteTransaction() throws Exception { MockControl tmControl = MockControl.createControl(TransactionManager.class); TransactionManager tm = (TransactionManager) tmControl.getMock(); MockJtaTransaction transaction = new MockJtaTransaction(); MockControl sfControl = MockControl.createControl(SessionFactoryImplementor.class); final SessionFactoryImplementor sf = (SessionFactoryImplementor) sfControl.getMock(); final MockControl sessionControl = MockControl.createControl(Session.class); final Session session = (Session) sessionControl.getMock(); for (int j = 0; j < 2; j++) { 
tmControl.reset(); sfControl.reset(); sessionControl.reset(); tm.getTransaction(); tmControl.setReturnValue(transaction, 6); sf.openSession(); sfControl.setReturnValue(session, 1); sf.getTransactionManager(); sfControl.setReturnValue(tm, 6); session.isOpen(); sessionControl.setReturnValue(true, 4); session.getFlushMode(); sessionControl.setReturnValue(FlushMode.AUTO, 1); session.flush(); sessionControl.setVoidCallable(1); session.close(); sessionControl.setReturnValue(null, 1); tmControl.replay(); sfControl.replay(); sessionControl.replay(); if (j == 0) { assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); } else { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); } HibernateTemplate ht = new HibernateTemplate(sf); ht.setExposeNativeSession(true); for (int i = 0; i < 5; i++) { ht.executeFind(new HibernateCallback() { public Object doInHibernate(org.hibernate.Session sess) { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); assertEquals(session, sess); return null; } }); } final Synchronization synchronization = transaction.getSynchronization(); assertTrue("JTA synchronization registered", synchronization != null); // Call synchronization in a new thread, to simulate a synchronization // triggered by a new remote call from a remote transaction coordinator. 
Thread synch = new Thread() { public void run() { synchronization.beforeCompletion(); synchronization.afterCompletion(Status.STATUS_COMMITTED); } }; synch.start(); synch.join(); assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); SessionHolder sessionHolder = (SessionHolder) TransactionSynchronizationManager.getResource(sf); assertTrue("Thread session holder empty", sessionHolder.isEmpty()); assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); tmControl.verify(); sfControl.verify(); sessionControl.verify(); } TransactionSynchronizationManager.unbindResource(sf); } protected void tearDown() { assertTrue(TransactionSynchronizationManager.getResourceMap().isEmpty()); assertFalse(TransactionSynchronizationManager.isSynchronizationActive()); assertFalse(TransactionSynchronizationManager.isCurrentTransactionReadOnly()); assertFalse(TransactionSynchronizationManager.isActualTransactionActive()); } /** * Interface that combines Hibernate's Session and SessionImplementor interface. * Necessary for creating a mock that implements both interfaces. * Note: Hibernate 3.1's SessionImplementor interface does not extend Session anymore. */ public static interface ExtendedSession extends Session, SessionImplementor { } }
package io.compgen.ngsutils.cli.bam;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMRecordIterator;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.ValidationStringency;
import io.compgen.cmdline.annotation.Command;
import io.compgen.cmdline.annotation.Exec;
import io.compgen.cmdline.annotation.Option;
import io.compgen.cmdline.annotation.UnnamedArg;
import io.compgen.cmdline.exceptions.CommandArgumentException;
import io.compgen.cmdline.impl.AbstractOutputCommand;
import io.compgen.common.IterUtils;
import io.compgen.common.TabWriter;
import io.compgen.common.progress.IncrementingStats;
import io.compgen.common.progress.ProgressUtils;
import io.compgen.ngsutils.NGSUtils;
import io.compgen.ngsutils.annotation.GenomeSpan;
import io.compgen.ngsutils.bam.Orientation;
import io.compgen.ngsutils.bam.Strand;
import io.compgen.ngsutils.bam.support.ReadUtils;
import io.compgen.ngsutils.cli.bam.count.BedSpans;
import io.compgen.ngsutils.cli.bam.count.BinSpans;
import io.compgen.ngsutils.cli.bam.count.GTFSpans;
import io.compgen.ngsutils.cli.bam.count.SpanGroup;
import io.compgen.ngsutils.cli.bam.count.SpanSource;

/**
 * Command-line tool that counts reads from a BAM file over one of three span
 * sources: GTF gene models ({@code --gtf}), BED regions ({@code --bed}), or
 * fixed-size genomic bins ({@code --bins}). Exactly one source must be given.
 *
 * <p>Output is a tab-delimited table (one row per span group) with the read
 * count plus optional columns: properly-paired counts/ratio, average insert
 * size, and inverted (FF/RR) pair counts.
 */
@Command(name="bam-count", desc="Counts the number of reads for genes (GTF), within a BED region, or by bins (--gtf, --bed, or --bins required)", category="bam")
public class BamCount extends AbstractOutputCommand {
    private String samFilename=null;
    private String bedFilename=null;
    private String gtfFilename=null;

    private int binSize = 0;

    private boolean contained = false;
    private boolean lenient = false;
    private boolean silent = false;

    private boolean proper = false;
    private boolean insert = false;
    private boolean inverted = false;
    private boolean unique = false;

    private boolean startOnly = false;

    // SAM flag bitmasks: a read is skipped if it has ANY filterFlags bit,
    // or is missing ANY requiredFlags bit.
    private int filterFlags = 0;
    private int requiredFlags = 0;

    private Orientation orient = Orientation.UNSTRANDED;

    @Option(desc = "Only keep properly paired reads", name = "proper-pairs")
    public void setProperPairs(boolean val) {
        if (val) {
            // require the proper-pair flag and exclude reads whose mate is unmapped
            requiredFlags |= ReadUtils.PROPER_PAIR_FLAG;
            filterFlags |= ReadUtils.MATE_UNMAPPED_FLAG;
        }
    }

    @Option(desc = "Filtering flags", name = "filter-flags", defaultValue = "3844")
    public void setFilterFlags(int flag) {
        filterFlags = flag;
    }

    @Option(desc = "Required flags", name = "required-flags", defaultValue = "0")
    public void setRequiredFlags(int flag) {
        requiredFlags = flag;
    }

    @UnnamedArg(name = "FILE")
    public void setFilename(String filename) {
        samFilename = filename;
    }

    @Option(desc="Count bins of size [value]", name="bins", defaultValue="0")
    public void setBinSize(int binSize) {
        this.binSize = binSize;
    }

    @Option(desc="Count reads for genes (GTF model)", name="gtf", helpValue="fname")
    public void setGTFFile(String gtfFilename) {
        this.gtfFilename = gtfFilename;
    }

    @Option(desc="Count reads within BED regions", name="bed", helpValue="fname")
    public void setBedFile(String bedFilename) {
        this.bedFilename = bedFilename;
    }

    @Option(desc="Use lenient validation strategy", name="lenient")
    public void setLenient(boolean lenient) {
        this.lenient = lenient;
    }

    @Option(desc="Use silent validation strategy", name="silent")
    public void setSilent(boolean silent) {
        this.silent = silent;
    }

    @Option(desc="Only count uniquely mapped reads (requires NH or IH tags)", name="unique")
    public void setUnique(boolean unique) {
        this.unique = unique;
    }

    @Option(desc="Read must be completely contained within region", name="contained")
    public void setContained(boolean contained) {
        this.contained = contained;
    }

    @Option(desc="Library is in FR orientation", name="library-fr")
    public void setLibraryFR(boolean val) {
        if (val) {
            orient = Orientation.FR;
        }
    }

    @Option(desc="Library is in RF orientation", name="library-rf")
    public void setLibraryRF(boolean val) {
        if (val) {
            orient = Orientation.RF;
        }
    }

    @Option(desc="Library is in unstranded orientation (default)", name="library-unstranded")
    public void setLibraryUnstranded(boolean val) {
        if (val) {
            orient = Orientation.UNSTRANDED;
        }
    }

    @Option(desc="Also report the number/ratio of properly-paired reads", name="report-proper")
    public void setReportProperPairs(boolean val) {
        proper = val;
    }

    @Option(desc="Also report the average insert-size of reads", name="insert-size")
    public void setInsertSize(boolean val) {
        insert = val;
    }

    @Option(desc="Also report the number of inverted reads (FF,RR)", name="inverted")
    public void setInverted(boolean val) {
        inverted = val;
    }

    @Option(desc="Only count the starting mapped position (strand specific, for pairs - only counts the first pair)", name="startonly")
    public void setStartOnly(boolean val) {
        startOnly = val;
    }

    /**
     * Runs the count: validates arguments, opens the BAM, builds the requested
     * span source, then writes one output row per span group.
     *
     * @throws CommandArgumentException if not exactly one span source was given
     * @throws IOException on read/write failure
     */
    @Exec
    public void exec() throws CommandArgumentException, IOException {
        // Exactly one of --bins / --bed / --gtf must be specified.
        int sources = 0;
        if (binSize > 0) {
            sources++;
        }
        if (bedFilename != null) {
            sources++;
        }
        if (gtfFilename != null) {
            sources++;
        }
        if (sources != 1) {
            // BUGFIX: message previously listed "--vcf", which is not an
            // option of this command.
            throw new CommandArgumentException("You must specify one of --bins, --bed, or --gtf!");
        }

        SamReaderFactory readerFactory = SamReaderFactory.makeDefault();
        if (lenient) {
            readerFactory.validationStringency(ValidationStringency.LENIENT);
        } else if (silent) {
            readerFactory.validationStringency(ValidationStringency.SILENT);
        }

        // Header comments describing the run.
        TabWriter writer = new TabWriter(out);
        writer.write_line("## program: " + NGSUtils.getVersion());
        writer.write_line("## cmd: " + NGSUtils.getArgs());
        writer.write_line("## input: " + samFilename);
        writer.write_line("## library-orientation: " + orient.toString());
        writer.write_line("## counts: number of reads ");
        if (proper) {
            writer.write_line("## counts: number of properly-paired reads (and not-proper pairs, and not-proper:proper ratio) ");
        }
        if (insert) {
            writer.write_line("## counts: average insert-size ");
        }
        if (inverted) {
            writer.write_line("## counts: number of inverted (FF, RR) reads ");
        }
        if (startOnly) {
            writer.write_line("## counts: starting positions only ");
        }

        SamReader reader = readerFactory.open(new File(samFilename));

        String name;
        SpanSource spanSource = null;
        if (binSize > 0) {
            writer.write_line("## source: bins " + binSize);
            spanSource = new BinSpans(reader.getFileHeader().getSequenceDictionary(), binSize, orient);
            name = "bins - "+ binSize;
        } else if (bedFilename != null) {
            writer.write_line("## source: bed " + bedFilename);
            spanSource = new BedSpans(bedFilename);
            name = bedFilename;
        } else if (gtfFilename != null) {
            writer.write_line("## source: gtf " + gtfFilename);
            spanSource = new GTFSpans(gtfFilename);
            name = gtfFilename;
        } else {
            // unreachable given the check above, but keeps resources closed if it ever fires
            reader.close();
            writer.close();
            throw new CommandArgumentException("You must specify either a bin-size, a BED file, or a GTF file!");
        }

        // write header cols
        for (String header: spanSource.getHeader()) {
            writer.write(header);
        }
        writer.write("read_count");
        if (proper) {
            writer.write("proper");
            writer.write("not_proper");
            writer.write("proper_ratio");
        }
        if (insert) {
            writer.write("ave_insert_size");
        }
        if (inverted) {
            writer.write("inverted_count");
        }
        writer.eol();

        int spanCount = 0;
        boolean missingReferences = false;

        for (SpanGroup spanGroup: IterUtils.wrap(ProgressUtils.getIterator(name, spanSource.iterator(), new IncrementingStats(spanSource.size())))) {
            if (spanGroup == null) {
                continue;
            }
            spanCount ++;
            if (verbose && spanCount % 1000 == 0) {
                System.err.println("[" +spanCount + "]" + spanGroup.getRefName()+":"+spanGroup.getStart());
                System.err.flush();
            }
            // skip spans on references absent from the BAM header
            if (reader.getFileHeader().getSequence(spanGroup.getRefName()) == null) {
                missingReferences = true;
                continue;
            }

            int count = 0;
            int proper_count = 0;
            int notproper_count = 0;
            int insert_count = 0;
            long insert_acc = 0;
            int inverted_count = 0;

            // read names already counted for this span group (avoids double
            // counting a read that overlaps multiple spans in the group)
            Set<String> reads = new HashSet<String>();

            for (GenomeSpan span: spanGroup) {
                // span.start appears to be 0-based; +1 converts to the 1-based
                // coordinate expected by SamReader.query()
                int spanStart = span.start+1;
                int spanEnd = span.end;

                SAMRecordIterator it = reader.query(spanGroup.getRefName(), spanStart, spanEnd, contained);
                while (it.hasNext()) {
                    SAMRecord read = it.next();
                    if ((read.getFlags() & requiredFlags) != requiredFlags) {
                        // if missing a required flag, skip
                        continue;
                    }
                    if ((read.getFlags() & filterFlags) > 0) {
                        // if has any filter flag, skip
                        continue;
                    }

                    if (!reads.contains(read.getReadName())) {
                        if (spanGroup.getStrand() == Strand.NONE || orient == Orientation.UNSTRANDED || (ReadUtils.getFragmentEffectiveStrand(read, orient) == spanGroup.getStrand())) {
                            if (startOnly) {
                                // only count first-of-pair reads, at the
                                // strand-appropriate fragment start position
                                if (read.getReadPairedFlag() && read.getSecondOfPairFlag()) {
                                    continue;
                                }
                                int startpos;
                                if (ReadUtils.getFragmentEffectiveStrand(read, orient) == Strand.PLUS) {
                                    startpos = read.getAlignmentStart()-1;
                                } else {
                                    startpos = read.getAlignmentEnd();
                                }
                                if (!span.contains(new GenomeSpan(spanGroup.getRefName(), startpos))) {
                                    continue;
                                }
                            }

                            // is any part of the read w/in the span?
                            // NOTE(review): refpos is made 0-based (pos - 1) but is
                            // compared against the 1-based spanStart, so the first
                            // base of each span is never matched — confirm the
                            // intended coordinate convention before changing.
                            boolean inspan = false;
                            for (int j=1; j<=read.getReadLength(); j++) {
                                int refpos = read.getReferencePositionAtReadPosition(j) - 1;
                                if (spanStart <= refpos && refpos < spanEnd) {
                                    inspan=true;
                                    break;
                                }
                            }
                            if (!inspan) {
                                continue;
                            }

                            // NOTE: the name is recorded before the uniqueness
                            // check, so a non-unique read also blocks any later
                            // alignment with the same name in this span group.
                            reads.add(read.getReadName());

                            if (unique && !ReadUtils.isReadUniquelyMapped(read)) {
                                continue;
                            }

                            count ++;

                            if (proper) {
                                if (read.getReadPairedFlag() && read.getProperPairFlag()) {
                                    proper_count ++;
                                } else if (read.getReadPairedFlag() && !read.getProperPairFlag()) {
                                    notproper_count ++;
                                }
                            }
                            if (insert) {
                                if (read.getReadPairedFlag() && read.getProperPairFlag()) {
                                    insert_acc += Math.abs(read.getInferredInsertSize());
                                    insert_count ++;
                                }
                            }
                            if (inverted) {
                                // FF or RR orientation: both mates on the same strand
                                if (read.getReadPairedFlag() && read.getProperPairFlag() && read.getReadNegativeStrandFlag() == read.getMateNegativeStrandFlag()) {
                                    inverted_count ++;
                                }
                            }
                        }
                    }
                }
                it.close();
            }

            // one output row per span group
            writer.write(spanGroup.getFields());
            writer.write(count);
            if (proper) {
                writer.write(proper_count);
                writer.write(notproper_count);
                // ratio column is not-proper : proper (0 when no proper pairs seen)
                if (proper_count > 0) {
                    writer.write((double) notproper_count / proper_count);
                } else {
                    writer.write(0);
                }
            }
            if (insert) {
                if (insert_count > 0) {
                    writer.write((double) insert_acc / insert_count);
                } else {
                    writer.write(0);
                }
            }
            if (inverted) {
                writer.write(inverted_count);
            }
            writer.eol();
        }

        writer.close();
        reader.close();
        if (missingReferences) {
            System.err.println("WARNING: Some references/chromosomes in the GTF file were not found in the BAM file");
        }
    }

    /**
     * Computes the total non-overlapping length covered by a set of intervals
     * (e.g. exons of a transcript). Overlapping intervals are merged pairwise
     * as they are added.
     *
     * @param starts interval start positions (parallel to {@code ends})
     * @param ends interval end positions
     * @return sum of merged interval lengths
     */
    protected int calcTranscriptSize(int[] starts, int[] ends) {
        List<Integer[]> intervals = new ArrayList<Integer[]>();
        for (int i=0; i<starts.length; i++) {
            boolean found = false;
            for (Integer[] interval: intervals) {
                int qstart = interval[0];
                int qend = interval[1];
                // overlap test: either endpoint of one interval strictly inside the other
                if ( (qstart < starts[i] && starts[i] < qend) ||
                     (qstart < ends[i] && ends[i] < qend) ||
                     (starts[i] < qstart && qstart < ends[i]) ||
                     (starts[i] < qend && qend < ends[i]) ) {
                    found = true;
                    interval[0] = Math.min(qstart, starts[i]);
                    interval[1] = Math.max(qend, ends[i]);
                    break;
                }
            }
            if (!found) {
                intervals.add(new Integer[] { starts[i], ends[i]});
            }
        }
        // NOTE(review): merging is not transitive — an interval that bridges two
        // previously-separate intervals only merges with the first match.
        int acc = 0;
        for (Integer[] interval: intervals) {
            acc += (interval[1] - interval[0]);
        }
        return acc;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.io.mqtt;

import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;

import com.google.auto.value.AutoValue;
import java.io.IOException;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.ByteArrayCoder;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.apache.beam.sdk.io.UnboundedSource;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PDone;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
import org.fusesource.mqtt.client.BlockingConnection;
import org.fusesource.mqtt.client.MQTT;
import org.fusesource.mqtt.client.Message;
import org.fusesource.mqtt.client.QoS;
import org.fusesource.mqtt.client.Topic;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An unbounded source for MQTT broker.
 *
 * <h3>Reading from a MQTT broker</h3>
 *
 * <p>MqttIO source returns an unbounded {@link PCollection} containing MQTT message payloads (as
 * {@code byte[]}).
 *
 * <p>To configure a MQTT source, you have to provide a MQTT connection configuration including
 * {@code ClientId}, a {@code ServerURI}, a {@code Topic} pattern, and optionally {@code username}
 * and {@code password} to connect to the MQTT broker. The following example illustrates various
 * options for configuring the source:
 *
 * <pre>{@code
 * pipeline.apply(
 *   MqttIO.read()
 *    .withConnectionConfiguration(MqttIO.ConnectionConfiguration.create(
 *      "tcp://host:11883",
 *      "my_topic"))
 *
 * }</pre>
 *
 * <h3>Writing to a MQTT broker</h3>
 *
 * <p>MqttIO sink supports writing {@code byte[]} to a topic on a MQTT broker.
 *
 * <p>To configure a MQTT sink, as for the read, you have to specify a MQTT connection configuration
 * with {@code ServerURI}, {@code Topic}, ...
 *
 * <p>The MqttIO only fully supports QoS 1 (at least once). It's the only QoS level guaranteed due
 * to potential retries on bundles.
 *
 * <p>For instance:
 *
 * <pre>{@code
 * pipeline
 *   .apply(...) // provide PCollection<byte[]>
 *   .MqttIO.write()
 *     .withConnectionConfiguration(MqttIO.ConnectionConfiguration.create(
 *       "tcp://host:11883",
 *       "my_topic"))
 *
 * }</pre>
 */
@Experimental(Experimental.Kind.SOURCE_SINK)
public class MqttIO {

  private static final Logger LOG = LoggerFactory.getLogger(MqttIO.class);

  /** Creates a {@link Read} transform with no record/time bound (unbounded by default). */
  public static Read read() {
    return new AutoValue_MqttIO_Read.Builder()
        .setMaxReadTime(null)
        .setMaxNumRecords(Long.MAX_VALUE)
        .build();
  }

  /** Creates a {@link Write} transform; messages are not retained by default. */
  public static Write write() {
    return new AutoValue_MqttIO_Write.Builder().setRetained(false).build();
  }

  private MqttIO() {}

  /** A POJO describing a MQTT connection. */
  @AutoValue
  public abstract static class ConnectionConfiguration implements Serializable {

    abstract String getServerUri();

    abstract String getTopic();

    @Nullable
    abstract String getClientId();

    @Nullable
    abstract String getUsername();

    @Nullable
    abstract String getPassword();

    abstract Builder builder();

    @AutoValue.Builder
    abstract static class Builder {
      abstract Builder setServerUri(String serverUri);

      abstract Builder setTopic(String topic);

      abstract Builder setClientId(String clientId);

      abstract Builder setUsername(String username);

      abstract Builder setPassword(String password);

      abstract ConnectionConfiguration build();
    }

    /**
     * Describe a connection configuration to the MQTT broker. This method creates an unique random
     * MQTT client ID.
     *
     * @param serverUri The MQTT broker URI.
     * @param topic The MQTT getTopic pattern.
     * @return A connection configuration to the MQTT broker.
     */
    public static ConnectionConfiguration create(String serverUri, String topic) {
      checkArgument(serverUri != null, "serverUri can not be null");
      checkArgument(topic != null, "topic can not be null");
      return new AutoValue_MqttIO_ConnectionConfiguration.Builder()
          .setServerUri(serverUri)
          .setTopic(topic)
          .build();
    }

    /**
     * Describe a connection configuration to the MQTT broker.
     *
     * @param serverUri The MQTT broker URI.
     * @param topic The MQTT getTopic pattern.
     * @param clientId A client ID prefix, used to construct an unique client ID.
     * @return A connection configuration to the MQTT broker.
     * @deprecated This constructor will be removed in a future version of Beam, please use {@link
     *     #create(String, String)} and {@link #withClientId(String)} instead.
     */
    @Deprecated
    public static ConnectionConfiguration create(String serverUri, String topic, String clientId) {
      checkArgument(clientId != null, "clientId can not be null");
      return create(serverUri, topic).withClientId(clientId);
    }

    /** Set up the MQTT broker URI. */
    public ConnectionConfiguration withServerUri(String serverUri) {
      checkArgument(serverUri != null, "serverUri can not be null");
      return builder().setServerUri(serverUri).build();
    }

    /** Set up the MQTT getTopic pattern. */
    public ConnectionConfiguration withTopic(String topic) {
      checkArgument(topic != null, "topic can not be null");
      return builder().setTopic(topic).build();
    }

    /** Set up the client ID prefix, which is used to construct an unique client ID. */
    public ConnectionConfiguration withClientId(String clientId) {
      checkArgument(clientId != null, "clientId can not be null");
      return builder().setClientId(clientId).build();
    }

    /** Set up the username used to authenticate to the broker. */
    public ConnectionConfiguration withUsername(String username) {
      checkArgument(username != null, "username can not be null");
      return builder().setUsername(username).build();
    }

    /** Set up the password used to authenticate to the broker. */
    public ConnectionConfiguration withPassword(String password) {
      checkArgument(password != null, "password can not be null");
      return builder().setPassword(password).build();
    }

    private void populateDisplayData(DisplayData.Builder builder) {
      builder.add(DisplayData.item("serverUri", getServerUri()));
      builder.add(DisplayData.item("topic", getTopic()));
      builder.addIfNotNull(DisplayData.item("clientId", getClientId()));
      builder.addIfNotNull(DisplayData.item("username", getUsername()));
    }

    /**
     * Builds a fusesource {@link MQTT} client from this configuration. A random UUID is appended
     * to the configured client ID prefix (or used alone) so each client is unique.
     */
    private MQTT createClient() throws Exception {
      LOG.debug("Creating MQTT client to {}", getServerUri());
      MQTT client = new MQTT();
      client.setHost(getServerUri());
      if (getUsername() != null) {
        LOG.debug("MQTT client uses username {}", getUsername());
        client.setUserName(getUsername());
        client.setPassword(getPassword());
      }
      if (getClientId() != null) {
        String clientId = getClientId() + "-" + UUID.randomUUID().toString();
        LOG.debug("MQTT client id set to {}", clientId);
        client.setClientId(clientId);
      } else {
        String clientId = UUID.randomUUID().toString();
        LOG.debug("MQTT client id set to random value {}", clientId);
        client.setClientId(clientId);
      }
      return client;
    }
  }

  /** A {@link PTransform} to read from a MQTT broker. */
  @AutoValue
  public abstract static class Read extends PTransform<PBegin, PCollection<byte[]>> {

    @Nullable
    abstract ConnectionConfiguration connectionConfiguration();

    abstract long maxNumRecords();

    @Nullable
    abstract Duration maxReadTime();

    abstract Builder builder();

    @AutoValue.Builder
    abstract static class Builder {
      abstract Builder setConnectionConfiguration(ConnectionConfiguration config);

      abstract Builder setMaxNumRecords(long maxNumRecords);

      abstract Builder setMaxReadTime(Duration maxReadTime);

      abstract Read build();
    }

    /** Define the MQTT connection configuration used to connect to the MQTT broker. */
    public Read withConnectionConfiguration(ConnectionConfiguration configuration) {
      checkArgument(configuration != null, "configuration can not be null");
      return builder().setConnectionConfiguration(configuration).build();
    }

    /**
     * Define the max number of records received by the {@link Read}. When this max number of
     * records is lower than {@code Long.MAX_VALUE}, the {@link Read} will provide a bounded {@link
     * PCollection}.
     */
    public Read withMaxNumRecords(long maxNumRecords) {
      return builder().setMaxNumRecords(maxNumRecords).build();
    }

    /**
     * Define the max read time (duration) while the {@link Read} will receive messages. When this
     * max read time is not null, the {@link Read} will provide a bounded {@link PCollection}.
     */
    public Read withMaxReadTime(Duration maxReadTime) {
      return builder().setMaxReadTime(maxReadTime).build();
    }

    @Override
    public PCollection<byte[]> expand(PBegin input) {
      org.apache.beam.sdk.io.Read.Unbounded<byte[]> unbounded =
          org.apache.beam.sdk.io.Read.from(new UnboundedMqttSource(this));

      PTransform<PBegin, PCollection<byte[]>> transform = unbounded;

      // either bound turns the unbounded source into a bounded one
      if (maxNumRecords() < Long.MAX_VALUE || maxReadTime() != null) {
        transform = unbounded.withMaxReadTime(maxReadTime()).withMaxNumRecords(maxNumRecords());
      }

      return input.getPipeline().apply(transform);
    }

    @Override
    public void populateDisplayData(DisplayData.Builder builder) {
      super.populateDisplayData(builder);
      connectionConfiguration().populateDisplayData(builder);
      if (maxNumRecords() != Long.MAX_VALUE) {
        builder.add(DisplayData.item("maxNumRecords", maxNumRecords()));
      }
      builder.addIfNotNull(DisplayData.item("maxReadTime", maxReadTime()));
    }
  }

  /**
   * Checkpoint for an unbounded MQTT source. Consists of the MQTT messages waiting to be
   * acknowledged and oldest pending message timestamp.
   */
  @VisibleForTesting
  static class MqttCheckpointMark implements UnboundedSource.CheckpointMark, Serializable {

    @VisibleForTesting String clientId;

    @VisibleForTesting Instant oldestMessageTimestamp = Instant.now();

    // pending (unacked) messages; transient because fusesource Message is not Serializable
    @VisibleForTesting transient List<Message> messages = new ArrayList<>();

    public MqttCheckpointMark() {}

    public MqttCheckpointMark(String id) {
      clientId = id;
    }

    /** Records a received message and tracks the oldest pending timestamp (the watermark). */
    public void add(Message message, Instant timestamp) {
      if (timestamp.isBefore(oldestMessageTimestamp)) {
        oldestMessageTimestamp = timestamp;
      }
      messages.add(message);
    }

    @Override
    public void finalizeCheckpoint() {
      LOG.debug("Finalizing checkpoint acknowledging pending messages for client ID {}", clientId);
      for (Message message : messages) {
        try {
          message.ack();
        } catch (Exception e) {
          // best-effort ack: QoS 1 semantics allow redelivery on failure
          LOG.warn("Can't ack message for client ID {}", clientId, e);
        }
      }
      oldestMessageTimestamp = Instant.now();
      messages.clear();
    }

    // set an empty list to messages when deserialize
    private void readObject(java.io.ObjectInputStream stream)
        throws IOException, ClassNotFoundException {
      stream.defaultReadObject();
      messages = new ArrayList<>();
    }

    @Override
    public boolean equals(Object other) {
      if (other instanceof MqttCheckpointMark) {
        MqttCheckpointMark that = (MqttCheckpointMark) other;
        return Objects.equals(this.clientId, that.clientId)
            && Objects.equals(this.oldestMessageTimestamp, that.oldestMessageTimestamp)
            && Objects.deepEquals(this.messages, that.messages);
      } else {
        return false;
      }
    }

    @Override
    public int hashCode() {
      return Objects.hash(clientId, oldestMessageTimestamp, messages);
    }
  }

  @VisibleForTesting
  static class UnboundedMqttSource extends UnboundedSource<byte[], MqttCheckpointMark> {

    private final Read spec;

    public UnboundedMqttSource(Read spec) {
      this.spec = spec;
    }

    @Override
    public UnboundedReader<byte[]> createReader(
        PipelineOptions options, MqttCheckpointMark checkpointMark) {
      return new UnboundedMqttReader(this, checkpointMark);
    }

    @Override
    public List<UnboundedMqttSource> split(int desiredNumSplits, PipelineOptions options) {
      // MQTT is based on a pub/sub pattern
      // so, if we create several subscribers on the same topic, they all will receive the same
      // message, resulting to duplicate messages in the PCollection.
      // So, for MQTT, we limit the number of splits to 1 (unique source).
      return Collections.singletonList(new UnboundedMqttSource(spec));
    }

    @Override
    public void populateDisplayData(DisplayData.Builder builder) {
      spec.populateDisplayData(builder);
    }

    @Override
    public Coder<MqttCheckpointMark> getCheckpointMarkCoder() {
      return SerializableCoder.of(MqttCheckpointMark.class);
    }

    @Override
    public Coder<byte[]> getOutputCoder() {
      return ByteArrayCoder.of();
    }
  }

  @VisibleForTesting
  static class UnboundedMqttReader extends UnboundedSource.UnboundedReader<byte[]> {

    private final UnboundedMqttSource source;

    private MQTT client;
    private BlockingConnection connection;
    private byte[] current;
    private Instant currentTimestamp;
    private MqttCheckpointMark checkpointMark;

    public UnboundedMqttReader(UnboundedMqttSource source, MqttCheckpointMark checkpointMark) {
      this.source = source;
      this.current = null;
      if (checkpointMark != null) {
        this.checkpointMark = checkpointMark;
      } else {
        this.checkpointMark = new MqttCheckpointMark();
      }
    }

    @Override
    public boolean start() throws IOException {
      LOG.debug("Starting MQTT reader ...");
      Read spec = source.spec;
      try {
        client = spec.connectionConfiguration().createClient();
        LOG.debug("Reader client ID is {}", client.getClientId());
        checkpointMark.clientId = client.getClientId().toString();
        connection = client.blockingConnection();
        connection.connect();
        connection.subscribe(
            new Topic[] {new Topic(spec.connectionConfiguration().getTopic(), QoS.AT_LEAST_ONCE)});
        return advance();
      } catch (Exception e) {
        throw new IOException(e);
      }
    }

    @Override
    public boolean advance() throws IOException {
      try {
        LOG.trace("MQTT reader (client ID {}) waiting message ...", client.getClientId());
        // bounded wait so the runner can re-drive advance() instead of blocking forever
        Message message = connection.receive(1, TimeUnit.SECONDS);
        if (message == null) {
          return false;
        }
        current = message.getPayload();
        currentTimestamp = Instant.now();
        checkpointMark.add(message, currentTimestamp);
      } catch (Exception e) {
        throw new IOException(e);
      }
      return true;
    }

    @Override
    public void close() throws IOException {
      LOG.debug("Closing MQTT reader (client ID {})", client.getClientId());
      try {
        if (connection != null) {
          connection.disconnect();
        }
      } catch (Exception e) {
        throw new IOException(e);
      }
    }

    @Override
    public Instant getWatermark() {
      return checkpointMark.oldestMessageTimestamp;
    }

    @Override
    public UnboundedSource.CheckpointMark getCheckpointMark() {
      return checkpointMark;
    }

    @Override
    public byte[] getCurrent() {
      if (current == null) {
        throw new NoSuchElementException();
      }
      return current;
    }

    @Override
    public Instant getCurrentTimestamp() {
      if (current == null) {
        throw new NoSuchElementException();
      }
      return currentTimestamp;
    }

    @Override
    public UnboundedMqttSource getCurrentSource() {
      return source;
    }
  }

  /** A {@link PTransform} to write and send a message to a MQTT server. */
  @AutoValue
  public abstract static class Write extends PTransform<PCollection<byte[]>, PDone> {

    @Nullable
    abstract ConnectionConfiguration connectionConfiguration();

    abstract boolean retained();

    abstract Builder builder();

    @AutoValue.Builder
    abstract static class Builder {
      abstract Builder setConnectionConfiguration(ConnectionConfiguration configuration);

      abstract Builder setRetained(boolean retained);

      abstract Write build();
    }

    /** Define MQTT connection configuration used to connect to the MQTT broker. */
    public Write withConnectionConfiguration(ConnectionConfiguration configuration) {
      checkArgument(configuration != null, "configuration can not be null");
      return builder().setConnectionConfiguration(configuration).build();
    }

    /**
     * Whether or not the publish message should be retained by the messaging engine. Sending a
     * message with the retained set to {@code false} will clear the retained message from the
     * server. The default value is {@code false}. When a subscriber connects, it gets the latest
     * retained message (else it doesn't get any existing message, it will have to wait a new
     * incoming message).
     *
     * @param retained Whether or not the messaging engine should retain the message.
     * @return The {@link Write} {@link PTransform} with the corresponding retained configuration.
     */
    public Write withRetained(boolean retained) {
      return builder().setRetained(retained).build();
    }

    @Override
    public PDone expand(PCollection<byte[]> input) {
      input.apply(ParDo.of(new WriteFn(this)));
      return PDone.in(input.getPipeline());
    }

    @Override
    public void populateDisplayData(DisplayData.Builder builder) {
      connectionConfiguration().populateDisplayData(builder);
      builder.add(DisplayData.item("retained", retained()));
    }

    /** Publishes each element's bytes to the configured topic at QoS 1. */
    private static class WriteFn extends DoFn<byte[], Void> {

      private final Write spec;

      private transient MQTT client;
      private transient BlockingConnection connection;

      public WriteFn(Write spec) {
        this.spec = spec;
      }

      @Setup
      public void createMqttClient() throws Exception {
        LOG.debug("Starting MQTT writer");
        client = spec.connectionConfiguration().createClient();
        LOG.debug("MQTT writer client ID is {}", client.getClientId());
        connection = client.blockingConnection();
        connection.connect();
      }

      @ProcessElement
      public void processElement(ProcessContext context) throws Exception {
        byte[] payload = context.element();
        LOG.debug("Sending message {}", new String(payload, StandardCharsets.UTF_8));
        // BUGFIX: the retained flag was hard-coded to false, silently ignoring
        // withRetained(true); pass the configured value through to the broker.
        connection.publish(
            spec.connectionConfiguration().getTopic(), payload, QoS.AT_LEAST_ONCE, spec.retained());
      }

      @Teardown
      public void closeMqttClient() throws Exception {
        if (connection != null) {
          LOG.debug("Disconnecting MQTT connection (client ID {})", client.getClientId());
          connection.disconnect();
        }
      }
    }
  }
}
/* * Copyright 2015, Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

package io.grpc.benchmarks.netty;

import io.grpc.CallOptions;
import io.grpc.ClientCall;
import io.grpc.ManagedChannel;
import io.grpc.Metadata;
import io.grpc.MethodDescriptor;
import io.grpc.MethodDescriptor.MethodType;
import io.grpc.Server;
import io.grpc.ServerCall;
import io.grpc.ServerCallHandler;
import io.grpc.ServerServiceDefinition;
import io.grpc.ServiceDescriptor;
import io.grpc.Status;
import io.grpc.benchmarks.ByteBufOutputMarshaller;
import io.grpc.netty.NegotiationType;
import io.grpc.netty.NettyChannelBuilder;
import io.grpc.netty.NettyServerBuilder;
import io.grpc.stub.ClientCalls;
import io.grpc.stub.StreamObserver;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.channel.local.LocalAddress;
import io.netty.channel.local.LocalChannel;
import io.netty.channel.local.LocalServerChannel;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.util.concurrent.DefaultThreadFactory;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.NetworkInterface;
import java.net.ServerSocket;
import java.net.SocketAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.Enumeration;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Abstract base class for Netty end-to-end benchmarks.
 *
 * <p>Subclasses call {@link #setup} to spin up an in-process gRPC server plus client channels,
 * then drive load with {@code startUnaryCalls}/{@code startStreamingCalls}/
 * {@code startFlowControlledStreamingCalls}, and finally {@link #teardown}.
 */
public abstract class AbstractBenchmark {

  /**
   * Standard message sizes.
   */
  public enum MessageSize {
    // Max out at 1MB to avoid creating messages larger than Netty's buffer pool can handle
    // by default
    SMALL(10), MEDIUM(1024), LARGE(65536), JUMBO(1048576);

    private final int bytes;

    MessageSize(int bytes) {
      this.bytes = bytes;
    }

    public int bytes() {
      return bytes;
    }
  }

  /**
   * Standard flow-control window sizes.
   */
  public enum FlowWindowSize {
    SMALL(16383), MEDIUM(65535), LARGE(1048575), JUMBO(8388607);

    private final int bytes;

    FlowWindowSize(int bytes) {
      this.bytes = bytes;
    }

    public int bytes() {
      return bytes;
    }
  }

  /**
   * Executor types used by Channel & Server.
   */
  public enum ExecutorType {
    DEFAULT, DIRECT;
  }

  /**
   * Support channel types.
   */
  public enum ChannelType {
    NIO, LOCAL;
  }

  private static final CallOptions CALL_OPTIONS = CallOptions.DEFAULT;

  // Resolved once at class-load time; shared by every benchmark in the process.
  private static final InetAddress BENCHMARK_ADDR = buildBenchmarkAddr();

  /**
   * Resolve the address bound to the benchmark interface. Currently we assume it's a
   * child interface of the loopback interface with the term 'benchmark' in its name.
   *
   * <p>This allows traffic shaping to be applied to an IP address and to have the benchmarks
   * detect its presence and use it. E.g. for Linux we can apply netem to a specific IP to
   * do traffic shaping, bind that IP to the loopback adapter and then apply a label to that
   * binding so that it appears as a child interface.
   *
   * <pre>
   * sudo tc qdisc del dev lo root
   * sudo tc qdisc add dev lo root handle 1: prio
   * sudo tc qdisc add dev lo parent 1:1 handle 2: netem delay 0.1ms rate 10gbit
   * sudo tc filter add dev lo parent 1:0 protocol ip prio 1  \
   *            u32 match ip dst 127.127.127.127 flowid 2:1
   * sudo ip addr add dev lo 127.127.127.127/32 label lo:benchmark
   * </pre>
   */
  private static InetAddress buildBenchmarkAddr() {
    InetAddress tmp = null;
    try {
      Enumeration<NetworkInterface> networkInterfaces = NetworkInterface.getNetworkInterfaces();
      // Labeled loop so the inner search can terminate the whole scan on first match.
      outer: while (networkInterfaces.hasMoreElements()) {
        NetworkInterface networkInterface = networkInterfaces.nextElement();
        if (!networkInterface.isLoopback()) {
          continue;
        }
        Enumeration<NetworkInterface> subInterfaces = networkInterface.getSubInterfaces();
        while (subInterfaces.hasMoreElements()) {
          NetworkInterface subLoopback = subInterfaces.nextElement();
          if (subLoopback.getDisplayName().contains("benchmark")) {
            tmp = subLoopback.getInetAddresses().nextElement();
            System.out.println("\nResolved benchmark address to " + tmp + " on "
                + subLoopback.getDisplayName() + "\n\n");
            break outer;
          }
        }
      }
    } catch (SocketException se) {
      System.out.println("\nWARNING: Error trying to resolve benchmark interface \n" +  se);
    }
    if (tmp == null) {
      // No 'benchmark'-labeled loopback sub-interface found: fall back to localhost.
      try {
        System.out.println(
            "\nWARNING: Unable to resolve benchmark interface, defaulting to localhost");
        tmp = InetAddress.getLocalHost();
      } catch (UnknownHostException uhe) {
        throw new RuntimeException(uhe);
      }
    }
    return tmp;
  }

  protected Server server;
  // Template request/response payloads; per-call messages are slice() views of these.
  protected ByteBuf request;
  protected ByteBuf response;
  protected MethodDescriptor<ByteBuf, ByteBuf> unaryMethod;
  private MethodDescriptor<ByteBuf, ByteBuf> pingPongMethod;
  private MethodDescriptor<ByteBuf, ByteBuf> flowControlledStreaming;
  protected ManagedChannel[] channels;

  public AbstractBenchmark() {
  }

  /**
   * Initialize the environment for the executor.
   *
   * <p>Builds and starts an in-process server exposing unary, ping-pong and flow-controlled
   * streaming methods, and creates {@code channelCount} client channels pointed at it.
   */
  public void setup(ExecutorType clientExecutor,
                    ExecutorType serverExecutor,
                    MessageSize requestSize,
                    MessageSize responseSize,
                    FlowWindowSize windowSize,
                    ChannelType channelType,
                    int maxConcurrentStreams,
                    int channelCount) throws Exception {
    NettyServerBuilder serverBuilder;
    NettyChannelBuilder channelBuilder;
    if (channelType == ChannelType.LOCAL) {
      LocalAddress address = new LocalAddress("netty-e2e-benchmark");
      serverBuilder = NettyServerBuilder.forAddress(address);
      serverBuilder.channelType(LocalServerChannel.class);
      channelBuilder = NettyChannelBuilder.forAddress(address);
      channelBuilder.channelType(LocalChannel.class);
    } else {
      ServerSocket sock = new ServerSocket();
      // Pick a port using an ephemeral socket.
      // NOTE(review): there is a small window between close() and server start where another
      // process could grab this port; presumably acceptable for a benchmark harness.
      sock.bind(new InetSocketAddress(BENCHMARK_ADDR, 0));
      SocketAddress address = sock.getLocalSocketAddress();
      sock.close();
      serverBuilder = NettyServerBuilder.forAddress(address);
      channelBuilder = NettyChannelBuilder.forAddress(address);
    }

    if (serverExecutor == ExecutorType.DIRECT) {
      serverBuilder.directExecutor();
    }
    if (clientExecutor == ExecutorType.DIRECT) {
      channelBuilder.directExecutor();
    }

    // Always use a different worker group from the client.
    ThreadFactory serverThreadFactory = new DefaultThreadFactory("STF pool", true /* daemon */);
    serverBuilder.workerEventLoopGroup(new NioEventLoopGroup(0, serverThreadFactory));

    // Always set connection and stream window size to same value
    serverBuilder.flowControlWindow(windowSize.bytes());
    channelBuilder.flowControlWindow(windowSize.bytes());

    channelBuilder.negotiationType(NegotiationType.PLAINTEXT);
    serverBuilder.maxConcurrentCallsPerConnection(maxConcurrentStreams);

    // Create buffers of the desired size for requests and responses.
    PooledByteBufAllocator alloc = PooledByteBufAllocator.DEFAULT;
    // NOTE(review): writerIndex is set to capacity - 1, so the payload is one byte smaller than
    // the nominal MessageSize — looks intentional, but confirm before relying on exact sizes.
    request = alloc.buffer(requestSize.bytes());
    request.writerIndex(request.capacity() - 1);
    response = alloc.buffer(responseSize.bytes());
    response.writerIndex(response.capacity() - 1);

    // Simple method that sends and receives NettyByteBuf
    unaryMethod = MethodDescriptor.create(MethodType.UNARY,
        "benchmark/unary",
        new ByteBufOutputMarshaller(),
        new ByteBufOutputMarshaller());

    pingPongMethod = MethodDescriptor.create(MethodType.BIDI_STREAMING,
        "benchmark/pingPong",
        new ByteBufOutputMarshaller(),
        new ByteBufOutputMarshaller());

    flowControlledStreaming = MethodDescriptor.create(MethodType.BIDI_STREAMING,
        "benchmark/flowControlledStreaming",
        new ByteBufOutputMarshaller(),
        new ByteBufOutputMarshaller());

    // Server implementation of unary & streaming methods
    serverBuilder.addService(
        ServerServiceDefinition.builder(
            new ServiceDescriptor("benchmark",
                unaryMethod,
                pingPongMethod,
                flowControlledStreaming))
            .addMethod(unaryMethod, new ServerCallHandler<ByteBuf, ByteBuf>() {
              @Override
              public ServerCall.Listener<ByteBuf> startCall(
                  final ServerCall<ByteBuf, ByteBuf> call,
                  Metadata headers) {
                call.sendHeaders(new Metadata());
                call.request(1);
                return new ServerCall.Listener<ByteBuf>() {
                  @Override
                  public void onMessage(ByteBuf message) {
                    // no-op
                    message.release();
                    // Echo a slice of the shared response buffer (no per-call allocation).
                    call.sendMessage(response.slice());
                  }

                  @Override
                  public void onHalfClose() {
                    call.close(Status.OK, new Metadata());
                  }

                  @Override
                  public void onCancel() {

                  }

                  @Override
                  public void onComplete() {
                  }
                };
              }
            })
            .addMethod(pingPongMethod, new ServerCallHandler<ByteBuf, ByteBuf>() {
              @Override
              public ServerCall.Listener<ByteBuf> startCall(
                  final ServerCall<ByteBuf, ByteBuf> call,
                  Metadata headers) {
                call.sendHeaders(new Metadata());
                call.request(1);
                return new ServerCall.Listener<ByteBuf>() {
                  @Override
                  public void onMessage(ByteBuf message) {
                    message.release();
                    call.sendMessage(response.slice());
                    // Request next message
                    call.request(1);
                  }

                  @Override
                  public void onHalfClose() {
                    call.close(Status.OK, new Metadata());
                  }

                  @Override
                  public void onCancel() {

                  }

                  @Override
                  public void onComplete() {

                  }
                };
              }
            })
            .addMethod(flowControlledStreaming, new ServerCallHandler<ByteBuf, ByteBuf>() {
              @Override
              public ServerCall.Listener<ByteBuf> startCall(
                  final ServerCall<ByteBuf, ByteBuf> call,
                  Metadata headers) {
                call.sendHeaders(new Metadata());
                call.request(1);
                return new ServerCall.Listener<ByteBuf>() {
                  @Override
                  public void onMessage(ByteBuf message) {
                    message.release();
                    // Saturate the stream: keep sending while transport flow control allows.
                    while (call.isReady()) {
                      call.sendMessage(response.slice());
                    }
                    // Request next message
                    call.request(1);
                  }

                  @Override
                  public void onHalfClose() {
                    call.close(Status.OK, new Metadata());
                  }

                  @Override
                  public void onCancel() {

                  }

                  @Override
                  public void onComplete() {

                  }

                  @Override
                  public void onReady() {
                    // Resume sending once the transport drains the flow-control window.
                    while (call.isReady()) {
                      call.sendMessage(response.slice());
                    }
                  }
                };
              }
            })
            .build());

    // Build and start the clients and servers
    server = serverBuilder.build();
    server.start();
    channels = new ManagedChannel[channelCount];
    ThreadFactory clientThreadFactory = new DefaultThreadFactory("CTF pool", true /* daemon */);
    for (int i = 0; i < channelCount; i++) {
      // Use a dedicated event-loop for each channel
      channels[i] = channelBuilder
          .eventLoopGroup(new NioEventLoopGroup(1, clientThreadFactory))
          .build();
    }
  }

  /**
   * Start a continuously executing set of unary calls that will terminate when
   * {@code done.get()} is true. Each completed call will increment the counter by the specified
   * delta which benchmarks can use to measure QPS or bandwidth.
   */
  protected void startUnaryCalls(int callsPerChannel,
                                 final AtomicLong counter,
                                 final AtomicBoolean done,
                                 final long counterDelta) {
    for (final ManagedChannel channel : channels) {
      for (int i = 0; i < callsPerChannel; i++) {
        StreamObserver<ByteBuf> observer = new StreamObserver<ByteBuf>() {
          @Override
          public void onNext(ByteBuf value) {
            counter.addAndGet(counterDelta);
          }

          @Override
          public void onError(Throwable t) {
            done.set(true);
          }

          @Override
          public void onCompleted() {
            // Self-perpetuating: each completed call issues the next one until done is set.
            if (!done.get()) {
              ByteBuf slice = request.slice();
              ClientCalls.asyncUnaryCall(
                  channel.newCall(unaryMethod, CALL_OPTIONS), slice, this);
            }
          }
        };
        // Kick off the first call via the completion path above.
        observer.onCompleted();
      }
    }
  }

  /**
   * Start a continuously executing set of duplex streaming ping-pong calls that will terminate
   * when {@code done.get()} is true. Each completed call will increment the counter by the
   * specified delta which benchmarks can use to measure messages per second or bandwidth.
   */
  protected void startStreamingCalls(int callsPerChannel, final AtomicLong counter,
      final AtomicBoolean done, final long counterDelta) {
    for (final ManagedChannel channel : channels) {
      for (int i = 0; i < callsPerChannel; i++) {
        final ClientCall<ByteBuf, ByteBuf> streamingCall =
            channel.newCall(pingPongMethod, CALL_OPTIONS);
        // Ref is needed because the response observer below must send on the request observer,
        // which does not exist until asyncBidiStreamingCall returns.
        final AtomicReference<StreamObserver<ByteBuf>> requestObserverRef =
            new AtomicReference<StreamObserver<ByteBuf>>();
        StreamObserver<ByteBuf> requestObserver = ClientCalls.asyncBidiStreamingCall(
            streamingCall,
            new StreamObserver<ByteBuf>() {
              @Override
              public void onNext(ByteBuf value) {
                if (!done.get()) {
                  counter.addAndGet(counterDelta);
                  requestObserverRef.get().onNext(request.slice());
                  streamingCall.request(1);
                }
              }

              @Override
              public void onError(Throwable t) {
                done.set(true);
              }

              @Override
              public void onCompleted() {
              }
            });
        requestObserverRef.set(requestObserver);
        // Prime the stream with two outstanding requests so the pipeline never idles.
        requestObserver.onNext(request.slice());
        requestObserver.onNext(request.slice());
      }
    }
  }

  /**
   * Start a continuously executing set of duplex streaming ping-pong calls that will terminate
   * when {@code done.get()} is true. Each completed call will increment the counter by the
   * specified delta which benchmarks can use to measure messages per second or bandwidth.
   */
  protected void startFlowControlledStreamingCalls(int callsPerChannel,
      final AtomicLong counter, final AtomicBoolean done, final long counterDelta) {
    for (final ManagedChannel channel : channels) {
      for (int i = 0; i < callsPerChannel; i++) {
        final ClientCall<ByteBuf, ByteBuf> streamingCall =
            channel.newCall(flowControlledStreaming, CALL_OPTIONS);
        final AtomicReference<StreamObserver<ByteBuf>> requestObserverRef =
            new AtomicReference<StreamObserver<ByteBuf>>();
        StreamObserver<ByteBuf> requestObserver = ClientCalls.asyncBidiStreamingCall(
            streamingCall,
            new StreamObserver<ByteBuf>() {
              @Override
              public void onNext(ByteBuf value) {
                if (!done.get()) {
                  counter.addAndGet(counterDelta);
                  // Only pull responses here; the server pushes as fast as flow control allows.
                  streamingCall.request(1);
                }
              }

              @Override
              public void onError(Throwable t) {
                done.set(true);
              }

              @Override
              public void onCompleted() {
              }
            });
        requestObserverRef.set(requestObserver);
        // Single request message triggers the server-side flow-controlled push loop.
        requestObserver.onNext(request.slice());
      }
    }
  }

  /**
   * Shutdown all the client channels and then shutdown the server.
   */
  protected void teardown() throws Exception {
    // NOTE(review): channels are shut down but not awaited; only the server termination is
    // bounded (5s). Presumably fine for benchmark teardown — confirm if leaks are observed.
    for (ManagedChannel channel : channels) {
      channel.shutdown();
    }
    server.shutdown().awaitTermination(5, TimeUnit.SECONDS);
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.util.Collection; import java.util.List; import java.util.Random; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.hdfs.BlockMissingException; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys; import org.apache.hadoop.hdfs.client.impl.CorruptFileBlockIterator; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils; import org.apache.hadoop.util.StringUtils; import 
org.junit.Test; import org.slf4j.Logger; /** * This class tests the listCorruptFileBlocks API. * We create 3 files; intentionally delete their blocks * Use listCorruptFileBlocks to validate that we get the list of corrupt * files/blocks; also test the "paging" support by calling the API * with a block # from a previous call and validate that the subsequent * blocks/files are also returned. */ public class TestListCorruptFileBlocks { static final Logger LOG = NameNode.stateChangeLog; /** check if nn.getCorruptFiles() returns a file that has corrupted blocks */ @Test (timeout=300000) public void testListCorruptFilesCorruptedBlock() throws Exception { MiniDFSCluster cluster = null; try { Configuration conf = new HdfsConfiguration(); // datanode scans directories conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 1); // datanode sends block reports conf.setInt(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 3 * 1000); // Set short retry timeouts so this test runs faster conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 10); cluster = new MiniDFSCluster.Builder(conf).build(); FileSystem fs = cluster.getFileSystem(); // create two files with one block each DFSTestUtil util = new DFSTestUtil.Builder(). setName("testCorruptFilesCorruptedBlock").setNumFiles(2). setMaxLevels(1).setMaxSize(512).build(); util.createFiles(fs, "/srcdat10"); // fetch bad file list from namenode. There should be none. final NameNode namenode = cluster.getNameNode(); Collection<FSNamesystem.CorruptFileBlockInfo> badFiles = namenode. getNamesystem().listCorruptFileBlocks("/", null); assertEquals("Namenode has " + badFiles.size() + " corrupt files. 
Expecting None.", 0, badFiles.size()); // Now deliberately corrupt one block String bpid = cluster.getNamesystem().getBlockPoolId(); File storageDir = cluster.getInstanceStorageDir(0, 1); File data_dir = MiniDFSCluster.getFinalizedDir(storageDir, bpid); assertTrue("data directory does not exist", data_dir.exists()); List<File> metaFiles = MiniDFSCluster.getAllBlockFiles(data_dir); assertTrue("Data directory does not contain any blocks or there was an " + "IO error", metaFiles != null && !metaFiles.isEmpty()); File metaFile = metaFiles.get(0); RandomAccessFile file = new RandomAccessFile(metaFile, "rw"); FileChannel channel = file.getChannel(); long position = channel.size() - 2; int length = 2; byte[] buffer = new byte[length]; new Random(13L).nextBytes(buffer); channel.write(ByteBuffer.wrap(buffer), position); file.close(); LOG.info("Deliberately corrupting file " + metaFile.getName() + " at offset " + position + " length " + length); // read all files to trigger detection of corrupted replica try { util.checkFiles(fs, "/srcdat10"); } catch (BlockMissingException e) { System.out.println("Received BlockMissingException as expected."); } catch (IOException e) { assertTrue("Corrupted replicas not handled properly. Expecting BlockMissingException " + " but received IOException " + e, false); } // fetch bad file list from namenode. There should be one file. badFiles = namenode.getNamesystem().listCorruptFileBlocks("/", null); LOG.info("Namenode has bad files. " + badFiles.size()); assertTrue("Namenode has " + badFiles.size() + " bad files. Expecting 1.", badFiles.size() == 1); util.cleanup(fs, "/srcdat10"); } finally { if (cluster != null) { cluster.shutdown(); } } } /** * Check that listCorruptFileBlocks works while the namenode is still in safemode. 
*/ @Test (timeout=300000) public void testListCorruptFileBlocksInSafeMode() throws Exception { MiniDFSCluster cluster = null; try { Configuration conf = new HdfsConfiguration(); // datanode scans directories conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 1); // datanode sends block reports conf.setInt(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 3 * 1000); // never leave safemode automatically conf.setFloat(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, 1.5f); // start populating repl queues immediately conf.setFloat(DFSConfigKeys.DFS_NAMENODE_REPL_QUEUE_THRESHOLD_PCT_KEY, 0f); // Set short retry timeouts so this test runs faster conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 10); cluster = new MiniDFSCluster.Builder(conf).waitSafeMode(false).build(); cluster.getNameNodeRpc().setSafeMode( HdfsConstants.SafeModeAction.SAFEMODE_LEAVE, false); FileSystem fs = cluster.getFileSystem(); // create two files with one block each DFSTestUtil util = new DFSTestUtil.Builder(). setName("testListCorruptFileBlocksInSafeMode").setNumFiles(2). setMaxLevels(1).setMaxSize(512).build(); util.createFiles(fs, "/srcdat10"); // fetch bad file list from namenode. There should be none. Collection<FSNamesystem.CorruptFileBlockInfo> badFiles = cluster.getNameNode().getNamesystem().listCorruptFileBlocks("/", null); assertEquals("Namenode has " + badFiles.size() + " corrupt files. 
Expecting None.", 0, badFiles.size()); // Now deliberately corrupt one block File storageDir = cluster.getInstanceStorageDir(0, 0); File data_dir = MiniDFSCluster.getFinalizedDir(storageDir, cluster.getNamesystem().getBlockPoolId()); assertTrue("data directory does not exist", data_dir.exists()); List<File> metaFiles = MiniDFSCluster.getAllBlockFiles(data_dir); assertTrue("Data directory does not contain any blocks or there was an " + "IO error", metaFiles != null && !metaFiles.isEmpty()); File metaFile = metaFiles.get(0); RandomAccessFile file = new RandomAccessFile(metaFile, "rw"); FileChannel channel = file.getChannel(); long position = channel.size() - 2; int length = 2; byte[] buffer = new byte[length]; new Random(13L).nextBytes(buffer); channel.write(ByteBuffer.wrap(buffer), position); file.close(); LOG.info("Deliberately corrupting file " + metaFile.getName() + " at offset " + position + " length " + length); // read all files to trigger detection of corrupted replica try { util.checkFiles(fs, "/srcdat10"); } catch (BlockMissingException e) { System.out.println("Received BlockMissingException as expected."); } catch (IOException e) { assertTrue("Corrupted replicas not handled properly. " + "Expecting BlockMissingException " + " but received IOException " + e, false); } // fetch bad file list from namenode. There should be one file. badFiles = cluster.getNameNode().getNamesystem(). listCorruptFileBlocks("/", null); LOG.info("Namenode has bad files. " + badFiles.size()); assertTrue("Namenode has " + badFiles.size() + " bad files. 
Expecting 1.", badFiles.size() == 1); // restart namenode cluster.restartNameNode(0); fs = cluster.getFileSystem(); // wait until replication queues have been initialized while (!cluster.getNameNode().namesystem.getBlockManager() .isPopulatingReplQueues()) { try { LOG.info("waiting for replication queues"); Thread.sleep(1000); } catch (InterruptedException ignore) { } } // read all files to trigger detection of corrupted replica try { util.checkFiles(fs, "/srcdat10"); } catch (BlockMissingException e) { System.out.println("Received BlockMissingException as expected."); } catch (IOException e) { assertTrue("Corrupted replicas not handled properly. " + "Expecting BlockMissingException " + " but received IOException " + e, false); } // fetch bad file list from namenode. There should be one file. badFiles = cluster.getNameNode().getNamesystem(). listCorruptFileBlocks("/", null); LOG.info("Namenode has bad files. " + badFiles.size()); assertTrue("Namenode has " + badFiles.size() + " bad files. 
Expecting 1.", badFiles.size() == 1); // check that we are still in safe mode assertTrue("Namenode is not in safe mode", cluster.getNameNode().isInSafeMode()); // now leave safe mode so that we can clean up cluster.getNameNodeRpc().setSafeMode( HdfsConstants.SafeModeAction.SAFEMODE_LEAVE, false); util.cleanup(fs, "/srcdat10"); } catch (Exception e) { LOG.error(StringUtils.stringifyException(e)); throw e; } finally { if (cluster != null) { cluster.shutdown(); } } } // deliberately remove blocks from a file and validate the list-corrupt-file-blocks API @Test (timeout=300000) public void testlistCorruptFileBlocks() throws Exception { Configuration conf = new Configuration(); conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 1000); conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 1); // datanode scans // directories FileSystem fs = null; MiniDFSCluster cluster = null; try { cluster = new MiniDFSCluster.Builder(conf).build(); cluster.waitActive(); fs = cluster.getFileSystem(); DFSTestUtil util = new DFSTestUtil.Builder(). setName("testGetCorruptFiles").setNumFiles(3).setMaxLevels(1). 
setMaxSize(1024).build(); util.createFiles(fs, "/corruptData"); final NameNode namenode = cluster.getNameNode(); Collection<FSNamesystem.CorruptFileBlockInfo> corruptFileBlocks = namenode.getNamesystem().listCorruptFileBlocks("/corruptData", null); int numCorrupt = corruptFileBlocks.size(); assertTrue(numCorrupt == 0); // delete the blocks String bpid = cluster.getNamesystem().getBlockPoolId(); for (int i = 0; i < 4; i++) { for (int j = 0; j <= 1; j++) { File storageDir = cluster.getInstanceStorageDir(i, j); File data_dir = MiniDFSCluster.getFinalizedDir(storageDir, bpid); List<File> metadataFiles = MiniDFSCluster.getAllBlockMetadataFiles( data_dir); if (metadataFiles == null) continue; // assertTrue("Blocks do not exist in data-dir", (blocks != null) && // (blocks.length > 0)); for (File metadataFile : metadataFiles) { File blockFile = Block.metaToBlockFile(metadataFile); LOG.info("Deliberately removing file " + blockFile.getName()); assertTrue("Cannot remove file.", blockFile.delete()); LOG.info("Deliberately removing file " + metadataFile.getName()); assertTrue("Cannot remove file.", metadataFile.delete()); // break; } } } int count = 0; corruptFileBlocks = namenode.getNamesystem(). listCorruptFileBlocks("/corruptData", null); numCorrupt = corruptFileBlocks.size(); while (numCorrupt < 3) { Thread.sleep(1000); corruptFileBlocks = namenode.getNamesystem() .listCorruptFileBlocks("/corruptData", null); numCorrupt = corruptFileBlocks.size(); count++; if (count > 30) break; } // Validate we get all the corrupt files LOG.info("Namenode has bad files. 
      " + numCorrupt);
      assertEquals(3, numCorrupt);

      // Exercise the paging API: the cookie "1" requests the page that starts
      // after the first corrupt block.
      FSNamesystem.CorruptFileBlockInfo[] cfb = corruptFileBlocks
          .toArray(new FSNamesystem.CorruptFileBlockInfo[0]);
      // now get the 2nd and 3rd file that is corrupt
      String[] cookie = new String[]{"1"};
      Collection<FSNamesystem.CorruptFileBlockInfo> nextCorruptFileBlocks =
          namenode.getNamesystem()
              .listCorruptFileBlocks("/corruptData", cookie);
      FSNamesystem.CorruptFileBlockInfo[] ncfb = nextCorruptFileBlocks
          .toArray(new FSNamesystem.CorruptFileBlockInfo[0]);
      numCorrupt = nextCorruptFileBlocks.size();
      assertEquals(2, numCorrupt);
      // The first entry of the second page must be the second entry overall.
      assertTrue(ncfb[0].block.getBlockName()
          .equalsIgnoreCase(cfb[1].block.getBlockName()));

      // The cookie was advanced by the previous call; a further call must be
      // past the end of the listing.
      corruptFileBlocks =
          namenode.getNamesystem()
              .listCorruptFileBlocks("/corruptData", cookie);
      numCorrupt = corruptFileBlocks.size();
      assertEquals(0, numCorrupt);

      // Do a listing on a dir which doesn't have any corrupt blocks and
      // validate
      util.createFiles(fs, "/goodData");
      corruptFileBlocks =
          namenode.getNamesystem().listCorruptFileBlocks("/goodData", null);
      numCorrupt = corruptFileBlocks.size();
      assertEquals(0, numCorrupt);

      util.cleanup(fs, "/corruptData");
      util.cleanup(fs, "/goodData");
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * Drains the iterator, logging each path, and returns the number of
   * entries it produced.
   *
   * @param iter iterator over corrupt-file paths
   * @return number of paths seen
   * @throws IOException if the iterator fails
   */
  private int countPaths(RemoteIterator<Path> iter) throws IOException {
    int i = 0;
    while (iter.hasNext()) {
      LOG.info("PATH: " + iter.next().toUri().getPath());
      i++;
    }
    return i;
  }

  /**
   * test listCorruptFileBlocks in DistributedFileSystem
   */
  @Test (timeout=300000)
  public void testlistCorruptFileBlocksDFS() throws Exception {
    Configuration conf = new Configuration();
    conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 1000);
    // datanode scans directories frequently so deleted blocks are noticed fast
    conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 1);
    FileSystem fs = null;

    MiniDFSCluster cluster = null;
    try {
      cluster = new MiniDFSCluster.Builder(conf).build();
      cluster.waitActive();
      fs = cluster.getFileSystem();
      DistributedFileSystem dfs = (DistributedFileSystem) fs;
      DFSTestUtil util = new DFSTestUtil.Builder().
          setName("testGetCorruptFiles").setNumFiles(3).
          setMaxLevels(1).setMaxSize(1024).build();
      util.createFiles(fs, "/corruptData");

      // No corruption yet, so the listing must be empty.
      RemoteIterator<Path> corruptFileBlocks =
          dfs.listCorruptFileBlocks(new Path("/corruptData"));
      int numCorrupt = countPaths(corruptFileBlocks);
      assertEquals(0, numCorrupt);

      // delete the blocks
      String bpid = cluster.getNamesystem().getBlockPoolId();
      // For loop through number of datadirectories per datanode (2)
      for (int i = 0; i < 2; i++) {
        File storageDir = cluster.getInstanceStorageDir(0, i);
        File data_dir = MiniDFSCluster.getFinalizedDir(storageDir, bpid);
        List<File> metadataFiles = MiniDFSCluster.getAllBlockMetadataFiles(
            data_dir);
        if (metadataFiles == null)
          continue;
        // assertTrue("Blocks do not exist in data-dir", (blocks != null) &&
        // (blocks.length > 0));
        for (File metadataFile : metadataFiles) {
          File blockFile = Block.metaToBlockFile(metadataFile);
          LOG.info("Deliberately removing file " + blockFile.getName());
          assertTrue("Cannot remove file.", blockFile.delete());
          LOG.info("Deliberately removing file " + metadataFile.getName());
          assertTrue("Cannot remove file.", metadataFile.delete());
          // break;
        }
      }

      // Poll (up to ~30s) until the namenode has noticed all 3 corrupt files.
      int count = 0;
      corruptFileBlocks = dfs.listCorruptFileBlocks(new Path("/corruptData"));
      numCorrupt = countPaths(corruptFileBlocks);
      while (numCorrupt < 3) {
        Thread.sleep(1000);
        corruptFileBlocks = dfs.listCorruptFileBlocks(new Path("/corruptData"));
        numCorrupt = countPaths(corruptFileBlocks);
        count++;
        if (count > 30)
          break;
      }

      // Validate we get all the corrupt files
      LOG.info("Namenode has bad files. " + numCorrupt);
      assertEquals(3, numCorrupt);

      util.cleanup(fs, "/corruptData");
      util.cleanup(fs, "/goodData");
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * Test if NN.listCorruptFiles() returns the right number of results.
   * The corrupt blocks are detected by the BlockPoolSliceScanner.
   * Also, test that DFS.listCorruptFileBlocks can make multiple successive
   * calls.
   */
  @Test (timeout=300000)
  public void testMaxCorruptFiles() throws Exception {
    MiniDFSCluster cluster = null;
    try {
      Configuration conf = new HdfsConfiguration();
      // datanode sends block reports every 3s
      conf.setInt(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 3 * 1000);
      cluster = new MiniDFSCluster.Builder(conf).build();
      FileSystem fs = cluster.getFileSystem();
      final int maxCorruptFileBlocks =
          conf.getInt(DFSConfigKeys.DFS_NAMENODE_MAX_CORRUPT_FILE_BLOCKS_RETURNED_KEY, 100);

      // create 3x the per-call cap worth of files, one block each, so that a
      // single listCorruptFileBlocks call cannot return everything
      DFSTestUtil util = new DFSTestUtil.Builder().setName("testMaxCorruptFiles").
          setNumFiles(maxCorruptFileBlocks * 3).setMaxLevels(1).setMaxSize(512).
          build();
      util.createFiles(fs, "/srcdat2", (short) 1);
      util.waitReplication(fs, "/srcdat2", (short) 1);

      // verify that there are no bad blocks.
      final NameNode namenode = cluster.getNameNode();
      Collection<FSNamesystem.CorruptFileBlockInfo> badFiles = namenode.
          getNamesystem().listCorruptFileBlocks("/srcdat2", null);
      assertEquals(
          "Namenode has " + badFiles.size() + " corrupt files. Expecting none.",
          0, badFiles.size());

      // Now deliberately remove blocks from all files
      final String bpid = cluster.getNamesystem().getBlockPoolId();
      for (int i=0; i<4; i++) {
        for (int j=0; j<=1; j++) {
          File storageDir = cluster.getInstanceStorageDir(i, j);
          File data_dir = MiniDFSCluster.getFinalizedDir(storageDir, bpid);
          LOG.info("Removing files from " + data_dir);
          List<File> metadataFiles = MiniDFSCluster.getAllBlockMetadataFiles(
              data_dir);
          if (metadataFiles == null)
            continue;
          for (File metadataFile : metadataFiles) {
            File blockFile = Block.metaToBlockFile(metadataFile);
            assertTrue("Cannot remove file.", blockFile.delete());
            assertTrue("Cannot remove file.", metadataFile.delete());
          }
        }
      }

      // Run the directoryScanner to update the Datanodes volumeMap
      DataNode dn = cluster.getDataNodes().get(0);
      DataNodeTestUtils.runDirectoryScanner(dn);

      // Occasionally the BlockPoolSliceScanner can run before we have removed
      // the blocks. Restart the Datanode to trigger the scanner into running
      // once more.
      LOG.info("Restarting Datanode to trigger BlockPoolSliceScanner");
      cluster.restartDataNodes();
      cluster.waitActive();

      // Poll until the namenode-side listing is saturated at the per-call cap.
      badFiles =
          namenode.getNamesystem().listCorruptFileBlocks("/srcdat2", null);

      while (badFiles.size() < maxCorruptFileBlocks) {
        LOG.info("# of corrupt files is: " + badFiles.size());
        Thread.sleep(10000);
        badFiles = namenode.getNamesystem().
            listCorruptFileBlocks("/srcdat2", null);
      }
      badFiles = namenode.getNamesystem().
          listCorruptFileBlocks("/srcdat2", null);
      LOG.info("Namenode has bad files. " + badFiles.size());
      assertTrue("Namenode has " + badFiles.size() + " bad files. Expecting " +
          maxCorruptFileBlocks + ".",
          badFiles.size() == maxCorruptFileBlocks);

      // The client-side iterator pages past the cap, so it must see more than
      // one page's worth of corrupt blocks across more than one RPC.
      CorruptFileBlockIterator iter = (CorruptFileBlockIterator)
          fs.listCorruptFileBlocks(new Path("/srcdat2"));
      int corruptPaths = countPaths(iter);
      assertTrue("Expected more than " + maxCorruptFileBlocks +
          " corrupt file blocks but got " + corruptPaths,
          corruptPaths > maxCorruptFileBlocks);
      assertTrue("Iterator should have made more than 1 call but made " +
          iter.getCallsMade(),
          iter.getCallsMade() > 1);

      util.cleanup(fs, "/srcdat2");
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * Listing corrupt file blocks must also work for paths that are relative
   * to the filesystem's working directory.
   */
  @Test(timeout = 60000)
  public void testListCorruptFileBlocksOnRelativePath() throws Exception {
    Configuration conf = new Configuration();
    conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 1000);
    conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 1);
    MiniDFSCluster cluster = null;
    try {
      cluster = new MiniDFSCluster.Builder(conf).build();
      cluster.waitActive();
      FileSystem fs = cluster.getFileSystem();
      DistributedFileSystem dfs = (DistributedFileSystem) fs;

      final Path baseDir = new Path("/somewhere/base");
      fs.mkdirs(baseDir);
      // set working dir, so "corruptData" below resolves under baseDir
      fs.setWorkingDirectory(baseDir);
      DFSTestUtil util = new DFSTestUtil.Builder()
          .setName("testGetCorruptFilesOnRelativePath").setNumFiles(3)
          .setMaxLevels(1).setMaxSize(1024).build();
      util.createFiles(fs, "corruptData");

      RemoteIterator<Path> corruptFileBlocks = dfs
          .listCorruptFileBlocks(new Path("corruptData"));
      int numCorrupt = countPaths(corruptFileBlocks);
      assertEquals(0, numCorrupt);

      // delete the blocks
      String bpid = cluster.getNamesystem().getBlockPoolId();
      // For loop through number of data directories per datanode (2)
      for (int i = 0; i < 2; i++) {
        File storageDir = cluster.getInstanceStorageDir(0, i);
        File data_dir = MiniDFSCluster.getFinalizedDir(storageDir, bpid);
        List<File> metadataFiles = MiniDFSCluster
            .getAllBlockMetadataFiles(data_dir);
        if (metadataFiles == null)
          continue;
        for (File metadataFile : metadataFiles) {
          File blockFile = Block.metaToBlockFile(metadataFile);
          LOG.info("Deliberately removing file " + blockFile.getName());
          assertTrue("Cannot remove file.", blockFile.delete());
          LOG.info("Deliberately removing file " + metadataFile.getName());
          assertTrue("Cannot remove file.", metadataFile.delete());
        }
      }

      // Poll (up to ~30s) until all 3 corrupt files show up.
      int count = 0;
      corruptFileBlocks = dfs.listCorruptFileBlocks(new Path("corruptData"));
      numCorrupt = countPaths(corruptFileBlocks);
      while (numCorrupt < 3) {
        Thread.sleep(1000);
        corruptFileBlocks = dfs.listCorruptFileBlocks(new Path("corruptData"));
        numCorrupt = countPaths(corruptFileBlocks);
        count++;
        if (count > 30)
          break;
      }

      // Validate we get all the corrupt files
      LOG.info("Namenode has bad files. " + numCorrupt);
      assertEquals("Failed to get corrupt files!", 3, numCorrupt);

      util.cleanup(fs, "corruptData");
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }
}
package com.github.skjolber.packing.iterator;

import static com.google.common.truth.Truth.assertThat;
// This class uses the JUnit 5 @Test annotation, so assertions must come from
// the Jupiter Assertions class; the previous static imports of the legacy
// JUnit 4 org.junit.Assert only worked when both JUnit jars were on the
// classpath.
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.util.ArrayList;
import java.util.List;

import org.junit.jupiter.api.Test;

import com.github.skjolber.packing.api.Box;
import com.github.skjolber.packing.api.Dimension;
import com.github.skjolber.packing.api.StackableItem;

/**
 * Unit tests for {@link DefaultPermutationRotationIterator}: rotation
 * counting, permutation enumeration, permutation-count overflow handling and
 * permutation pruning via {@code removePermutations}.
 */
class PermutationRotationIteratorTest {

	/** Builds a rotatable (3D) 1x1x3 test box with the given description. */
	private static StackableItem newRod(String description) {
		return new StackableItem(Box.newBuilder().withRotate3D().withSize(1, 1, 3)
				.withDescription(description).withWeight(1).build());
	}

	/** Builds {@code count} copies of a rotatable (3D) 1x1x3 test box. */
	private static StackableItem newRod(String description, int count) {
		return new StackableItem(Box.newBuilder().withRotate3D().withSize(1, 1, 3)
				.withDescription(description).withWeight(1).build(), count);
	}

	/**
	 * countRotations() must agree with the number of states produced by
	 * exhaustively calling nextRotation(), for 1..8 boxes.
	 */
	@Test
	void testCount() {
		for(int i = 1; i <= 8; i++) {
			Dimension container = new Dimension(null, 3 * (i + 1), 1, 1);

			List<StackableItem> products1 = new ArrayList<>();
			for(int k = 0; k < i; k++) {
				Box box = Box.newBuilder().withSize(3, 1, 1).withRotate3D()
						.withDescription(Integer.toString(k)).withWeight(1).build();
				products1.add(new StackableItem(box));
			}

			DefaultPermutationRotationIterator rotator =
					new DefaultPermutationRotationIterator(container, products1);

			long count = rotator.countRotations();

			// the initial state counts as one rotation, hence do/while
			int rotate = 0;
			do {
				rotate++;
			} while(rotator.nextRotation() != -1);

			assertEquals(count, rotate);
		}
	}

	/** A 1x2x3 box in a roomy container has all 6 distinct 3D rotations. */
	@Test
	void testNumberOfUnconstrainedRotations() {
		Dimension container = new Dimension(null, 3, 3, 3);

		List<StackableItem> products = new ArrayList<>();
		Box box = Box.newBuilder().withSize(1, 2, 3).withRotate3D()
				.withDescription("0").withWeight(1).build();
		products.add(new StackableItem(box));

		DefaultPermutationRotationIterator rotator =
				new DefaultPermutationRotationIterator(container, products);
		assertEquals(6, rotator.countRotations());
	}

	/** A container that exactly matches one orientation permits only it. */
	@Test
	void testNumberOfConstrainedRotations() {
		Dimension container = new Dimension(null, 1, 2, 3);

		List<StackableItem> products = new ArrayList<>();
		Box box = Box.newBuilder().withRotate3D().withSize(1, 2, 3)
				.withDescription("0").withWeight(1).build();
		products.add(new StackableItem(box));

		DefaultPermutationRotationIterator rotator =
				new DefaultPermutationRotationIterator(container, products);
		assertEquals(1, rotator.countRotations());
	}

	/** With 2D rotation a 3x1x1 box has two distinct orientations. */
	@Test
	void testNumberOfRotationsForSquare2D() {
		Dimension container = new Dimension(null, 3, 3, 3);

		List<StackableItem> products = new ArrayList<>();
		Box box = Box.newBuilder().withSize(3, 1, 1).withRotate2D()
				.withDescription("0").withWeight(1).build();
		products.add(new StackableItem(box));

		DefaultPermutationRotationIterator rotator =
				new DefaultPermutationRotationIterator(container, products);
		assertEquals(2, rotator.countRotations());
	}

	/** A tight 3x1x1 container leaves a 2D-rotatable 3x1x1 box one fit. */
	@Test
	void testNumberOfConstrainedRotationsForSquare2D() {
		Dimension container = new Dimension(null, 3, 1, 1);

		List<StackableItem> products = new ArrayList<>();
		Box box = Box.newBuilder().withSize(3, 1, 1).withRotate2D()
				.withDescription("0").withWeight(1).build();
		products.add(new StackableItem(box));

		DefaultPermutationRotationIterator rotator =
				new DefaultPermutationRotationIterator(container, products);
		assertEquals(1, rotator.countRotations());
	}

	/** A cube has a single distinct rotation. */
	@Test
	void testNumberOfRotationsForSquare3D() {
		Dimension container = new Dimension(null, 3, 3, 3);

		List<StackableItem> products = new ArrayList<>();
		Box box = Box.newBuilder().withRotate3D().withSize(1, 1, 1)
				.withDescription("0").withWeight(1).build();
		products.add(new StackableItem(box));

		DefaultPermutationRotationIterator rotator =
				new DefaultPermutationRotationIterator(container, products);
		assertEquals(1, rotator.countRotations());
	}

	/** Iterating rotations preserves item order and each rotation fits. */
	@Test
	void testRotation() {
		Dimension container = new Dimension(null, 9, 1, 1);

		List<StackableItem> products = new ArrayList<>();
		products.add(newRod("0"));
		products.add(newRod("1"));
		products.add(newRod("2"));

		DefaultPermutationRotationIterator rotator =
				new DefaultPermutationRotationIterator(container, products);
		assertEquals(1, rotator.countRotations());

		do {
			// check order unchanged
			for(int i = 0; i < products.size(); i++) {
				assertEquals(Integer.toString(i), rotator.get(i).getStackable().getDescription());
			}

			// all rotations can fit
			for(int i = 0; i < products.size(); i++) {
				assertTrue(rotator.get(i).getValue().fitsInside3D(container));
			}
		} while(rotator.nextRotation() != -1);
	}

	/** Five distinct boxes must yield exactly 5! permutations. */
	@Test
	void testPermutations() {
		Dimension container = new Dimension(null, 9, 1, 1);

		List<StackableItem> products = new ArrayList<>();
		for(int i = 0; i < 5; i++) {
			products.add(newRod(Integer.toString(i)));
		}

		DefaultPermutationRotationIterator rotator =
				new DefaultPermutationRotationIterator(container, products);

		int count = 0;
		do {
			count++;
		} while(rotator.nextPermutation() != -1);

		assertEquals(5 * 4 * 3 * 2 * 1, count);
	}

	/**
	 * nextPermutation() returns the index of the first element that changed;
	 * verify it against a manual diff of the permutation arrays.
	 */
	@Test
	void testPermutationDifference() {
		Dimension container = new Dimension(null, 9, 1, 1);

		List<StackableItem> products = new ArrayList<>();
		for(int i = 0; i < 4; i++) {
			products.add(newRod(Integer.toString(i)));
		}

		DefaultPermutationRotationIterator rotator =
				new DefaultPermutationRotationIterator(container, products);

		int count = 0;
		do {
			count++;
			int[] permutations = cloneArray(rotator.getPermutations());

			int length = rotator.nextPermutation();
			if(length == -1) {
				break;
			}
			assertThat(firstDiffIndex(permutations, rotator.getPermutations())).isEqualTo(length);
		} while(true);

		assertEquals(4 * 3 * 2 * 1, count);
	}

	/** Returns the first index at which the arrays differ, or -1 if equal. */
	public static int firstDiffIndex(int[] a, int[] b) {
		for(int i = 0; i < a.length; i++) {
			if(a[i] != b[i]) {
				return i;
			}
		}
		return -1;
	}

	/** Returns a defensive copy of the permutation array. */
	public static int[] cloneArray(int[] permutations) {
		int[] clone = new int[permutations.length];
		System.arraycopy(permutations, 0, clone, 0, permutations.length);
		return clone;
	}

	/** Identical boxes reduce the permutation count (multinomial). */
	@Test
	void testPermutationsWithMultipleBoxes() {
		Dimension container = new Dimension(null, 9, 1, 1);

		List<StackableItem> products = new ArrayList<>();
		products.add(newRod("0", 2));
		products.add(newRod("1", 4));

		DefaultPermutationRotationIterator rotator =
				new DefaultPermutationRotationIterator(container, products);

		int count = 0;
		do {
			count++;
		} while(rotator.nextPermutation() != -1);

		// 6 items total, of which 4 and 2 are indistinguishable: 6!/(4!*2!)
		assertEquals((6 * 5 * 4 * 3 * 2 * 1) / ((4 * 3 * 2 * 1) * (2 * 1)), count);
	}

	/** length() reports the total number of individual boxes, not items. */
	@Test
	void testCounts() {
		List<StackableItem> products = new ArrayList<>();
		products.add(new StackableItem(Box.newBuilder().withRotate3D().withSize(5, 10, 10)
				.withDescription("0").withWeight(1).build(), 2));
		products.add(new StackableItem(Box.newBuilder().withRotate3D().withSize(5, 10, 10)
				.withDescription("1").withWeight(1).build(), 2));

		int n = 4;
		Dimension container = new Dimension(null, 5 * n, 10, 10);

		DefaultPermutationRotationIterator rotator =
				new DefaultPermutationRotationIterator(container, products);

		assertEquals(4, rotator.length());
	}

	/**
	 * With n distinct boxes, n! overflows a long for n = 25, which the
	 * iterator reports as -1.
	 */
	private static void assertPermutationCountOverflows(int n) {
		Dimension container = new Dimension(null, 5 * n, 10, 10);

		List<StackableItem> products = new ArrayList<>();
		for(int k = 0; k < n; k++) {
			products.add(new StackableItem(Box.newBuilder().withRotate3D().withSize(5, 10, 10)
					.withWeight(1).build(), 1));
		}

		DefaultPermutationRotationIterator iterator =
				new DefaultPermutationRotationIterator(container, products);
		assertEquals(-1L, iterator.countPermutations());
	}

	@Test
	void testCountPermutations1() {
		assertPermutationCountOverflows(25);
	}

	// NOTE(review): this test was byte-identical to testCountPermutations1 in
	// the original; both are kept (sharing one helper) so the test count is
	// unchanged, but one of them was likely intended to cover a different
	// scenario — confirm against upstream.
	@Test
	void testCountPermutations2() {
		assertPermutationCountOverflows(25);
	}

	/** Pruning the first 3 permutations must advance the permutation vector. */
	@Test
	void testRemovePermutations1() {
		Dimension container = new Dimension(null, 9, 1, 1);

		List<StackableItem> products = new ArrayList<>();
		for(int i = 0; i < 5; i++) {
			products.add(newRod(Integer.toString(i)));
		}

		DefaultPermutationRotationIterator rotator =
				new DefaultPermutationRotationIterator(container, products);

		rotator.removePermutations(3);

		int[] permutations = rotator.getPermutations();
		assertEquals(3, permutations[0]);
		assertEquals(4, permutations[1]);
	}

	/** Removing specific indices must leave the remaining ones in order. */
	@Test
	void testRemovePermutations2() {
		Dimension container = new Dimension(null, 9, 1, 1);

		List<StackableItem> products = new ArrayList<>();
		for(int i = 0; i < 5; i++) {
			products.add(newRod(Integer.toString(i)));
		}

		DefaultPermutationRotationIterator rotator =
				new DefaultPermutationRotationIterator(container, products);

		List<Integer> remove = new ArrayList<>();
		remove.add(2);
		remove.add(4);
		rotator.removePermutations(remove);

		int[] permutations = rotator.getPermutations();
		assertEquals(0, permutations[0]);
		assertEquals(1, permutations[1]);
		assertEquals(3, permutations[2]);
	}
}
/*
 * Copyright 2017 The Android Things Samples Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.receyecle.app.env;

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.media.Image;
import android.os.Environment;

import junit.framework.Assert;

import java.io.File;
import java.io.FileOutputStream;
import java.nio.ByteBuffer;

/**
 * Utility class for manipulating images.
 **/
public class ImageUtils {
    @SuppressWarnings("unused")
    private static final Logger LOGGER = new Logger();

    // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
    // are normalized to eight bits.
    static final int kMaxChannelValue = 262143;

    /**
     * Utility method to compute the allocated size in bytes of a YUV420SP image
     * of the given dimensions.
     *
     * @param width  image width in pixels
     * @param height image height in pixels
     * @return total byte size of the Y plane plus the interleaved UV plane
     */
    public static int getYUVByteSize(final int width, final int height) {
        // The luminance plane requires 1 byte per pixel.
        final int ySize = width * height;

        // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up.
        // Each 2x2 block takes 2 bytes to encode, one each for U and V.
        final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;

        return ySize + uvSize;
    }

    /**
     * Saves a Bitmap object to disk (as {@code <ext>/tensorflow/preview.png})
     * for analysis. Failures are logged, never thrown.
     *
     * @param bitmap The bitmap to save.
     */
    public static void saveBitmap(final Bitmap bitmap) {
        final String root =
                Environment.getExternalStorageDirectory().getAbsolutePath()
                        + File.separator + "tensorflow";
        LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root);
        final File myDir = new File(root);

        // mkdirs() also returns false when the directory already exists, so
        // only log a failure when the path is genuinely not a directory.
        if (!myDir.mkdirs() && !myDir.isDirectory()) {
            LOGGER.i("Make dir failed");
        }

        final String fname = "preview.png";
        final File file = new File(myDir, fname);
        if (file.exists()) {
            file.delete();
        }
        // try-with-resources closes the stream even when compress()/flush()
        // throws; the original code leaked the FileOutputStream on error.
        try (FileOutputStream out = new FileOutputStream(file)) {
            // The quality argument is ignored for the lossless PNG format; the
            // value is kept for parity with the upstream sample.
            bitmap.compress(Bitmap.CompressFormat.PNG, 99, out);
            out.flush();
        } catch (final Exception e) {
            LOGGER.e(e, "Exception!");
        }
    }

    /**
     * Converts a YUV_420_888 camera {@link Image} into ARGB_8888 pixels.
     *
     * @param output         destination pixel buffer, one int per pixel
     * @param cachedYuvBytes scratch plane buffers, reused across calls when
     *                       correctly sized. NOTE(review): reassigning this
     *                       parameter on null/mismatch does not propagate to
     *                       the caller, so the cache is silently rebuilt each
     *                       call in that case — confirm callers pass a
     *                       pre-sized byte[3][].
     * @return {@code output}, for chaining
     */
    public static int[] convertImageToBitmap(Image image, int[] output, byte[][] cachedYuvBytes) {
        if (cachedYuvBytes == null || cachedYuvBytes.length != 3) {
            cachedYuvBytes = new byte[3][];
        }
        Image.Plane[] planes = image.getPlanes();
        fillBytes(planes, cachedYuvBytes);

        final int yRowStride = planes[0].getRowStride();
        final int uvRowStride = planes[1].getRowStride();
        final int uvPixelStride = planes[1].getPixelStride();

        convertYUV420ToARGB8888(cachedYuvBytes[0], cachedYuvBytes[1], cachedYuvBytes[2],
                image.getWidth(), image.getHeight(), yRowStride, uvRowStride, uvPixelStride, output);
        return output;
    }

    /**
     * Converts raw YUV420 plane data into packed ARGB_8888 ints.
     *
     * @param out destination, must hold width*height ints
     */
    public static void convertYUV420ToARGB8888(byte[] yData, byte[] uData, byte[] vData,
                                               int width, int height, int yRowStride,
                                               int uvRowStride, int uvPixelStride, int[] out) {
        int i = 0;
        for (int y = 0; y < height; y++) {
            int pY = yRowStride * y;
            // U and V are subsampled 2x2, so both index the same row block.
            int uv_row_start = uvRowStride * (y >> 1);
            int pU = uv_row_start;
            int pV = uv_row_start;

            for (int x = 0; x < width; x++) {
                int uv_offset = (x >> 1) * uvPixelStride;
                out[i++] = YUV2RGB(
                        convertByteToInt(yData, pY + x),
                        convertByteToInt(uData, pU + uv_offset),
                        convertByteToInt(vData, pV + uv_offset));
            }
        }
    }

    /** Reads an unsigned byte as an int in [0, 255]. */
    private static int convertByteToInt(byte[] arr, int pos) {
        return arr[pos] & 0xFF;
    }

    /**
     * Converts one YUV sample triple to a packed ARGB pixel using
     * fixed-point integer arithmetic.
     */
    private static int YUV2RGB(int nY, int nU, int nV) {
        nY -= 16;
        nU -= 128;
        nV -= 128;
        if (nY < 0) nY = 0;

        // This is the floating point equivalent. We do the conversion in integer
        // because some Android devices do not have floating point in hardware.
        // nR = (int)(1.164 * nY + 2.018 * nU);
        // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
        // nB = (int)(1.164 * nY + 1.596 * nV);
        int nR = 1192 * nY + 1634 * nV;
        int nG = 1192 * nY - 833 * nV - 400 * nU;
        int nB = 1192 * nY + 2066 * nU;

        // Clamp to the 18-bit fixed-point range before narrowing to 8 bits.
        nR = Math.min(kMaxChannelValue, Math.max(0, nR));
        nG = Math.min(kMaxChannelValue, Math.max(0, nG));
        nB = Math.min(kMaxChannelValue, Math.max(0, nB));

        nR = (nR >> 10) & 0xff;
        nG = (nG >> 10) & 0xff;
        nB = (nB >> 10) & 0xff;

        return 0xff000000 | (nR << 16) | (nG << 8) | nB;
    }

    /** Copies each image plane into the matching (re)sized scratch buffer. */
    private static void fillBytes(final Image.Plane[] planes, final byte[][] yuvBytes) {
        // Because of the variable row stride it's not possible to know in
        // advance the actual necessary dimensions of the yuv planes.
        for (int i = 0; i < planes.length; ++i) {
            final ByteBuffer buffer = planes[i].getBuffer();
            if (yuvBytes[i] == null || yuvBytes[i].length != buffer.capacity()) {
                yuvBytes[i] = new byte[buffer.capacity()];
            }
            buffer.get(yuvBytes[i]);
        }
    }

    /**
     * Draws the center square of {@code src}, scaled and rotated by
     * {@code sensorOrientation} degrees, into the square bitmap {@code dst}.
     */
    public static void cropAndRescaleBitmap(final Bitmap src, final Bitmap dst,
                                            int sensorOrientation) {
        // NOTE(review): junit.framework.Assert in production code is a smell —
        // consider an explicit IllegalArgumentException instead.
        Assert.assertEquals(dst.getWidth(), dst.getHeight());
        final float minDim = Math.min(src.getWidth(), src.getHeight());

        final Matrix matrix = new Matrix();

        // We only want the center square out of the original rectangle.
        final float translateX = -Math.max(0, (src.getWidth() - minDim) / 2);
        final float translateY = -Math.max(0, (src.getHeight() - minDim) / 2);
        matrix.preTranslate(translateX, translateY);

        final float scaleFactor = dst.getHeight() / minDim;
        matrix.postScale(scaleFactor, scaleFactor);

        // Rotate around the center if necessary.
        if (sensorOrientation != 0) {
            matrix.postTranslate(-dst.getWidth() / 2.0f, -dst.getHeight() / 2.0f);
            matrix.postRotate(sensorOrientation);
            matrix.postTranslate(dst.getWidth() / 2.0f, dst.getHeight() / 2.0f);
        }

        final Canvas canvas = new Canvas(dst);
        canvas.drawBitmap(src, matrix, null);
    }

    /**
     * Builds a transformation matrix mapping a src frame to a dst frame.
     *
     * @param applyRotation       rotation in degrees; expected to be a
     *                            multiple of 90
     * @param maintainAspectRatio if true, scale uniformly to fill dst
     *                            (content may be cropped)
     * @return the transformation matrix
     */
    public static Matrix getTransformationMatrix(
            final int srcWidth, final int srcHeight,
            final int dstWidth, final int dstHeight,
            final int applyRotation, final boolean maintainAspectRatio) {
        final Matrix matrix = new Matrix();

        if (applyRotation != 0) {
            // Translate so center of image is at origin.
            matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);

            // Rotate around origin.
            matrix.postRotate(applyRotation);
        }

        // Account for the already applied rotation, if any, and then determine how
        // much scaling is needed for each axis.
        final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;

        final int inWidth = transpose ? srcHeight : srcWidth;
        final int inHeight = transpose ? srcWidth : srcHeight;

        // Apply scaling if necessary.
        if (inWidth != dstWidth || inHeight != dstHeight) {
            final float scaleFactorX = dstWidth / (float) inWidth;
            final float scaleFactorY = dstHeight / (float) inHeight;

            if (maintainAspectRatio) {
                // Scale by minimum factor so that dst is filled completely while
                // maintaining the aspect ratio. Some image may fall off the edge.
                final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
                matrix.postScale(scaleFactor, scaleFactor);
            } else {
                // Scale exactly to fill dst from src.
                matrix.postScale(scaleFactorX, scaleFactorY);
            }
        }

        if (applyRotation != 0) {
            // Translate back from origin centered reference to destination frame.
            matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
        }

        return matrix;
    }
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.apple.toolchain;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertFalse;

import com.dd.plist.NSArray;
import com.dd.plist.NSObject;
import com.dd.plist.NSString;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.hash.HashCode;
import java.nio.file.Paths;
import java.util.Date;
import java.util.Optional;
import org.junit.Test;

/**
 * Tests for {@link ProvisioningProfileStore}: selection of the best
 * provisioning profile by app ID (exact vs. wildcard vs. prefix),
 * expiration, entitlement matching, developer-certificate fingerprints,
 * UUID lookup, and mismatch diagnostics.
 */
public class ProvisioningProfileStoreTest {

  /** Builds test metadata with no entitlements and no fingerprints. */
  private static ProvisioningProfileMetadata makeTestMetadata(
      String appID, Date expirationDate, String uuid) {
    return makeTestMetadata(appID, expirationDate, uuid, ImmutableMap.of());
  }

  /** Builds test metadata with entitlements but no fingerprints. */
  private static ProvisioningProfileMetadata makeTestMetadata(
      String appID, Date expirationDate, String uuid, ImmutableMap<String, NSObject> entitlements) {
    return makeTestMetadata(appID, expirationDate, uuid, entitlements, ImmutableSet.of());
  }

  /**
   * Builds test metadata with a dummy profile path.
   *
   * @param appID prefixed bundle ID pattern, e.g. {@code "AAAAAAAAAA.*"}
   * @param expirationDate profiles past this date are considered expired
   * @param uuid profile UUID used for lookup
   * @param entitlements entitlement key/value pairs carried by the profile
   * @param fingerprints developer certificate fingerprints the profile accepts
   */
  private static ProvisioningProfileMetadata makeTestMetadata(
      String appID,
      Date expirationDate,
      String uuid,
      ImmutableMap<String, NSObject> entitlements,
      ImmutableSet<HashCode> fingerprints) {
    return ImmutableProvisioningProfileMetadata.builder()
        .setAppID(ProvisioningProfileMetadata.splitAppID(appID))
        .setExpirationDate(expirationDate)
        .setUUID(uuid)
        .setProfilePath(Paths.get("dummy.mobileprovision"))
        .setEntitlements(entitlements)
        .setDeveloperCertificateFingerprints(fingerprints)
        .build();
  }

  /** Wraps the given profiles in a store backed by a constant supplier. */
  private static ProvisioningProfileStore createStorefromProvisioningProfiles(
      Iterable<ProvisioningProfileMetadata> profiles) {
    return ProvisioningProfileStore.of(Suppliers.ofInstance(ImmutableList.copyOf(profiles)));
  }

  /** A profile that expired at the epoch must never be selected. */
  @Test
  public void testExpiredProfilesAreIgnored() throws Exception {
    ProvisioningProfileStore profiles =
        createStorefromProvisioningProfiles(
            ImmutableList.of(
                makeTestMetadata(
                    "AAAAAAAAAA.*", new Date(0), "00000000-0000-0000-0000-000000000000")));

    Optional<ProvisioningProfileMetadata> actual =
        profiles.getBestProvisioningProfile(
            "com.facebook.test",
            ApplePlatform.IPHONEOS,
            ProvisioningProfileStore.MATCH_ANY_ENTITLEMENT,
            ProvisioningProfileStore.MATCH_ANY_IDENTITY,
            new StringBuffer());

    assertThat(actual, is(equalTo(Optional.empty())));
  }

  /**
   * When the requested entitlements reference team prefix "AAAAAAAAAA", the
   * wildcard profile for that prefix must win over an exact bundle-ID match
   * under a different prefix.
   */
  @Test
  public void testPrefixOverride() throws Exception {
    ProvisioningProfileMetadata expected =
        makeTestMetadata(
            "AAAAAAAAAA.*", new Date(Long.MAX_VALUE), "00000000-0000-0000-0000-000000000000");

    ProvisioningProfileStore profiles =
        createStorefromProvisioningProfiles(
            ImmutableList.of(
                expected,
                makeTestMetadata(
                    "BBBBBBBBBB.com.facebook.test",
                    new Date(Long.MAX_VALUE),
                    "00000000-0000-0000-0000-000000000000")));

    NSString[] fakeKeychainAccessGroups = {new NSString("AAAAAAAAAA.*")};
    ImmutableMap<String, NSObject> fakeEntitlements =
        ImmutableMap.of("keychain-access-groups", new NSArray(fakeKeychainAccessGroups));

    Optional<ProvisioningProfileMetadata> actual =
        profiles.getBestProvisioningProfile(
            "com.facebook.test",
            ApplePlatform.IPHONEOS,
            Optional.of(fakeEntitlements),
            ProvisioningProfileStore.MATCH_ANY_IDENTITY,
            new StringBuffer());

    assertThat(actual.get(), is(equalTo(expected)));
  }

  /**
   * Entitlement values requested by the build must be satisfied by the
   * profile (e.g. aps-environment and application-groups), both with and
   * without keychain access groups present in the request.
   */
  @Test
  public void testEntitlementKeysAreMatched() {
    NSString[] fakeKeychainAccessGroups = {new NSString("AAAAAAAAAA.*")};
    NSArray fakeKeychainAccessGroupsArray = new NSArray(fakeKeychainAccessGroups);

    // Development profile: aps-environment=development, groups {foo, bar}.
    ImmutableMap<String, NSObject> fakeDevelopmentEntitlements =
        ImmutableMap.of(
            "keychain-access-groups",
            fakeKeychainAccessGroupsArray,
            "aps-environment",
            new NSString("development"),
            "com.apple.security.application-groups",
            new NSArray(new NSString("foo"), new NSString("bar")));

    // Production profile: aps-environment=production, groups {foo, bar, baz}
    // (a superset, so requests for {foo, bar} still match).
    ImmutableMap<String, NSObject> fakeProductionEntitlements =
        ImmutableMap.of(
            "keychain-access-groups",
            fakeKeychainAccessGroupsArray,
            "aps-environment",
            new NSString("production"),
            "com.apple.security.application-groups",
            new NSArray(new NSString("foo"), new NSString("bar"), new NSString("baz")));

    ProvisioningProfileMetadata expected =
        makeTestMetadata(
            "AAAAAAAAAA.com.facebook.test",
            new Date(Long.MAX_VALUE),
            "11111111-1111-1111-1111-111111111111",
            fakeProductionEntitlements);

    ProvisioningProfileStore profiles =
        createStorefromProvisioningProfiles(
            ImmutableList.of(
                makeTestMetadata(
                    "AAAAAAAAAA.com.facebook.test",
                    new Date(Long.MAX_VALUE),
                    "00000000-0000-0000-0000-000000000000",
                    fakeDevelopmentEntitlements),
                expected));

    // Requesting production entitlements selects the production profile.
    Optional<ProvisioningProfileMetadata> actual =
        profiles.getBestProvisioningProfile(
            "com.facebook.test",
            ApplePlatform.IPHONEOS,
            Optional.of(
                ImmutableMap.of(
                    "keychain-access-groups",
                    fakeKeychainAccessGroupsArray,
                    "aps-environment",
                    new NSString("production"),
                    "com.apple.security.application-groups",
                    new NSArray(new NSString("foo"), new NSString("bar")))),
            ProvisioningProfileStore.MATCH_ANY_IDENTITY,
            new StringBuffer());

    assertThat(actual.get(), is(equalTo(expected)));

    // An application group ("xxx") not covered by any profile must not match.
    actual =
        profiles.getBestProvisioningProfile(
            "com.facebook.test",
            ApplePlatform.IPHONEOS,
            Optional.of(
                ImmutableMap.of(
                    "keychain-access-groups",
                    fakeKeychainAccessGroupsArray,
                    "aps-environment",
                    new NSString("production"),
                    "com.apple.security.application-groups",
                    new NSArray(new NSString("foo"), new NSString("xxx")))),
            ProvisioningProfileStore.MATCH_ANY_IDENTITY,
            new StringBuffer());

    assertFalse(actual.isPresent());

    // Test without keychain access groups.
    actual =
        profiles.getBestProvisioningProfile(
            "com.facebook.test",
            ApplePlatform.IPHONEOS,
            Optional.of(
                ImmutableMap.of(
                    "aps-environment",
                    new NSString("production"),
                    "com.apple.security.application-groups",
                    new NSArray(new NSString("foo"), new NSString("bar")))),
            ProvisioningProfileStore.MATCH_ANY_IDENTITY,
            new StringBuffer());

    assertThat(actual.get(), is(equalTo(expected)));

    // Again, an uncovered application group must not match.
    actual =
        profiles.getBestProvisioningProfile(
            "com.facebook.test",
            ApplePlatform.IPHONEOS,
            Optional.of(
                ImmutableMap.of(
                    "aps-environment",
                    new NSString("production"),
                    "com.apple.security.application-groups",
                    new NSArray(new NSString("foo"), new NSString("xxx")))),
            ProvisioningProfileStore.MATCH_ANY_IDENTITY,
            new StringBuffer());

    assertFalse(actual.isPresent());
  }

  /**
   * A profile is only eligible when its developer-certificate fingerprint set
   * contains the fingerprint of one of the candidate signing identities.
   */
  @Test
  public void testOnlyProfilesContainingValidFingerprintsAreMatched() {
    CodeSignIdentity validIdentity =
        CodeSignIdentity.of(
            CodeSignIdentity.toFingerprint("BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB"),
            "iPhone Developer: Foo Bar (54321EDCBA)");

    CodeSignIdentity otherIdentity =
        CodeSignIdentity.of(
            CodeSignIdentity.toFingerprint("AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"),
            "iPhone Developer: Foo Bar (ABCDE12345)");

    // Accepts both identities — eligible for validIdentity.
    ProvisioningProfileMetadata expected =
        makeTestMetadata(
            "AAAAAAAAAA.com.facebook.test",
            new Date(Long.MAX_VALUE),
            "11111111-1111-1111-1111-111111111111",
            ImmutableMap.of(),
            ImmutableSet.of(
                validIdentity.getFingerprint().get(), otherIdentity.getFingerprint().get()));

    ProvisioningProfileStore profiles =
        createStorefromProvisioningProfiles(
            ImmutableList.of(
                // Accepts only otherIdentity — must be skipped.
                makeTestMetadata(
                    "AAAAAAAAAA.com.facebook.test",
                    new Date(Long.MAX_VALUE),
                    "00000000-0000-0000-0000-000000000000",
                    ImmutableMap.of(),
                    ImmutableSet.of(otherIdentity.getFingerprint().get())),
                expected));

    Optional<ProvisioningProfileMetadata> actual =
        profiles.getBestProvisioningProfile(
            "com.facebook.test",
            ApplePlatform.IPHONEOS,
            ProvisioningProfileStore.MATCH_ANY_ENTITLEMENT,
            Optional.of(ImmutableList.of(validIdentity)),
            new StringBuffer());

    assertThat(actual.get(), is(equalTo(expected)));
  }

  /** Profiles can be looked up directly by UUID. */
  @Test
  public void testGetByUUID() throws Exception {
    ProvisioningProfileMetadata expected =
        makeTestMetadata(
            "BBBBBBBBBB.*", new Date(Long.MAX_VALUE), "11111111-1111-1111-1111-111111111111");

    ProvisioningProfileStore profiles =
        createStorefromProvisioningProfiles(
            ImmutableList.of(
                expected,
                makeTestMetadata(
                    "BBBBBBBBBB.com.facebook.test",
                    new Date(Long.MAX_VALUE),
                    "00000000-0000-0000-0000-000000000000")));

    Optional<ProvisioningProfileMetadata> actual =
        profiles.getProvisioningProfileByUUID("11111111-1111-1111-1111-111111111111");

    assertThat(actual.get(), is(equalTo(expected)));
  }

  /** An exact bundle-ID profile beats a wildcard profile for the same app. */
  @Test
  public void testMatchesSpecificApp() throws Exception {
    ProvisioningProfileMetadata expected =
        makeTestMetadata(
            "BBBBBBBBBB.com.facebook.test",
            new Date(Long.MAX_VALUE),
            "00000000-0000-0000-0000-000000000000");

    ProvisioningProfileStore profiles =
        createStorefromProvisioningProfiles(
            ImmutableList.of(
                expected,
                makeTestMetadata(
                    "BBBBBBBBBB.com.facebook.*",
                    new Date(Long.MAX_VALUE),
                    "11111111-1111-1111-1111-111111111111")));

    Optional<ProvisioningProfileMetadata> actual =
        profiles.getBestProvisioningProfile(
            "com.facebook.test",
            ApplePlatform.IPHONEOS,
            ProvisioningProfileStore.MATCH_ANY_ENTITLEMENT,
            ProvisioningProfileStore.MATCH_ANY_IDENTITY,
            new StringBuffer());

    assertThat(actual.get(), is(equalTo(expected)));
  }

  /** A wildcard profile matches any bundle ID under its prefix. */
  @Test
  public void testMatchesWildcard() throws Exception {
    ProvisioningProfileMetadata expected =
        makeTestMetadata(
            "BBBBBBBBBB.*", new Date(Long.MAX_VALUE), "00000000-0000-0000-0000-000000000000");

    ProvisioningProfileStore profiles =
        createStorefromProvisioningProfiles(ImmutableList.of(expected));

    Optional<ProvisioningProfileMetadata> actual =
        profiles.getBestProvisioningProfile(
            "com.facebook.test",
            ApplePlatform.IPHONEOS,
            ProvisioningProfileStore.MATCH_ANY_ENTITLEMENT,
            ProvisioningProfileStore.MATCH_ANY_IDENTITY,
            new StringBuffer());

    assertThat(actual.get(), is(equalTo(expected)));
  }

  /**
   * When no profile matches, the diagnostics buffer explains each
   * entitlement mismatch (actual vs. expected value).
   */
  @Test
  public void testDiagnostics() {
    NSString[] fakeKeychainAccessGroups = {new NSString("AAAAAAAAAA.*")};
    NSArray fakeKeychainAccessGroupsArray = new NSArray(fakeKeychainAccessGroups);

    ImmutableMap<String, NSObject> fakeDevelopmentEntitlements =
        ImmutableMap.of(
            "keychain-access-groups",
            fakeKeychainAccessGroupsArray,
            "aps-environment",
            new NSString("development"),
            "com.apple.security.application-groups",
            new NSArray(new NSString("foobar"), new NSString("bar")));

    ProvisioningProfileStore profiles =
        createStorefromProvisioningProfiles(
            ImmutableList.of(
                makeTestMetadata(
                    "AAAAAAAAAA.com.facebook.test",
                    new Date(Long.MAX_VALUE),
                    "00000000-0000-0000-0000-000000000000",
                    fakeDevelopmentEntitlements)));

    StringBuffer diagnosticsBuffer = new StringBuffer();

    // Request production entitlements so both aps-environment and the
    // application groups mismatch.
    Optional<ProvisioningProfileMetadata> actual =
        profiles.getBestProvisioningProfile(
            "com.facebook.test",
            ApplePlatform.IPHONEOS,
            Optional.of(
                ImmutableMap.of(
                    "keychain-access-groups",
                    fakeKeychainAccessGroupsArray,
                    "aps-environment",
                    new NSString("production"),
                    "com.apple.security.application-groups",
                    new NSArray(new NSString("foo"), new NSString("bar")))),
            ProvisioningProfileStore.MATCH_ANY_IDENTITY,
            diagnosticsBuffer);

    String diagnostics = diagnosticsBuffer.toString();
    assertThat(
        diagnostics,
        containsString(
            "mismatched entitlement aps-environment;"
                + System.lineSeparator()
                + "value is: development"
                + System.lineSeparator()
                + "but expected: production"));
    assertThat(
        diagnostics,
        containsString(
            "mismatched entitlement com.apple.security.application-groups;"
                + System.lineSeparator()
                + "value is: (\"foobar\", \"bar\")"
                + System.lineSeparator()
                + "but expected: (\"foo\", \"bar\")"));
    assertFalse(actual.isPresent());
  }
}
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package org.openqa.selenium.firefox;

import junit.framework.TestCase;

import org.openqa.selenium.Proxy;
import org.openqa.selenium.internal.InProject;
import org.openqa.selenium.io.FileHandler;
import org.openqa.selenium.io.TemporaryFilesystem;
import org.openqa.selenium.io.Zip;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;

/**
 * Unit tests for {@link FirefoxProfile}: serialization of preferences to the
 * generated {@code user.js}, proxy configuration, extension installation and
 * JSON round-tripping of a profile.
 */
public class FirefoxProfileTest extends TestCase {
  private static final String FIREBUG_PATH = "third_party/firebug/firebug-1.5.0-fx.xpi";

  private FirefoxProfile profile;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    profile = new FirefoxProfile();
  }

  /** String preference values must be written quoted into user.js. */
  public void testShouldQuoteStringsWhenSettingStringProperties() throws Exception {
    profile.setPreference("cheese", "brie");

    List<String> props = readGeneratedProperties(profile);
    boolean seenCheese = false;
    for (String line : props) {
      if (line.contains("cheese") && line.contains("\"brie\"")) {
        seenCheese = true;
      }
    }

    assertTrue(seenCheese);
  }

  /** Integer preference values must be written unquoted. */
  public void testShouldSetIntegerPreferences() throws Exception {
    profile.setPreference("cheese", 1234);

    List<String> props = readGeneratedProperties(profile);
    boolean seenCheese = false;
    for (String line : props) {
      if (line.contains("cheese") && line.contains(", 1234)")) {
        seenCheese = true;
      }
    }

    assertTrue("Did not see integer value being set correctly", seenCheese);
  }

  /** A fully manual proxy maps onto the individual network.proxy.* prefs with type 1. */
  public void testManualProxy() throws Exception {
    profile.setProxyPreferences(
        new Proxy()
            .setHttpProxy("foo:123")
            .setFtpProxy("bar:234")
            .setSslProxy("baz:345")
            .setNoProxy("localhost"));
    List<String> prefLines = readGeneratedProperties(profile);
    String prefs = new ArrayList<String>(prefLines).toString();
    assertThat(prefs, containsString("network.proxy.http\", \"foo\""));
    assertThat(prefs, containsString("network.proxy.http_port\", 123"));
    assertThat(prefs, containsString("network.proxy.ftp\", \"bar\""));
    assertThat(prefs, containsString("network.proxy.ftp_port\", 234"));
    assertThat(prefs, containsString("network.proxy.ssl\", \"baz\""));
    assertThat(prefs, containsString("network.proxy.ssl_port\", 345"));
    assertThat(prefs, containsString("network.proxy.no_proxies_on\", \"localhost\""));
    assertThat(prefs, containsString("network.proxy.type\", 1"));
  }

  /** A PAC URL maps onto network.proxy.autoconfig_url with type 2. */
  public void testProxyAutoconfigUrl() throws Exception {
    profile.setProxyPreferences(
        new Proxy()
            .setProxyAutoconfigUrl("http://foo/bar.pac"));
    List<String> prefLines = readGeneratedProperties(profile);
    String prefs = new ArrayList<String>(prefLines).toString();
    assertThat(prefs, containsString("network.proxy.autoconfig_url\", \"http://foo/bar.pac\""));
    assertThat(prefs, containsString("network.proxy.type\", 2"));
  }

  /** Autodetect maps onto proxy type 4. */
  public void testProxyAutodetect() throws Exception {
    profile.setProxyPreferences(
        new Proxy()
            .setAutodetect(true));
    List<String> prefLines = readGeneratedProperties(profile);
    String prefs = new ArrayList<String>(prefLines).toString();
    assertThat(prefs, containsString("network.proxy.type\", 4"));
  }

  /** Boolean preference values must be written unquoted. */
  public void testShouldSetBooleanPreferences() throws Exception {
    profile.setPreference("cheese", false);

    List<String> props = readGeneratedProperties(profile);
    boolean seenCheese = false;
    for (String line : props) {
      if (line.contains("cheese") && line.contains(", false)")) {
        seenCheese = true;
      }
    }

    // Fixed: the message previously said "integer" for this boolean check.
    assertTrue("Did not see boolean value being set correctly", seenCheese);
  }

  /** Installing an extension from an XPI zip lays it out under extensions/<id>. */
  public void testShouldInstallExtensionFromZip() throws IOException {
    FirefoxProfile profile = new FirefoxProfile();
    profile.addExtension(InProject.locate(FIREBUG_PATH));
    File profileDir = profile.layoutOnDisk();
    File extensionDir = new File(profileDir, "extensions/firebug@software.joehewitt.com");
    assertTrue(extensionDir.exists());
  }

  /** Installing an already-unzipped extension directory works the same way. */
  public void testShouldInstallExtensionFromDirectory() throws IOException {
    FirefoxProfile profile = new FirefoxProfile();
    File extension = InProject.locate(FIREBUG_PATH);
    File unzippedExtension = FileHandler.unzip(new FileInputStream(extension));
    profile.addExtension(unzippedExtension);
    File profileDir = profile.layoutOnDisk();
    File extensionDir = new File(profileDir, "extensions/firebug@software.joehewitt.com");
    assertTrue(extensionDir.exists());
  }

  /** An extension can also be loaded as a classpath resource. */
  public void testShouldInstallExtensionUsingClasspath() throws IOException {
    FirefoxProfile profile = new FirefoxProfile();
    profile.addExtension(FirefoxProfileTest.class, "/resource/firebug-1.5.0-fx.xpi");
    File profileDir = profile.layoutOnDisk();
    File extensionDir = new File(profileDir, "extensions/firebug@software.joehewitt.com");
    assertTrue(extensionDir.exists());
  }

  /** toJson() produces a zip payload that unzips back into a profile containing user.js. */
  public void testShouldConvertItselfIntoAMeaningfulRepresentation() throws IOException {
    FirefoxProfile profile = new FirefoxProfile();
    profile.setPreference("i.like.cheese", true);

    String json = profile.toJson();

    assertNotNull(json);

    File dir = TemporaryFilesystem.getDefaultTmpFS().createTempDir("webdriver", "duplicated");
    new Zip().unzip(json, dir);

    File prefs = new File(dir, "user.js");
    assertTrue(prefs.exists());

    assertTrue(FileHandler.readAsString(prefs).contains("i.like.cheese"));
  }

  // NOTE: method name typo ("AFozenPrefence") preserved; JUnit 3 discovers it by
  // the "test" prefix, and renaming could break any explicit suite() references.
  public void testCannotOverrideAFozenPrefence() {
    FirefoxProfile profile = new FirefoxProfile();
    try {
      profile.setPreference("browser.EULA.3.accepted", "foo-bar-baz");
      fail();
    } catch (IllegalArgumentException expected) {
      assertEquals(
          "Preference browser.EULA.3.accepted may not be overridden: frozen value=true, "
              + "requested value=foo-bar-baz",
          expected.getMessage());
    }
  }

  /** dom.max_script_run_time accepts 0 (infinity) or any value >= 30, nothing in between. */
  public void testCanOverrideMaxScriptRuntimeIfGreaterThanDefaultValueOrSetToInfinity() {
    FirefoxProfile profile = new FirefoxProfile();
    try {
      profile.setPreference("dom.max_script_run_time", 29);
      fail();
    } catch (IllegalArgumentException expected) {
      assertEquals("dom.max_script_run_time must be == 0 || >= 30", expected.getMessage());
    }

    profile.setPreference("dom.max_script_run_time", 31);
    profile.setPreference("dom.max_script_run_time", 0);
  }

  /**
   * Lays the profile out on disk and returns the lines of the generated user.js.
   * Fixed: the reader is now closed (it previously leaked), and the per-line
   * stdout debug print has been removed.
   */
  private List<String> readGeneratedProperties(FirefoxProfile profile) throws Exception {
    File generatedProfile = profile.layoutOnDisk();

    File prefs = new File(generatedProfile, "user.js");
    BufferedReader reader = new BufferedReader(new FileReader(prefs));
    try {
      List<String> prefLines = new ArrayList<String>();
      for (String line = reader.readLine(); line != null; line = reader.readLine()) {
        prefLines.add(line);
      }
      return prefLines;
    } finally {
      reader.close();
    }
  }

  public void testLayoutOnDiskSetsUserPreferences() throws IOException {
    profile.setPreference("browser.startup.homepage", "http://www.example.com");
    Preferences parsedPrefs = parseUserPrefs(profile);
    assertEquals("http://www.example.com", parsedPrefs.getPreference("browser.startup.homepage"));
  }

  public void testUserPrefsArePreservedWhenConvertingToAndFromJson() throws IOException {
    profile.setPreference("browser.startup.homepage", "http://www.example.com");

    String json = profile.toJson();
    FirefoxProfile rebuilt = FirefoxProfile.fromJson(json);
    Preferences parsedPrefs = parseUserPrefs(rebuilt);

    assertEquals("http://www.example.com", parsedPrefs.getPreference("browser.startup.homepage"));
  }

  /**
   * Parses the generated user.js into a Preferences object.
   * Fixed: removed a FileReader that was opened but never used and never
   * closed (file-handle leak); Preferences reads the file itself.
   */
  private Preferences parseUserPrefs(FirefoxProfile profile) throws IOException {
    File directory = profile.layoutOnDisk();
    File userPrefs = new File(directory, "user.js");
    return new Preferences(userPrefs);
  }
}
/*!
 * Copyright 2010 - 2018 Hitachi Vantara. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.pentaho.platform.pdi;

import org.apache.commons.lang.StringEscapeUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.runners.MockitoJUnitRunner;
import org.pentaho.platform.api.engine.IPlatformWebResource;

import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.WriteListener;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.doReturn;

/**
 * Tests for {@code WebContextServlet}: resource filtering by regex, the
 * document.write snippets emitted for JS/CSS resources, and the generated
 * webcontext.js content (CONTEXT_PATH, SESSION_LOCALE, requireCfg and the
 * pentaho/environment module configuration).
 *
 * The servlet is spied so path/wait-time getters can be stubbed; the HTTP
 * response's output stream is redirected into an in-memory buffer that the
 * assertions read back as UTF-8 text.
 */
@RunWith( MockitoJUnitRunner.class )
public class WebContextServletTest {

  private WebContextServlet webContextServlet;

  private IPlatformWebResource jsFile;
  private IPlatformWebResource txtFile;

  private HttpServletRequest httpRequest;
  private HttpServletResponse httpResponse;

  // Captures every byte the servlet writes to the response.
  private ByteArrayOutputStream mockResponseOutputStream;

  @Before
  public void setUp() throws Exception {
    webContextServlet = spy( new WebContextServlet() );

    jsFile = new PlatformWebResource( "analyzer", "scripts/includeMe.js" );
    txtFile = new PlatformWebResource( "analyzer", "scripts/includeMe.txt" );

    // Request stub: a webcontext.js URI with context/application/locale params.
    HttpServletRequest mockRequest = mock( HttpServletRequest.class );

    when( mockRequest.getRequestURI() ).thenReturn( "fake/uri/" + WebContextServlet.WEB_CONTEXT_JS );
    when( mockRequest.getParameter( WebContextServlet.CONTEXT ) ).thenReturn( "testContext" );
    when( mockRequest.getParameter( WebContextServlet.APPLICATION ) ).thenReturn( "testApplication" );
    when( mockRequest.getParameter( WebContextServlet.LOCALE ) ).thenReturn( "xp_TO" );

    this.httpRequest = mockRequest;

    // Response stub: funnel the servlet's output stream into the byte buffer.
    HttpServletResponse mockResponse = mock( HttpServletResponse.class );

    this.mockResponseOutputStream = new java.io.ByteArrayOutputStream();
    when( mockResponse.getOutputStream() ).thenReturn( new ServletOutputStream() {
      @Override
      public boolean isReady() {
        return true;
      }

      @Override
      public void setWriteListener(WriteListener writeListener) {
        // noop
      }

      @Override
      public void write( int b ) {
        WebContextServletTest.this.mockResponseOutputStream.write( b );
      }
    } );

    this.httpResponse = mockResponse;
  }

  // No resources registered yet, so a .js filter matches nothing.
  @Test
  public void testGetWebResources_NoMatches() throws Exception {
    List<String> webResources = webContextServlet.getWebResources( "analyzer", ".*\\.js" );

    assertNotNull( webResources );
    assertEquals( 0, webResources.size() );
  }

  // Only the .js resource should survive the regex filter, not the .txt one.
  @Test
  public void testGetWebResources_Match() throws Exception {
    webContextServlet.addPlatformWebResource( jsFile );
    webContextServlet.addPlatformWebResource( txtFile );

    List<String> webResources = webContextServlet.getWebResources( "analyzer", ".*\\.js" );

    assertNotNull( webResources );
    assertEquals( 1, webResources.size() );
    assertEquals( "scripts/includeMe.js", webResources.get( 0 ) );
  }

  // JS resources are injected via document.write of <script> tags.
  @Test
  public void testWriteWebResourcesJSToDoc() throws Exception {
    List<String> resources = new ArrayList<>();
    resources.add( "scripts/includeMe.js" );
    resources.add( "scripts/includeMeToo.js" );

    PrintWriter writer = new PrintWriter( this.mockResponseOutputStream );
    this.webContextServlet.writeWebResources( writer, resources );

    String response = getServletResponse( writer );
    resources.forEach( resource -> {
      String expected = getDocumentWriteExpected( resource );
      assertTrue( response.contains( expected ) );
    } );
  }

  // CSS resources are injected via document.write of <link> tags.
  @Test
  public void testWriteWebResourcesCssToDoc() throws Exception {
    List<String> resources = new ArrayList<>();
    resources.add( "styles/awesome.css" );

    PrintWriter writer = new PrintWriter( this.mockResponseOutputStream );
    this.webContextServlet.writeWebResources( writer, resources );

    String response = getServletResponse( writer );
    resources.forEach( resource -> {
      String expected = getDocumentWriteExpected( resource );
      assertTrue( response.contains( expected ) );
    } );
  }

  @Test
  public void testWebContextDefinesContextPath() throws ServletException, IOException {
    final String response = doGetWebContextServlet();

    String contextPath = WebContextServlet.CONTEXT_PATH;
    assertTrue( response.contains( getWebContextVarDefinition( "CONTEXT_PATH", contextPath ) ) );
  }

  @Test
  public void testWebContextDefinesSessionLocale() throws ServletException, IOException {
    String sessionLocale = "fo_BA";
    when( this.httpRequest.getParameter( "locale" ) ).thenReturn( sessionLocale );

    final String response = doGetWebContextServlet();

    assertTrue( response.contains( getWebContextVarDefinition( "SESSION_LOCALE", sessionLocale ) ) );
  }

  // The generated requireCfg skeleton must embed the stubbed wait time.
  @Test
  public void testDoGetDefinesRequireCfg() throws ServletException, IOException {
    Integer waitTime = 1337;
    doReturn( waitTime ).when( this.webContextServlet ).getRequireWaitTime();

    String response = doGetWebContextServlet();

    String expected = "var requireCfg = {" + "\n waitSeconds: " + waitTime + ","
        + "\n paths: {}," + "\n shim: {}," + "\n map: { \"*\": {} },"
        + "\n bundles: {}," + "\n config: { \"pentaho/modules\": {} },"
        + "\n packages: []" + "\n}";

    assertTrue( response.contains( expected ) );
  }

  // The pentaho/environment module config must reflect the stubbed server
  // root/packages/services URLs plus the request's application and locale.
  @Test
  public void testWebContextDefinesPentahoEnvironmentModuleConfig() throws ServletException, IOException {
    String mockRoot = "/root/";
    doReturn( mockRoot ).when( this.webContextServlet ).getServerRoot();

    String mockServerPackages = mockRoot + "osgi/";
    doReturn( mockServerPackages ).when( this.webContextServlet ).getServerPackages();

    String mockServices = mockRoot + "services/";
    doReturn( mockServices ).when( this.webContextServlet ).getServerServices();

    String serverRoot = escapeEnvironmentVariable( mockRoot );
    String serverPackages = escapeEnvironmentVariable( mockServerPackages );
    String serverServices = escapeEnvironmentVariable( mockServices );

    String sessionLocale = "fo_BA";
    when( this.httpRequest.getParameter( "locale" ) ).thenReturn( sessionLocale );

    String application = "testApplication";
    when( this.httpRequest.getParameter( "application" ) ).thenReturn( application );

    final String response = doGetWebContextServlet();

    String environmentModuleConfig = "\nrequireCfg.config[\"pentaho/environment\"] = {"
        + "\n application: \"" + application + "\","
        + "\n theme: null,"
        + "\n locale: \"" + sessionLocale + "\","
        + "\n user: {"
        + "\n id: null,"
        + "\n home: null"
        + "\n },"
        + "\n server: {"
        + "\n root: " + serverRoot + ","
        + "\n packages: " + serverPackages + ","
        + "\n services: " + serverServices
        + "\n },"
        + "\n reservedChars: null"
        + "\n}";

    assertTrue( response.contains( environmentModuleConfig ) );
  }

  // region Auxiliary Methods
  // Drives a full doGet against the mocks and returns the captured output.
  private String doGetWebContextServlet() throws ServletException, IOException {
    this.webContextServlet.doGet( this.httpRequest, this.httpResponse );
    return getServletResponse();
  }

  private String getServletResponse() throws IOException {
    return getServletResponse( null );
  }

  // Flushes the optional writer first so buffered output reaches the byte stream.
  private String getServletResponse( PrintWriter writer ) throws IOException {
    if ( writer != null ) {
      writer.flush();
    }

    return this.mockResponseOutputStream.toString( "UTF-8" );
  }

  // Builds the deprecated global-variable declaration the servlet emits.
  private String getWebContextVarDefinition( String variable, String value ) {
    String escapedValue = escapeEnvironmentVariable( value );

    return "\n/** @deprecated - use 'pentaho/environment' module's variable instead */"
        + "\nvar " + variable + " = " + escapedValue + ";";
  }

  // Expected document.write line: <script> for .js resources, <link> otherwise.
  private String getDocumentWriteExpected( String resource ) {
    String location = "'\" + CONTEXT_PATH + \"" + resource + "'";

    if ( resource.endsWith( ".js" ) ) {
      return "document.write(\"<script type='text/javascript' src=" + location + "></scr\" + \"ipt>\");\n";
    } else {
      return "document.write(\"<link rel='stylesheet' type='text/css' href=" + location + ">\");\n";
    }
  }

  // JS-escapes and double-quotes a value for embedding in generated script.
  private String escapeEnvironmentVariable( String value ) {
    return "\"" + StringEscapeUtils.escapeJavaScript( value ) + "\"";
  }
  // endregion
}
/*
 * Copyright 2017 FBK/CREATE-NET
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.createnet.raptor.sdk.events;

import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import org.createnet.raptor.models.acl.EntityType;
import org.createnet.raptor.models.acl.Operation;
import org.createnet.raptor.models.auth.Permission;
import org.createnet.raptor.models.auth.Token;
import org.createnet.raptor.sdk.Raptor;
import org.createnet.raptor.sdk.Utils;
import org.createnet.raptor.sdk.events.callback.ActionCallback;
import org.createnet.raptor.sdk.events.callback.DataCallback;
import org.createnet.raptor.sdk.events.callback.DeviceCallback;
import org.createnet.raptor.models.data.RecordSet;
import org.createnet.raptor.models.objects.Action;
import org.createnet.raptor.models.objects.Device;
import org.createnet.raptor.models.objects.Stream;
import org.createnet.raptor.models.payload.ActionPayload;
import org.createnet.raptor.models.payload.DevicePayload;
import org.createnet.raptor.models.payload.TreeNodePayload;
import org.createnet.raptor.models.tree.TreeNode;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Integration tests for the Raptor SDK event listeners: device, tree, stream
 * data and action events, plus permission-token based subscriptions.
 *
 * Each test subscribes a callback, triggers the matching event through the
 * API, and waits (with a timeout) for the callback to flip an atomic flag or
 * counter.
 *
 * @author Luca Capra <luca.capra@fbk.eu>
 */
public class EventListenerTest {

    final Logger log = LoggerFactory.getLogger(EventListenerTest.class);

    public static Device device;

    @BeforeClass
    public static void setUpClass() {
    }

    @AfterClass
    public static void tearDownClass() {
    }

    @Before
    public void setUp() {
    }

    /**
     * Builds an unsaved test device with three actions (switch, dimming,
     * battery), two static streams (test, test2) and one dynamic stream
     * (testdyn).
     */
    private Device newDevice(String name) {
        Device d = new Device();
        d.name(name);

        d.addAction("switch");
        d.addAction("dimming");
        d.addAction("battery");

        d.addStream("test", "string", "string");
        d.addStream("test2", "foo", "boolean");

        Stream stream = Stream.create("testdyn");
        stream.setDynamic(true);
        stream.setDevice(d);
        d.streams().put(stream.name(), stream);

        Assert.assertEquals(3, d.actions().size());
        Assert.assertEquals(3, d.streams().size());

        log.debug("Creating {} device", d.name());
        return d;
    }

    @After
    public void tearDown() {
    }

    /**
     * Pushes a few records on the device's "test2" stream with timestamps
     * spread over the last second.
     */
    private void pushData(Device dev) {
        Raptor raptor = Utils.getRaptor();
        Stream stream = dev.stream("test2");
        RecordSet record = new RecordSet(stream);
        record.channel("foo", true);
        raptor.Stream().push(stream, record);
        record.timestamp(new Date(Instant.now().toEpochMilli() - 1000 + 200));
        raptor.Stream().push(stream, record);
        record.timestamp(new Date(Instant.now().toEpochMilli() - 1000 + 400));
        raptor.Stream().push(stream, record);
        record.timestamp(new Date(Instant.now().toEpochMilli() - 1000 + 600));
        raptor.Stream().push(stream, record);
        // NOTE(review): this last timestamp is set but never pushed — looks
        // like a missing push(stream, record); confirm intent before changing,
        // as some tests count received records.
        record.timestamp(new Date(Instant.now().toEpochMilli() - 1000 + 800));
    }

    /** Updating a device must fire a device event to its subscriber. */
    @Test
    public void watchDeviceEvents() {
        final AtomicBoolean done = new AtomicBoolean(false);
        Raptor raptor = Utils.getRaptor();
        log.debug("watch data events");
        final Device dev = Utils.createDevice(newDevice("dev"));
        raptor.Inventory().subscribe(dev, new DeviceCallback() {
            @Override
            public void callback(Device obj, DevicePayload message) {
                log.debug("Device event received {}", message.toString());
                Assert.assertEquals(obj.id(), dev.id());
                done.set(true);
            }
        });

        dev.addStream("test2", "foo", "boolean");
        dev.addAction("sleep");
        raptor.Inventory().update(dev);

        Utils.waitUntil(5, () -> !done.get());
    }

    /** Updating a device attached to a tree node must notify the node's subscriber. */
    @Test
    public void watchDeviceTreeEvents() {
        final AtomicBoolean done = new AtomicBoolean(false);
        Raptor raptor = Utils.getRaptor();
        log.debug("watch device tree events");

        final TreeNode node = raptor.Tree().create(TreeNode.create("parent"));
        raptor.Tree().subscribe(node, (TreeNode node1, TreeNodePayload message) -> {
            log.debug("TreeNode event received {}", message.toString());
            Assert.assertEquals(node1.getId(), node.getId());
            done.set(true);
        });

        final Device dev = raptor.Inventory().create(newDevice("dev"));
        raptor.Tree().add(node, dev);

        dev.addStream("test2", "foo", "boolean");
        dev.addAction("sleep");
        raptor.Inventory().update(dev);

        Utils.waitUntil(5, () -> !done.get());
    }

    /**
     * Stream data pushed on devices attached to tree nodes must reach each
     * node's data subscriber exactly once per device.
     */
    @Test
    public void watchDeviceTreeStreamData() {
        final AtomicInteger done = new AtomicInteger(0);
        Raptor r = Utils.getRaptor();

        Token token = r.Admin().Token()
                .create(new Token("test", "secret" + System.currentTimeMillis() * Math.random()));
        Raptor raptor = new Raptor(r.getConfig().getUrl(), token.getToken());

        final int len = 5;
        // Fixed: use the diamond operator instead of raw ArrayList types.
        List<Device> devices = new ArrayList<>();
        List<TreeNode> nodes = new ArrayList<>();
        for (int i = 0; i < len; i++) {
            Device dev = newDevice("dev" + i);
            dev.addStream("test2", "foo", "boolean");
            dev.addAction("sleep");
            raptor.Inventory().create(dev);
            TreeNode node = raptor.Tree().create(TreeNode.create("parent1"));
            raptor.Tree().add(node, dev);
            nodes.add(node);
            devices.add(dev);
        }

        for (int i = 0; i < len; i++) {
            final int ii = i;
            raptor.Tree().subscribe(nodes.get(i), new DataCallback() {
                @Override
                public void callback(Stream stream, RecordSet record) {
                    log.debug("Node {} event received {}", ii, record.toString());
                    done.addAndGet(1);
                }
            });
        }

        devices.forEach((d) -> {
            RecordSet record = new RecordSet(d.stream("test2"));
            record.channel("foo", true);
            raptor.Stream().push(record);
        });

        Utils.waitUntil(15, () -> done.get() != len);
    }

    /**
     * A device update deep in the tree must propagate an event to subscribers
     * at every ancestor level (root, child1, child2, child3).
     */
    @Test
    public void watchDeepTreeEvents() {
        Raptor raptor = Utils.getRaptor();
        log.debug("watch device tree events");

        final AtomicInteger eventsReceived = new AtomicInteger(4);

        final TreeNode root = raptor.Tree().create(TreeNode.create("root"));
        final TreeNode child1 = raptor.Tree().addChild(root, TreeNode.create("child1"));
        final TreeNode child2 = raptor.Tree().addChild(child1, TreeNode.create("child2"));
        final TreeNode child3 = raptor.Tree().addChild(child2, TreeNode.create("child3"));

        raptor.Tree().subscribe(root, (TreeNode node, TreeNodePayload message) -> {
            log.debug("TreeNode {} event received {}", node.getName(), message.toString());
            eventsReceived.decrementAndGet();
        });
        raptor.Tree().subscribe(child1, (TreeNode node, TreeNodePayload message) -> {
            log.debug("TreeNode {} event received {}", node.getName(), message.toString());
            eventsReceived.decrementAndGet();
        });
        raptor.Tree().subscribe(child2, (TreeNode node, TreeNodePayload message) -> {
            log.debug("TreeNode {} event received {}", node.getName(), message.toString());
            eventsReceived.decrementAndGet();
        });
        raptor.Tree().subscribe(child3, (TreeNode node, TreeNodePayload message) -> {
            log.debug("TreeNode {} event received {}", node.getName(), message.toString());
            eventsReceived.decrementAndGet();
        });

        final Device dev = raptor.Inventory().create(newDevice("dev1"));
        raptor.Tree().add(child3, dev);

        dev.description("updated description");
        raptor.Inventory().update(dev);

        Utils.waitUntil(10, () -> eventsReceived.get() == 0);
    }

    /** Pushed stream records must be delivered to a device-level data subscriber. */
    @Test
    public void watchDeviceDataEvents() {
        final AtomicInteger done = new AtomicInteger(2);
        Raptor raptor = Utils.createNewAdminInstance();
        log.debug("watch data events");
        Device dev = Utils.createDevice(raptor, newDevice("dev"));
        raptor.Inventory().subscribe(dev, new DataCallback() {
            @Override
            public void callback(Stream stream, RecordSet record) {
                log.debug("dev: Data received {}", record.toJson());
                Assert.assertTrue(record.deviceId().equals(dev.getDevice().id()));
                Assert.assertTrue(stream.name().equals("test2"));
                done.decrementAndGet();
            }
        });

        pushData(dev);

        Utils.waitUntil(10, () -> done.get() > 0);
    }

    /** Records on a dynamic stream keep ad-hoc channels (foo, bar) intact. */
    @Test
    public void watchDeviceDataWithDyncPropsEvents() {
        final AtomicInteger done = new AtomicInteger(1);
        Raptor raptor = Utils.getRaptor();
        log.debug("watch data events");
        Device dev = Utils.createDevice(raptor, newDevice("dev"));
        raptor.Inventory().subscribe(dev, new DataCallback() {
            @Override
            public void callback(Stream stream, RecordSet record) {
                log.debug("dev: Data received {}", record.toJson());
                Assert.assertTrue(record.deviceId().equals(dev.getDevice().id()));
                Assert.assertTrue(stream.name().equals("testdyn"));
                Assert.assertTrue(record.channel("foo").getBoolean());
                Assert.assertTrue(record.channel("bar").getNumber().intValue() == 42);
                done.decrementAndGet();
            }
        });

        Stream stream = dev.stream("testdyn");
        RecordSet record = new RecordSet(stream);
        record.channel("foo", true);
        record.channel("bar", 42);

        raptor.Stream().push(stream, record);

        Utils.waitUntil(10, () -> done.get() > 0);
    }

    /** Invoking an action must fire an action event with its payload. */
    @Test
    public void watchDeviceActionEvents() {
        final AtomicBoolean done = new AtomicBoolean(false);
        Raptor raptor = Utils.createNewAdminInstance();
        log.debug("watch action events");
        Device dev = Utils.createDevice(raptor, newDevice("dev"));
        raptor.Inventory().subscribe(dev, new ActionCallback() {
            @Override
            public void callback(Action action, ActionPayload payload) {
                log.debug("dev: Data received for {}: {}", payload.actionId, payload.data);
                Assert.assertTrue(action.name().equals("switch"));
                Assert.assertTrue(payload.data.equals("on"));
                done.set(true);
            }
        });

        Action action = dev.action("switch");
        raptor.Action().invoke(action, "on");

        Utils.waitUntil(15, () -> !done.get());
    }

    /** A token granting execute permission allows subscribing to action events. */
    @Test
    public void subscribeWithToken() {
        final AtomicBoolean done = new AtomicBoolean(false);
        Raptor raptor = Utils.getRaptor();
        log.debug("subscribe with permission token");

        Raptor r = Utils.createNewAdminInstance();
        r.Auth().login();

        Token t = r.Admin().Token().create(new Token("test", "test"));
        r.Admin().Token().Permission().set(t, Arrays.asList(
                new Permission(EntityType.device, Operation.execute, true)
        ));

        Device dev = r.Inventory().create(newDevice("dev"));

        Raptor r2 = new Raptor(Utils.loadSettings().getProperty("url"), t);
        r2.Inventory().subscribe(dev, new ActionCallback() {
            @Override
            public void callback(Action action, ActionPayload payload) {
                log.debug("dev: Data received for {}: {}", payload.actionId, payload.data);
                Assert.assertTrue(action.name().equals("switch"));
                Assert.assertTrue(payload.data.equals("on"));
                done.set(true);
            }
        });

        Action action = dev.action("switch");
        raptor.Action().invoke(action, "on");

        Utils.waitUntil(15, () -> !done.get());
    }

    /**
     * With only execute permission, a data subscription must either be
     * rejected or never deliver records; either way the flag must flip
     * through the exception path, not the data callback.
     */
    @Test
    public void checkFailingSubscribePermission() {
        final AtomicBoolean done = new AtomicBoolean(false);
        log.debug("subscribe with failing permissions");

        Raptor r = Utils.createNewUserInstance();
        r.Auth().login();

        Token t = r.Admin().Token().create(new Token("test", "test"));
        r.Admin().Token().Permission().set(t, Arrays.asList(
                new Permission(EntityType.device, Operation.execute, true)
        ));

        List<String> perms = r.Admin().Token().Permission().get(t);
        Assert.assertEquals(1, perms.size());

        Device dev = r.Inventory().create(newDevice("dev"));

        Raptor r2 = new Raptor(Utils.loadSettings().getProperty("url"), t);
        try {
            r2.Inventory().subscribe(dev, new DataCallback() {
                @Override
                public void callback(Stream stream, RecordSet record) {
                    log.debug("Got data: {}", record.toJson());
                    done.set(true);
                    Assert.fail("Permission should not allow receive data");
                }
            });
        } catch (Exception e) {
            log.debug("Expected exception received: {}", e.getMessage());
            done.set(true);
        }

        Stream stream = dev.stream("test");
        RecordSet record = new RecordSet(stream);
        record.channel("string", "test1");
        r.Stream().push(record);

        Utils.waitUntil(5, () -> !done.get());
    }

    /** A pull-permission token allows subscribing directly to a stream topic. */
    @Test
    public void checkSubscribeForStreamPermission() {
        final AtomicBoolean done = new AtomicBoolean(false);
        log.debug("subscribe to stream topic with permissions (subscribe, pull)");

        Raptor r = Utils.createNewAdminInstance();
        r.Auth().login();

        Token t = r.Admin().Token().create(new Token("test", "test"));
        r.Admin().Token().Permission().set(t, Arrays.asList(
                new Permission(EntityType.device, Operation.pull, true)
        ));

        Device dev = r.Inventory().create(newDevice("dev"));

        Raptor r2 = new Raptor(Utils.loadSettings().getProperty("url"), t);
        Stream stream = dev.stream("test");
        r2.Stream().subscribe(stream, new DataCallback() {
            @Override
            public void callback(Stream stream, RecordSet record) {
                log.debug("Got data: {}", record.toJson());
                done.set(true);
            }
        });

        RecordSet record = new RecordSet(stream);
        record.channel("string", "test1");
        r.Stream().push(record);

        Utils.waitUntil(5, () -> !done.get());
    }

    /** An execute-permission token allows subscribing directly to an action topic. */
    @Test
    public void checkSubscribeForActionPermission() {
        final AtomicBoolean done = new AtomicBoolean(false);
        Raptor raptor = Utils.getRaptor();
        log.debug("subscribe to action topic with permissions (subscribe, execute)");

        Raptor r = Utils.createNewAdminInstance();
        r.Auth().login();

        Token t = r.Admin().Token().create(new Token("test", "test"));
        r.Admin().Token().Permission().set(t, Arrays.asList(
                new Permission(EntityType.device, Operation.execute, true)
        ));

        Device dev = r.Inventory().create(newDevice("dev"));

        Raptor r2 = new Raptor(Utils.loadSettings().getProperty("url"), t);
        Action action = dev.action("switch");
        r2.Action().subscribe(action, new ActionCallback() {
            @Override
            public void callback(Action action, ActionPayload message) {
                log.debug("Got data: {}", message.data);
                done.set(true);
            }
        });

        raptor.Action().invoke(action, "on");

        Utils.waitUntil(5, () -> !done.get());
    }
}
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.app.util.opinion;

import java.io.IOException;
import java.util.*;

import ghidra.app.util.MemoryBlockUtils;
import ghidra.app.util.Option;
import ghidra.app.util.bin.BinaryReader;
import ghidra.app.util.bin.ByteProvider;
import ghidra.app.util.bin.format.omf.*;
import ghidra.app.util.bin.format.omf.OmfFixupRecord.Subrecord;
import ghidra.app.util.importer.MessageLog;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressOverflowException;
import ghidra.program.model.data.*;
import ghidra.program.model.lang.Language;
import ghidra.program.model.listing.*;
import ghidra.program.model.mem.*;
import ghidra.program.model.symbol.*;
import ghidra.program.model.util.CodeUnitInsertionException;
import ghidra.util.DataConverter;
import ghidra.util.exception.CancelledException;
import ghidra.util.exception.InvalidInputException;
import ghidra.util.task.TaskMonitor;
import ghidra.util.task.TaskMonitorAdapter;

/**
 * Loader for the Relocatable Object Module Format (OMF).
 * Parses the OMF file header, maps segments into program memory, creates
 * public/external symbols, and applies FIXUPP relocation records.
 */
public class OmfLoader extends AbstractLibrarySupportLoader {
	public final static String OMF_NAME = "Relocatable Object Module Format (OMF)";
	public final static long MIN_BYTE_LENGTH = 11;
	public final static long IMAGE_BASE = 0x2000; // Base offset to start loading segments
	public final static long MAX_UNINITIALIZED_FILL = 0x2000; // Maximum zero bytes added to pad initialized segments

	// External symbols collected while loading; consumed later by relocation processing
	private ArrayList<OmfSymbol> externsyms = null;

	/**
	 * OMF usually stores a string describing the compiler that produced it in a
	 * translator comment.  This routine maps this string to the official
	 * "secondary constraint" used by the Ghidra opinion service to pick a
	 * language module for the program.
	 * @param record is the translator comment string (may be null)
	 * @return the "secondary constraint", or null if the translator is unrecognized
	 */
	private String mapTranslator(String record) {
		if (record == null) {
			return null;
		}
		if (record.startsWith("Borland")) {
			return "borlandcpp";
		}
		if (record.startsWith("Delphi")) {
			return "borlanddelphi";
		}
		if (record.startsWith("CodeGear")) {
			return "codegearcpp";
		}
		return null;
	}

	@Override
	public Collection<LoadSpec> findSupportedLoadSpecs(ByteProvider provider) throws IOException {
		List<LoadSpec> loadSpecs = new ArrayList<>();

		if (provider.length() < MIN_BYTE_LENGTH) {
			return loadSpecs;
		}
		BinaryReader reader = OmfFileHeader.createReader(provider);
		if (OmfFileHeader.checkMagicNumber(reader)) {
			reader.setPointerIndex(0);
			OmfFileHeader scan;
			try {
				scan = OmfFileHeader.scan(reader, TaskMonitorAdapter.DUMMY_MONITOR, true);
			}
			catch (OmfException e) {
				// Preserve the original exception as the cause for diagnostics
				throw new IOException("Bad header format: " + e.getMessage(), e);
			}
			List<QueryResult> results = QueryOpinionService.query(getName(), scan.getMachineName(),
				mapTranslator(scan.getTranslator()));
			for (QueryResult result : results) {
				loadSpecs.add(new LoadSpec(this, IMAGE_BASE, result));
			}
			if (loadSpecs.isEmpty()) {
				loadSpecs.add(new LoadSpec(this, IMAGE_BASE, true));
			}
		}

		return loadSpecs;
	}

	@Override
	public String getName() {
		return OMF_NAME;
	}

	@Override
	protected void load(ByteProvider provider, LoadSpec loadSpec, List<Option> options,
			Program program, TaskMonitor monitor, MessageLog log)
			throws IOException, CancelledException {

		OmfFileHeader header = null;
		BinaryReader reader = OmfFileHeader.createReader(provider);
		try {
			header = OmfFileHeader.parse(reader, monitor);
			header.resolveNames();
			header.sortSegmentDataBlocks();
			OmfFileHeader.doLinking(IMAGE_BASE, header.getSegments(), header.getGroups());
		}
		catch (OmfException ex) {
			if (header == null) {
				// Preserve the parse failure as the cause
				throw new IOException("OMF File header was corrupted", ex);
			}
			log.appendMsg("File was corrupted - leaving partial program " + provider.getName());
		}

		// We don't use the file bytes to create block because the bytes are manipulated before
		// forming the block.  Creating the FileBytes anyway in case later we want access to all
		// the original bytes.
		MemoryBlockUtils.createFileBytes(program, provider, monitor);

		int id = program.startTransaction("loading program from OMF");
		boolean success = false;
		try {
			processSegmentHeaders(reader, header, program, monitor, log);
			processExternalSymbols(header, program, monitor, log);
			processPublicSymbols(header, program, monitor, log);
			processRelocations(header, program, monitor, log);
			success = true;
		}
		catch (AddressOverflowException e) {
			throw new IOException(e);
		}
		finally {
			program.endTransaction(id, success);
		}
	}

	/**
	 * Log a (hopefully) descriptive error, if we can't process a specific relocation
	 * @param program is the Program
	 * @param log will receive the error message
	 * @param state is the relocation record that could not be processed
	 */
	private void relocationError(Program program, MessageLog log,
			OmfFixupRecord.FixupState state) {
		String message;
		if (state.locAddress != null) {
			message = "Unable to process relocation at " + state.locAddress + " with type 0x" +
				Integer.toHexString(state.locationType);
			program.getBookmarkManager().setBookmark(state.locAddress, BookmarkType.ERROR,
				"Relocations", message);
		}
		else {
			message = "Badly broken relocation";
		}
		log.appendMsg(message);
	}

	/**
	 * Process the relocation (FIXUPP) records and create formal Ghidra relocation objects
	 * @param header is the file header for the program
	 * @param program is the Program
	 * @param monitor is checked for cancellation
	 * @param log receives error messages
	 */
	private void processRelocations(OmfFileHeader header, Program program, TaskMonitor monitor,
			MessageLog log) {
		ArrayList<OmfFixupRecord> fixups = header.getFixups();
		OmfFixupRecord.FixupState state =
			new OmfFixupRecord.FixupState(header, externsyms, program.getLanguage());
		// getInstance takes "isBigEndian", hence the negation of isLittleEndian
		DataConverter converter = DataConverter.getInstance(!header.isLittleEndian());

		for (OmfFixupRecord fixup : fixups) {
			state.currentFixupRecord = fixup;
			Subrecord[] subrecs = fixup.getSubrecords();
			Memory memory = program.getMemory();
			for (Subrecord subrec : subrecs) {
				if (monitor.isCancelled()) {
					break;
				}
				if (subrec.isThread()) {
					// THREAD subrecords only update the fixup state machine
					((OmfFixupRecord.ThreadSubrecord) subrec).updateState(state);
				}
				else {
					long finalvalue = -1;
					byte[] origbytes = null;
					try {
						OmfFixupRecord.FixupSubrecord fixsub =
							(OmfFixupRecord.FixupSubrecord) subrec;
						state.clear();
						fixsub.resolveFixup(state);
						if (state.targetState == -1 || state.locAddress == null) {
							relocationError(program, log, state);
							continue;
						}
						switch (state.locationType) {
							case 0: // Low-order byte
								origbytes = new byte[1];
								memory.getBytes(state.locAddress, origbytes);
								finalvalue = state.targetState;
								if (state.M) {
									// Segment-relative fixup: add in the original byte
									finalvalue += origbytes[0];
								}
								else {
									// Self-relative fixup: relative to the next byte
									finalvalue -= (state.locAddress.getOffset() + 1);
								}
								memory.setByte(state.locAddress, (byte) finalvalue);
								break;
							case 1: // 16-bit offset
							case 5: // 16-bit loader-resolved offset (treated same as 1)
								origbytes = new byte[2];
								memory.getBytes(state.locAddress, origbytes);
								finalvalue = state.targetState;
								if (state.M) {
									finalvalue += converter.getShort(origbytes);
								}
								else {
									finalvalue -= (state.locAddress.getOffset() + 2);
								}
								memory.setShort(state.locAddress, (short) finalvalue);
								break;
							// case 2:  // 16-bit base -- logical segment base (selector)
							// case 3:  // 32-bit Long pointer (16-bit base:16-bit offset
							case 4: // High-order byte (high byte of 16-bit offset)
								// NOTE(review): type 4 is grouped with the 32-bit cases
								// below and thus patched as 4 bytes -- looks suspicious
								// for a "high-order byte" fixup; confirm against spec
							case 9: // 32-bit offset
							case 13: // 32-bit loader-resolved offset (treated same as 9)
								origbytes = new byte[4];
								memory.getBytes(state.locAddress, origbytes);
								finalvalue = state.targetState;
								if (state.M) {
									finalvalue += converter.getInt(origbytes);
								}
								else {
									finalvalue -= (state.locAddress.getOffset() + 4);
								}
								memory.setInt(state.locAddress, (int) finalvalue);
								break;
							// case 11:  // 48-bit pointer (16-bit base:32-bit offset)
							default:
								log.appendMsg("Unsupported relocation type " +
									Integer.toString(state.locationType) + " at 0x" +
									Long.toHexString(state.locAddress.getOffset()));
								break;
						}
					}
					catch (MemoryAccessException | OmfException e) {
						// Identical handling for both failure modes
						relocationError(program, log, state);
						continue;
					}
					long[] values = new long[1];
					values[0] = finalvalue;
					program.getRelocationTable()
							.add(state.locAddress, state.locationType, values, origbytes, null);
				}
			}
		}
	}

	/**
	 * Run through the OMF segments and produce Ghidra memory blocks.
	 * Most segments cause an initialized block to be created, but if a segment
	 * consists only of a string of zero bytes, as described by a compact LIDATA record,
	 * an uninitialized block is created.
	 * @param reader is a reader for the underlying file
	 * @param header is the OMF file header
	 * @param program is the Program
	 * @param monitor is checked for cancellation
	 * @param log receives error messages
	 * @throws AddressOverflowException if the underlying data stream causes an address to wrap
	 * @throws IOException for problems accessing the OMF file through the reader
	 */
	private void processSegmentHeaders(BinaryReader reader, OmfFileHeader header, Program program,
			TaskMonitor monitor, MessageLog log) throws AddressOverflowException, IOException {
		monitor.setMessage("Process segments...");

		final Language language = program.getLanguage();

		ArrayList<OmfSegmentHeader> segments = header.getSegments();
		// int sectionNumber = 0;
		for (OmfSegmentHeader segment : segments) {
			// ++sectionNumber;
			if (monitor.isCancelled()) {
				break;
			}

			// if (segment.hasIteratedData() && segment.hasEnumeratedData())
			// 	throw new IOException("OMF segment has both iterated and enumerated data blocks");
			MemoryBlock block = null;

			final long segmentSize = segment.getSegmentLength();
			Address segmentAddr = segment.getAddress(language);
			if (segmentSize == 0) {
				// don't create a block...just log that we've seen the segment
				block = program.getMemory().getBlock(segmentAddr);
				log.appendMsg("Empty Segment: " + segment.getName());
			}
			else if (segment.hasNonZeroData()) {
				block = MemoryBlockUtils.createInitializedBlock(program, false, segment.getName(),
					segmentAddr, segment.getRawDataStream(reader, log), segmentSize,
					"Address:0x" + Long.toHexString(segmentAddr.getOffset()) + " " + "Size:0x" +
						Long.toHexString(segmentSize),
					null/*source*/, segment.isReadable(), segment.isWritable(),
					segment.isExecutable(), log, monitor);
				if (block != null) {
					log.appendMsg(
						"Created Initialized Block: " + segment.getName() + " @ " + segmentAddr);
				}
			}
			else {
				// Zero-filled segment: use an uninitialized block
				block = MemoryBlockUtils.createUninitializedBlock(program, false,
					segment.getName(), segmentAddr, segmentSize,
					"Address:0x" + Long.toHexString(segmentAddr.getOffset()) + " " + "Size:0x" +
						Long.toHexString(segmentSize),
					null/*source*/, segment.isReadable(), segment.isWritable(),
					segment.isExecutable(), log);
				if (block != null) {
					log.appendMsg(
						"Created Uninitialized Block: " + segment.getName() + " @ " + segmentAddr);
				}
			}
		}
	}

	/**
	 * Locate the start of a free range of memory (for holding external symbols)
	 * by finding an Address beyond any memory block in the program
	 * @param program is the Program
	 * @return the starting address of the free region, or null if there is no memory
	 */
	private Address findFreeAddress(Program program) {
		Memory memory = program.getMemory();
		// Don't consider overlay blocks for max addr
		Address maxAddr = memory.getMinAddress();
		if (maxAddr == null) {
			return null;
		}
		MemoryBlock[] blocks = memory.getBlocks();
		for (MemoryBlock block : blocks) {
			// get the physical address in case it is an overlay address
			Address blockEnd = block.getEnd().getPhysicalAddress();
			if (blockEnd.compareTo(maxAddr) > 0) {
				maxAddr = blockEnd;
			}
		}

		// Always align the fake External Address Space to a 0x1000 boundary
		Address externAddress = null;
		long newOffset = (maxAddr.getOffset() + 0x1000) & 0xfffffffffffff000L;
		externAddress = maxAddr.getNewAddress(newOffset);
		return externAddress;
	}

	/**
	 * Process any public symbol records and produce corresponding Ghidra symbols
	 * @param header is the file header for the program
	 * @param program is the Program
	 * @param monitor is checked for cancellations
	 * @param log receives any error messages
	 */
	private void processPublicSymbols(OmfFileHeader header, Program program, TaskMonitor monitor,
			MessageLog log) {
		SymbolTable symbolTable = program.getSymbolTable();
		ArrayList<OmfSymbolRecord> symbols = header.getPublicSymbols();
		ArrayList<OmfSegmentHeader> segments = header.getSegments();
		ArrayList<OmfGroupRecord> groups = header.getGroups();
		Language language = program.getLanguage();

		monitor.setMessage("Creating Public Symbols");

		for (OmfSymbolRecord symbolrec : symbols) {
			if (monitor.isCancelled()) {
				break;
			}
			Address addrBase = null;
			if (symbolrec.getSegmentIndex() != 0) {
				// TODO: What does it mean if both the segment and group index are non-zero?
				//       Is the segment index group relative?
				// For now we assume if a segment index is present, we don't need the group index
				OmfSegmentHeader baseSegment = segments.get(symbolrec.getSegmentIndex() - 1);
				addrBase = baseSegment.getAddress(language);
			}
			else if (symbolrec.getGroupIndex() != 0) {
				OmfGroupRecord baseGroup = groups.get(symbolrec.getGroupIndex() - 1);
				addrBase = baseGroup.getAddress(language);
			}
			else {
				// Absolute address
				// The base frame is ignored by most linkers
				addrBase = language.getDefaultSpace().getAddress(0);
			}

			int numSymbols = symbolrec.numSymbols();
			for (int i = 0; i < numSymbols; ++i) {
				OmfSymbol symbol = symbolrec.getSymbol(i);
				Address address = addrBase.add(symbol.getOffset());
				symbol.setAddress(address);
				createSymbol(symbol, address, symbolTable, log);
			}
		}
	}

	/**
	 * Create an OMF symbol in the program
	 * @param symbol is the symbol record
	 * @param address is the resolved address for the symbol
	 * @param symbolTable is the table to hold the symbol
	 * @param log is used to log error messages
	 * @return true if there are no errors creating the symbol
	 */
	private boolean createSymbol(OmfSymbol symbol, Address address, SymbolTable symbolTable,
			MessageLog log) {
		Symbol existingSym = symbolTable.getPrimarySymbol(address);
		String name = symbol.getName();
		Symbol sym = symbolTable.getGlobalSymbol(name, address);
		if (sym == null) {
			try {
				sym = symbolTable.createLabel(address, name, SourceType.IMPORTED);
			}
			catch (InvalidInputException e) {
				log.appendMsg("Unable to create symbol " + symbol.getName() + " at 0x" +
					Long.toHexString(address.getOffset()));
				return false;
			}
		}
		// Promote to primary if nothing else already claims that role
		if (existingSym == null || !existingSym.isPrimary()) {
			sym.setPrimary();
		}
		return true;
	}

	/**
	 * Process any external symbol records and create the corresponding Ghidra symbols.
	 * Build an external memory block to hold them if necessary
	 * @param header is the file header for the program
	 * @param program is the Program
	 * @param monitor is checked for cancellation
	 * @param log receives error messages
	 */
	private void processExternalSymbols(OmfFileHeader header, Program program, TaskMonitor monitor,
			MessageLog log) {

		ArrayList<OmfExternalSymbol> symbolrecs = header.getExternalSymbols();
		if (symbolrecs.size() == 0) {
			return;
		}

		Address externalAddress = findFreeAddress(program);
		if (externalAddress == null) {
			log.appendMsg("Serious problem, there is no memory at all for symbols!");
			return;
		}
		Address externalAddressStart = externalAddress;

		externsyms = new ArrayList<>();
		SymbolTable symbolTable = program.getSymbolTable();
		Language language = program.getLanguage();

		monitor.setMessage("Creating External Symbols");

		for (OmfExternalSymbol symbolrec : symbolrecs) {
			OmfSymbol[] symbols = symbolrec.getSymbols();	// TODO: Check instanceof OmfComdefRecord
			for (OmfSymbol symbol : symbols) {
				if (monitor.isCancelled()) {
					break;
				}
				Address address = null;
				if (symbol.getSegmentRef() != 0) {
					// Look for special Borland segment symbols
					OmfSegmentHeader segment =
						header.getExtraSegments().get(symbol.getSegmentRef() - 1);
					address = segment.getAddress(language);
					symbol.setAddress(address);
					externsyms.add(symbol);
					createSymbol(symbol, address, symbolTable, log);
				}
				else {
					address = externalAddress;
					symbol.setAddress(address);
					externsyms.add(symbol);
					if (createSymbol(symbol, address, symbolTable, log)) {
						// Advance only when the symbol was actually created
						externalAddress = externalAddress.add(16);
					}
				}
			}
		}

		createExternalBlock(program, log, externalAddress, externalAddressStart);
	}

	/**
	 * If necessary, create an external block to hold external symbols for this file
	 * @param program is the program representing the file
	 * @param log for error messages
	 * @param externalAddress is the address of the last byte (+1) of the external block
	 * @param externalAddressStart is the address of the first byte of the external block
	 */
	private void createExternalBlock(Program program, MessageLog log, Address externalAddress,
			Address externalAddressStart) {
		//create an artificial block for the external symbols
		if (!externalAddressStart.equals(externalAddress)) {
			long size = externalAddress.subtract(externalAddressStart);
			try {
				MemoryBlock block = program.getMemory()
						.createUninitializedBlock(MemoryBlock.EXTERNAL_BLOCK_NAME,
							externalAddressStart, size, false);

				// assume any value in external is writable.
				block.setWrite(true);

				Address current = externalAddressStart;
				while (current.compareTo(externalAddress) < 0) {
					createUndefined(program.getListing(), program.getMemory(), current,
						externalAddress.getAddressSpace().getPointerSize());
					current = current.add(externalAddress.getAddressSpace().getPointerSize());
				}
			}
			catch (Exception e) {
				log.appendMsg("Error creating external memory block: " + " - " + e.getMessage());
			}
		}
	}

	/**
	 * Create undefined data at a specific address in the program
	 * @param listing is the Program listing
	 * @param memory is the Program Memory
	 * @param addr is the Address of the data
	 * @param size is the number of bytes in the data
	 * @return the new created Data object, or null if the block is missing/uninitialized
	 * @throws CodeUnitInsertionException if the new data conflicts with another object
	 * @throws DataTypeConflictException if the data-type cannot be created
	 */
	private Data createUndefined(Listing listing, Memory memory, Address addr, int size)
			throws CodeUnitInsertionException, DataTypeConflictException {

		MemoryBlock block = memory.getBlock(addr);
		if (block == null || !block.isInitialized()) {
			return null;
		}
		DataType undefined = Undefined.getUndefinedDataType(size);
		return listing.createData(addr, undefined);
	}
}
/**
 * Copyright (c) 2016-present, RxJava Contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */

package io.reactivex;

import static org.junit.Assert.fail;

import java.io.*;

import org.junit.*;

/**
 * Checks the source code of the base reactive types and locates missing
 * mention of {@code Backpressure:} and {@code Scheduler:} of methods.
 */
public class JavadocForAnnotations {

    /**
     * Scans the source file of the given base class for annotated methods whose
     * javadoc is missing the matching marker section, failing the test if any found.
     * @param baseClassName the simple name of the base class (e.g. "Flowable")
     * @param scheduler true to check {@code @SchedulerSupport}/{@code Scheduler:},
     *                  false to check {@code @BackpressureSupport}/{@code Backpressure:}
     * @throws Exception on file access problems
     */
    static void checkSource(String baseClassName, boolean scheduler) throws Exception {
        File f = MaybeNo2Dot0Since.findSource(baseClassName);
        if (f == null) {
            return;
        }

        StringBuilder b = readFile(f);

        StringBuilder e = new StringBuilder();

        if (scheduler) {
            scanFor(b, "@SchedulerSupport", "Scheduler:", e, baseClassName);
        } else {
            scanFor(b, "@BackpressureSupport", "Backpressure:", e, baseClassName);
        }

        if (e.length() != 0) {
            System.out.println(e);

            fail(e.toString());
        }
    }

    /**
     * Reads the whole file into a StringBuilder, one line at a time,
     * normalizing line terminators to {@code '\n'}.
     * @param f the file to read
     * @return the file contents
     * @throws Exception on I/O problems
     */
    public static StringBuilder readFile(File f) throws Exception {
        StringBuilder b = new StringBuilder();

        // try-with-resources replaces the manual close() in a finally block
        try (BufferedReader in = new BufferedReader(new FileReader(f))) {
            for (;;) {
                String line = in.readLine();

                if (line == null) {
                    break;
                }

                b.append(line).append('\n');
            }
        }

        return b;
    }

    /**
     * Scans the source for occurrences of {@code annotation} and reports, into
     * {@code e}, each enclosing javadoc block that lacks the {@code inDoc} marker.
     * @param sourceCode the full source text
     * @param annotation the annotation token to search for
     * @param inDoc the javadoc marker that must precede each annotation
     * @param e receives one stack-trace-like error entry per violation
     * @param baseClassName used to format clickable error locations
     */
    static final void scanFor(StringBuilder sourceCode, String annotation, String inDoc,
            StringBuilder e, String baseClassName) {
        int index = 0;
        for (;;) {
            int idx = sourceCode.indexOf(annotation, index);

            if (idx < 0) {
                break;
            }

            int j = sourceCode.lastIndexOf("/**", idx);
            // see if the last /** is not before the index (last time the annotation was found
            // indicating an uncommented method like subscribe()
            if (j > index) {
                int k = sourceCode.indexOf(inDoc, j);

                if (k < 0 || k > idx) {
                    // when printed on the console, IDEs will create a clickable link to help navigate to the offending point
                    e.append("java.lang.RuntimeException: missing ").append(inDoc).append(" section\r\n");

                    int lc = lineNumber(sourceCode, idx);

                    e.append(" at io.reactivex.").append(baseClassName)
                    .append(" (").append(baseClassName).append(".java:")
                    .append(lc).append(")").append("\r\n\r\n");
                }
            }

            index = idx + annotation.length();
        }
    }

    /**
     * Scans the source for javadoc blocks whose {@code inDoc} description mentions
     * a {@code {@code method}} name that does not match the method actually being
     * documented, reporting each mismatch into {@code e}.
     * @param sourceCode the full source text
     * @param annotation the annotation token to search for
     * @param inDoc the javadoc marker whose description is checked
     * @param e receives one stack-trace-like error entry per violation
     * @param baseClassName used to format clickable error locations
     */
    static final void scanForBadMethod(StringBuilder sourceCode, String annotation, String inDoc,
            StringBuilder e, String baseClassName) {
        int index = 0;
        for (;;) {
            int idx = sourceCode.indexOf(annotation, index);

            if (idx < 0) {
                break;
            }

            int j = sourceCode.lastIndexOf("/**", idx);
            // see if the last /** is not before the index (last time the annotation was found
            // indicating an uncommented method like subscribe()
            if (j > index) {
                int k = sourceCode.indexOf(inDoc, j);

                if (k >= 0 && k <= idx) {
                    // blocks phrased as "You specify"/"This operator" are exempt
                    int ll = sourceCode.indexOf("You specify", k);
                    int lm = sourceCode.indexOf("This operator", k);
                    if ((ll < 0 || ll > idx) && (lm < 0 || lm > idx)) {
                        int n = sourceCode.indexOf("{@code ", k);
                        if (n < idx) {
                            int m = sourceCode.indexOf("}", n);
                            if (m < idx) {
                                String mname = sourceCode.substring(n + 7, m);
                                // skip over an immediately following @SuppressWarnings({...}) block
                                int q = sourceCode.indexOf("@SuppressWarnings({", idx);
                                int o = sourceCode.indexOf("{", idx);
                                if (q + 18 == o) {
                                    o = sourceCode.indexOf("{", q + 20);
                                }
                                if (o >= 0) {
                                    int p = sourceCode.indexOf(" " + mname + "(", idx);

                                    if (p < 0 || p > o) {
                                        // when printed on the console, IDEs will create a clickable link to help navigate to the offending point
                                        e.append("java.lang.RuntimeException: wrong method name in description of ").append(inDoc).append(" '").append(mname).append("'\r\n");

                                        int lc = lineNumber(sourceCode, idx);

                                        e.append(" at io.reactivex.").append(baseClassName)
                                        .append(" (").append(baseClassName).append(".java:")
                                        .append(lc).append(")").append("\r\n\r\n");
                                    }
                                }
                            }
                        }
                    }
                }
            }

            index = idx + annotation.length();
        }
    }

    /**
     * Scans the source file of the given base class for {@code Scheduler:} javadoc
     * sections that name the wrong method, failing the test if any found.
     * @param baseClassName the simple name of the base class (e.g. "Flowable")
     * @throws Exception on file access problems
     */
    static void checkSchedulerBadMethod(String baseClassName) throws Exception {
        File f = MaybeNo2Dot0Since.findSource(baseClassName);
        if (f == null) {
            return;
        }

        StringBuilder b = readFile(f);

        StringBuilder e = new StringBuilder();

        scanForBadMethod(b, "@SchedulerSupport", "Scheduler:", e, baseClassName);

        if (e.length() != 0) {
            System.out.println(e);

            fail(e.toString());
        }
    }

    /**
     * Computes the 1-based line number of the given character index.
     * @param s the text
     * @param index the character index
     * @return the line number (1 for the first line)
     */
    public static int lineNumber(StringBuilder s, int index) {
        int cnt = 1;
        for (int i = 0; i < index; i++) {
            if (s.charAt(i) == '\n') {
                cnt++;
            }
        }
        return cnt;
    }

    @Test
    public void checkFlowableBackpressure() throws Exception {
        checkSource(Flowable.class.getSimpleName(), false);
    }

    @Test
    public void checkFlowableScheduler() throws Exception {
        checkSource(Flowable.class.getSimpleName(), true);
    }

    @Test
    public void checkObservableBackpressure() throws Exception {
        checkSource(Observable.class.getSimpleName(), false);
    }

    @Test
    public void checkObservableScheduler() throws Exception {
        checkSource(Observable.class.getSimpleName(), true);
    }

    @Test
    public void checkSingleBackpressure() throws Exception {
        checkSource(Single.class.getSimpleName(), false);
    }

    @Test
    public void checkSingleScheduler() throws Exception {
        checkSource(Single.class.getSimpleName(), true);
    }

    @Test
    public void checkCompletableBackpressure() throws Exception {
        checkSource(Completable.class.getSimpleName(), false);
    }

    @Test
    public void checkCompletableScheduler() throws Exception {
        checkSource(Completable.class.getSimpleName(), true);
    }

    @Test
    public void checkMaybeBackpressure() throws Exception {
        checkSource(Maybe.class.getSimpleName(), false);
    }

    @Test
    public void checkMaybeScheduler() throws Exception {
        checkSource(Maybe.class.getSimpleName(), true);
    }

    @Test
    public void checkFlowableSchedulerDoc() throws Exception {
        checkSchedulerBadMethod(Flowable.class.getSimpleName());
    }

    @Test
    public void checkObservableSchedulerDoc() throws Exception {
        checkSchedulerBadMethod(Observable.class.getSimpleName());
    }

    @Test
    public void checkSingleSchedulerDoc() throws Exception {
        checkSchedulerBadMethod(Single.class.getSimpleName());
    }

    @Test
    public void checkCompletableSchedulerDoc() throws Exception {
        checkSchedulerBadMethod(Completable.class.getSimpleName());
    }

    @Test
    public void checkMaybeSchedulerDoc() throws Exception {
        checkSchedulerBadMethod(Maybe.class.getSimpleName());
    }
}
/* * Copyright Kay Stenschke * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.kstenschke.shifter.models.shiftable_types; import com.kstenschke.shifter.models.ActionContainer; import com.kstenschke.shifter.utils.UtilsEnvironment; import com.kstenschke.shifter.utils.UtilsPhp; import com.kstenschke.shifter.utils.UtilsTextual; import org.jetbrains.annotations.NonNls; import static org.apache.commons.lang.StringUtils.trim; /** * JavaScript DOC @param comment */ public class JsDoc { private static final String ACTION_TEXT = "Shift JsDoc"; @NonNls private static final String REGEX_DATA_TYPES_NATIVE = "(array|boolean|date|event|function|null|number|object|string|undefined|\\*)"; @NonNls private static final String REGEX_DATA_TYPES_ALIEN = "(bool|float|int|integer|void)"; public static boolean isJsDocBlock(String str) { str = trim(str); return str.startsWith("/**") && str.endsWith("*/") && ( (UtilsTextual.isMultiLine(str) && (str.contains("@param") || str.contains("@return"))) || str.contains("@type")); } /** * Check whether given string represents a JsDoc @param comment * * @param str String to be checked * @return boolean */ public static boolean isAtParamLine(String str) { str = trim(str); return str.startsWith("* ") && str.contains("@param"); } public static boolean isAtTypeLine(String str) { str = trim(str); return str.contains("@type") && (str.startsWith("* ") || Comment.isBlockComment(str, true, true)); } public static boolean isInvalidAtReturnsLine(String str) { str = 
trim(str); return str.startsWith("*") && str.contains("@return") && !str.contains("@returns"); } public static boolean isAtReturnsLine(String str, boolean allowInvalidReturnsKeyword) { str = trim(str); return str.startsWith("*") && (str.contains("@returns ") || (allowInvalidReturnsKeyword && str.contains("@return "))); } /** * @param str * @return Is native or "alien" (valid e.g. in JavaScript, Java, etc.) data type */ public static boolean isDataType(String str) { str = trim(str.toLowerCase()); return str.matches(REGEX_DATA_TYPES_NATIVE) || str.matches(REGEX_DATA_TYPES_ALIEN); } public static boolean isWordRightOfAtKeyword(String word, String line) { String[] keywords = new String[]{"@param", "@return", "@type"}; for (String keyword : keywords) { if (line.contains(keyword)) { line = trim(line.split(keyword)[1]); return line.startsWith(word); } } return false; } private static boolean containsDataType(String str, String lhs) { str = trim(str.toLowerCase()); if ( // JavaScript primitive data types str.contains(lhs + "array") || str.contains(lhs + "boolean") || str.contains(lhs + "null") || str.contains(lhs + "number") || str.contains(lhs + "object") || str.contains(lhs + "string") || str.contains(lhs + "undefined") // Complex JavaScript (object) data types || str.contains(lhs + "date") || str.contains(lhs + "event") || str.contains(lhs + "function") ) { return true; } // Non-JavaScript types known to other languages return str.contains(lhs + "bool") || str.contains(lhs + "float") || str.contains(lhs + "int") || str.contains(lhs + "void"); } public static boolean containsNoCompounds(String str) { return !(str.contains("{") && str.contains("}")); } /** * Actual shifting method * * @param actionContainer * @param word */ public static void addCompoundsAroundDataTypeAtCaretInDocument(ActionContainer actionContainer, String word) { UtilsEnvironment.replaceWordAtCaretInDocument(actionContainer, "{" + word + "}"); } /** * @param line * @param docCommentType "@param" / 
"@returns" / "@type" * @return */ private static String addCompoundsToDataType(String line, String docCommentType) { line = line.replaceAll("(?i)(" + docCommentType + "\\s*)" + REGEX_DATA_TYPES_NATIVE, "$1{$2}"); return line.replaceAll("(?i)(" + docCommentType + "\\s*)" + REGEX_DATA_TYPES_ALIEN, "$1{$2}"); } public static void correctInvalidReturnsCommentInDocument(ActionContainer actionContainer) { UtilsEnvironment.replaceWordAtCaretInDocument(actionContainer, "returns"); } /** * Correct invalid JsDoc block comment * * Correct "@return" into "@returns" * Add curly brackets around data shiftable_types in "@param" and "@returns" lines * Correct invalid data shiftable_types into existing primitive data shiftable_types (event => Object, int(eger) => number) * * @param actionContainer * @return */ public static boolean correctDocBlockInDocument(final ActionContainer actionContainer) { String documentText = actionContainer.document.getText(); String docBlock = documentText.substring(actionContainer.offsetSelectionStart, actionContainer.offsetSelectionEnd); String[] lines = docBlock.split("\n"); StringBuilder docBlockCorrected = new StringBuilder(); int index = 0; for (String line : lines) { if (isAtParamLine(line) || isAtReturnsLine(line, true) || isAtTypeLine(line)) { line = correctAtKeywordLine(line); } docBlockCorrected.append(index > 0 ? 
"\n" : "").append(line); index++; } docBlockCorrected = new StringBuilder(reduceDoubleEmptyCommentLines(docBlockCorrected.toString())); if (docBlockCorrected.toString().equals(docBlock)) { return false; } actionContainer.writeUndoable(actionContainer.getRunnableReplaceSelection(docBlockCorrected.toString(),true), ACTION_TEXT); return true; } /** * Correct JsDoc lLine * * @param line * @param keyword "@param" / "@returns" / "@type" * @return */ private static String correctAtKeywordLine(String line, String keyword) { line = correctInvalidAtReturnsStatement(line); if (containsNoCompounds(line) && containsDataType(line, " ")) { line = addCompoundsToDataType(line, keyword); } line = correctInvalidDataTypes(line, "{", true); line = correctInvalidDataTypes(line, "|", true); return containsDataType(line, "{") ? line : addDataType(line); } public static String correctAtKeywordLine(String line) { String[] keywords = new String[]{"@param", "@returns", "@type"}; for (String keyword : keywords) { line = correctAtKeywordLine(line, keyword); } return line; } private static String correctInvalidAtReturnsStatement(String line) { return line.replace(" @return ", " @returns "); } private static String correctInvalidDataTypes(String line) { return correctInvalidDataTypes(line, "", false); } private static String correctInvalidDataTypes(String line, String lhs, boolean allowVoid) { if (!allowVoid) { line = line.replace(lhs + "void", lhs + "undefined"); } return line .replace(lhs + "array", lhs + "Array") .replace(lhs + "bool", lhs + "boolean") .replace(lhs + "booleanean", lhs + "boolean") .replace(lhs + "date", lhs + "Date") .replace(lhs + "event", lhs + "Event") .replace(lhs + "float", lhs + "number") .replace(lhs + "int", lhs + "number") .replace(lhs + "integer", lhs + "number") .replace(lhs + "object", lhs + "Object"); } private static String reduceDoubleEmptyCommentLines(String block) { String[] lines = block.split("\n"); StringBuilder blockCleaned = new StringBuilder(); boolean 
wasPreviousEmpty = false; int index = 0; for (String line : lines) { boolean isEmpty = 0 == index || (trim(trim(line).replaceAll("\\*", "")).isEmpty()); if (0 == index || !(isEmpty && wasPreviousEmpty)) { blockCleaned.append(index > 0 ? "\n" : "").append(line); } wasPreviousEmpty = isEmpty; index++; } return blockCleaned.toString(); } private static String addDataType(String line) { String parameterName = trim(trim(line.replaceAll("\\*", "")) .replace("@param", "") .replace("@returns", "") .replace("@type", "")); if (parameterName.contains(" ")) { parameterName = parameterName.split("\\s")[0]; } if (parameterName.isEmpty()) { return line; } String jsDocParameterName = "{" + guessDataTypeByParameterName(parameterName) + "}"; if (line.contains(jsDocParameterName)) { return line; } return line.replace( parameterName, jsDocParameterName + (isAtReturnsLine(line, false) ? "" : " " + parameterName)); } private static String guessDataTypeByParameterName(String parameterName) { String parameterNameLower = parameterName.toLowerCase(); String[] camelWords = UtilsTextual.splitCamelCaseIntoWords(parameterName, true); String lastWord = camelWords[camelWords.length - 1]; if (parameterName.startsWith("$") || parameterName.matches("(?i)(\\w*elem)")) { return "*"; } if (parameterName.matches("(?i)(\\w*date\\w*)")) { return "Date"; } if ("e".equals(parameterName)) { return "Event"; } if (lastWord.matches("func|function|callback")) { return "Function"; } if (parameterName.length() == 1) { // e.g. x, y, i, etc. return "number"; } if ("params".equals(parameterName) || parameterName.matches("(?i)(\\w*obj\\w*)")) { return "Object"; } if ("useragent".equals(parameterNameLower)) { return "string"; } if ("void".equals(parameterName)) { // Intercept "id"-ending before it is mistaken for a numeric "ID" parameter return "void"; } return correctInvalidDataTypes(UtilsPhp.guessDataTypeByParameterName(parameterName)); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.standard;

import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;

import java.io.IOException;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;

import static org.junit.Assert.assertTrue;

/**
 * Tests for {@code CompressContent}: round-trips each supported compression
 * format (snappy, snappy-hadoop, snappy-framed, bzip2, gzip, deflate, lz4-framed)
 * against fixture files under {@code src/test/resources/CompressedData}, and
 * verifies the mime.type and filename attributes the processor sets.
 */
public class TestCompressContent {

    @Test
    public void testSnappyCompress() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, CompressContent.MODE_COMPRESS);
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, CompressContent.COMPRESSION_FORMAT_SNAPPY);
        runner.setProperty(CompressContent.UPDATE_FILENAME, "true");

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/x-snappy");
        flowFile.assertAttributeEquals("filename", "SampleFile.txt.snappy");
    }

    @Test
    public void testSnappyDecompress() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, CompressContent.MODE_DECOMPRESS);
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, CompressContent.COMPRESSION_FORMAT_SNAPPY);
        runner.setProperty(CompressContent.UPDATE_FILENAME, "true");

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt.snappy"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        flowFile.assertAttributeEquals("filename", "SampleFile.txt");
    }

    @Test
    public void testSnappyHadoopCompress() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, CompressContent.MODE_COMPRESS);
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, CompressContent.COMPRESSION_FORMAT_SNAPPY_HADOOP);
        runner.setProperty(CompressContent.UPDATE_FILENAME, "true");

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/x-snappy-hadoop");
        flowFile.assertAttributeEquals("filename", "SampleFile.txt.snappy");
    }

    @Test
    public void testSnappyHadoopDecompress() {
        // Decompression of snappy-hadoop is not supported, so the configuration
        // itself must be rejected as invalid.
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, CompressContent.MODE_DECOMPRESS);
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, CompressContent.COMPRESSION_FORMAT_SNAPPY_HADOOP);
        runner.setProperty(CompressContent.UPDATE_FILENAME, "true");
        runner.assertNotValid();
    }

    @Test
    public void testSnappyFramedCompress() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, CompressContent.MODE_COMPRESS);
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, CompressContent.COMPRESSION_FORMAT_SNAPPY_FRAMED);
        runner.setProperty(CompressContent.UPDATE_FILENAME, "true");

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/x-snappy-framed");
        flowFile.assertAttributeEquals("filename", "SampleFile.txt.sz");
    }

    @Test
    public void testSnappyFramedDecompress() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, CompressContent.MODE_DECOMPRESS);
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, CompressContent.COMPRESSION_FORMAT_SNAPPY_FRAMED);
        runner.setProperty(CompressContent.UPDATE_FILENAME, "true");

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt.sz"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        flowFile.assertAttributeEquals("filename", "SampleFile.txt");
    }

    @Test
    public void testBzip2DecompressConcatenated() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, "decompress");
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, "bzip2");
        runner.setProperty(CompressContent.UPDATE_FILENAME, "false");

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFileConcat.txt.bz2"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFileConcat.txt"));
        flowFile.assertAttributeEquals("filename", "SampleFileConcat.txt.bz2"); // not updating filename
    }

    @Test
    public void testBzip2Decompress() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, "decompress");
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, "bzip2");
        runner.setProperty(CompressContent.UPDATE_FILENAME, "true");

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt.bz2"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        flowFile.assertAttributeEquals("filename", "SampleFile.txt");

        runner.clearTransferState();
        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile1.txt.bz2"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        flowFile.assertAttributeEquals("filename", "SampleFile1.txt");
    }

    @Test
    public void testProperMimeTypeFromBzip2() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, "compress");
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, "bzip2");
        runner.setProperty(CompressContent.UPDATE_FILENAME, "false");

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertAttributeEquals("mime.type", "application/x-bzip2");
    }

    @Test
    public void testBzip2DecompressWithBothMimeTypes() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, "decompress");
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, CompressContent.COMPRESSION_FORMAT_ATTRIBUTE);
        runner.setProperty(CompressContent.UPDATE_FILENAME, "true");

        // ensure that we can decompress with a mime type of application/x-bzip2
        final Map<String, String> attributes = new HashMap<>();
        attributes.put("mime.type", "application/x-bzip2");

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt.bz2"), attributes);
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        flowFile.assertAttributeEquals("filename", "SampleFile.txt");

        // ensure that we can decompress with a mime type of application/bzip2. The appropriate mime type is
        // application/x-bzip2, but we used to use application/bzip2. We want to ensure that we are still
        // backward compatible.
        runner.clearTransferState();
        attributes.put("mime.type", "application/bzip2");
        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile1.txt.bz2"), attributes);
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        flowFile.assertAttributeEquals("filename", "SampleFile1.txt");
    }

    @Test
    public void testGzipDecompress() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, "decompress");
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, "gzip");
        assertTrue(runner.setProperty(CompressContent.UPDATE_FILENAME, "true").isValid());

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt.gz"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        flowFile.assertAttributeEquals("filename", "SampleFile.txt");

        runner.clearTransferState();
        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile1.txt.gz"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        flowFile.assertAttributeEquals("filename", "SampleFile1.txt");

        // also accepted via the mime.type attribute when the format is set to "use attribute"
        runner.clearTransferState();
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, CompressContent.COMPRESSION_FORMAT_ATTRIBUTE);
        Map<String, String> attributes = new HashMap<>();
        attributes.put(CoreAttributes.MIME_TYPE.key(), "application/x-gzip");
        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt.gz"), attributes);
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        flowFile.assertAttributeEquals("filename", "SampleFile.txt");
    }

    @Test
    public void testDeflateDecompress() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, "decompress");
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, "deflate");
        assertTrue(runner.setProperty(CompressContent.UPDATE_FILENAME, "true").isValid());

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt.zlib"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        // BUG FIX: removed leftover debug output (System.err.println of the flow file
        // content using the platform default charset); the assertions below already
        // verify the decompressed content.
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        flowFile.assertAttributeEquals("filename", "SampleFile.txt");
    }

    @Test
    public void testDeflateCompress() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, "compress");
        runner.setProperty(CompressContent.COMPRESSION_LEVEL, "6");
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, "deflate");
        assertTrue(runner.setProperty(CompressContent.UPDATE_FILENAME, "true").isValid());

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt.zlib"));
        flowFile.assertAttributeEquals("filename", "SampleFile.txt.zlib");
    }

    @Test
    public void testFilenameUpdatedOnCompress() throws IOException {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, "compress");
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, "gzip");
        assertTrue(runner.setProperty(CompressContent.UPDATE_FILENAME, "true").isValid());

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertAttributeEquals("filename", "SampleFile.txt.gz");
    }

    @Test
    public void testDecompressFailure() throws IOException {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, "decompress");
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, "gzip");

        // Arbitrary bytes that are not valid gzip content must route to failure
        // with the content untouched.
        byte[] data = new byte[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
        runner.enqueue(data);

        assertTrue(runner.setProperty(CompressContent.UPDATE_FILENAME, "true").isValid());
        runner.run();
        runner.assertQueueEmpty();
        runner.assertAllFlowFilesTransferred(CompressContent.REL_FAILURE, 1);

        runner.getFlowFilesForRelationship(CompressContent.REL_FAILURE).get(0).assertContentEquals(data);
    }

    @Test
    public void testLz4FramedCompress() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, CompressContent.MODE_COMPRESS);
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, CompressContent.COMPRESSION_FORMAT_LZ4_FRAMED);
        runner.setProperty(CompressContent.UPDATE_FILENAME, "true");

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/x-lz4-framed");
        flowFile.assertAttributeEquals("filename", "SampleFile.txt.lz4");
    }

    @Test
    public void testLz4FramedDecompress() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(CompressContent.class);
        runner.setProperty(CompressContent.MODE, CompressContent.MODE_DECOMPRESS);
        runner.setProperty(CompressContent.COMPRESSION_FORMAT, CompressContent.COMPRESSION_FORMAT_LZ4_FRAMED);
        runner.setProperty(CompressContent.UPDATE_FILENAME, "true");

        runner.enqueue(Paths.get("src/test/resources/CompressedData/SampleFile.txt.lz4"));
        runner.run();

        runner.assertAllFlowFilesTransferred(CompressContent.REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(CompressContent.REL_SUCCESS).get(0);
        flowFile.assertContentEquals(Paths.get("src/test/resources/CompressedData/SampleFile.txt"));
        flowFile.assertAttributeEquals("filename", "SampleFile.txt");
    }
}
/*******************************************************************************
 * Caleydo - Visualization for Molecular Biology - http://caleydo.org
 * Copyright (c) The Caleydo Team. All rights reserved.
 * Licensed under the new BSD license, available at http://caleydo.org/license
 ******************************************************************************/
package org.caleydo.core.view.opengl.layout2.renderer;

import java.net.URL;

import javax.media.opengl.GL;

import org.caleydo.core.util.color.Color;
import org.caleydo.core.view.opengl.layout.Column.VAlign;
import org.caleydo.core.view.opengl.layout2.GLElement;
import org.caleydo.core.view.opengl.layout2.GLGraphics;
import org.caleydo.core.view.opengl.layout2.layout.GLPadding;
import org.caleydo.core.view.opengl.util.text.ETextStyle;

import com.jogamp.opengl.util.texture.Texture;

/**
 * factory class for {@link IGLRenderer}
 *
 * @author Samuel Gratzl
 *
 */
public final class GLRenderers {
	/**
	 * dummy renderer, which does nothing
	 */
	public static final IGLRenderer DUMMY = new IGLRenderer() {
		@Override
		public void render(GLGraphics g, float w, float h, GLElement parent) {
			// intentionally empty
		}
	};

	/**
	 * renders a full sized transparent rect
	 */
	public static final IGLRenderer RECT = fillRect(null);

	private GLRenderers() {
		// utility class, not instantiable
	}

	/**
	 * renders the outline of a full sized rect with the specified color
	 *
	 * @param color
	 *            the color to use, may be null to keep the current color
	 * @return the renderer
	 */
	public static IGLRenderer drawRect(Color color) {
		return new SimpleRenderer(EWhat.DRAW_RECT, color);
	}

	/**
	 * renders the outline of a full sized rounded rect with the specified color
	 *
	 * @param color
	 *            the color to use, may be null to keep the current color
	 * @return the renderer
	 */
	public static IGLRenderer drawRoundedRect(Color color) {
		return new SimpleRenderer(EWhat.DRAW_ROUNDED_RECT, color);
	}

	/**
	 * renders a full sized rect with the specified color
	 *
	 * @param color
	 *            the color to use, may be null to keep the current color
	 * @return the renderer
	 */
	public static IGLRenderer fillRect(Color color) {
		return new SimpleRenderer(EWhat.FILL_RECT, color);
	}

	/**
	 * renders a full sized filled rounded rect with the specified color
	 *
	 * @param color
	 *            the color to use, may be null to keep the current color
	 * @return the renderer
	 */
	public static IGLRenderer fillRoundedRect(Color color) {
		return new SimpleRenderer(EWhat.FILL_ROUNDED_RECT, color);
	}

	public static IGLRenderer drawText(final String text) {
		return drawText(text, VAlign.LEFT);
	}

	public static IGLRenderer drawText(final String text, final VAlign valign) {
		return drawText(text, valign, GLPadding.ZERO);
	}

	public static IGLRenderer drawText(final String text, final VAlign valign, final GLPadding padding) {
		return drawText(text, valign, padding, ETextStyle.PLAIN);
	}

	/**
	 * renders the given text (first line only) within the padded bounds
	 */
	public static IGLRenderer drawText(final String text, final VAlign valign, final GLPadding padding,
			final ETextStyle style) {
		return new IGLRenderer() {
			@Override
			public void render(GLGraphics g, float w, float h, GLElement parent) {
				// only the first line is rendered; truncate at the first newline
				final int newline = text.indexOf('\n');
				final String t = newline >= 0 ? text.substring(0, newline) : text;
				g.drawText(t, padding.left, padding.top, w - padding.hor(), h - padding.vert(), valign, style);
			}

			@Override
			public String toString() {
				return text;
			}
		};
	}

	/**
	 * renders a full sized image identified by its resource name
	 */
	public static IGLRenderer fillImage(final String image) {
		return new IGLRenderer() {
			@Override
			public void render(GLGraphics g, float w, float h, GLElement parent) {
				g.fillImage(image, 0, 0, w, h);
			}
		};
	}

	/**
	 * renders a full sized image given as a {@link Texture}
	 */
	public static IGLRenderer fillImage(final Texture image) {
		return new IGLRenderer() {
			@Override
			public void render(GLGraphics g, float w, float h, GLElement parent) {
				g.fillImage(image, 0, 0, w, h);
			}
		};
	}

	/**
	 * renders a full sized image identified by a {@link URL}
	 */
	public static IGLRenderer fillImage(final URL image) {
		return new IGLRenderer() {
			@Override
			public void render(GLGraphics g, float w, float h, GLElement parent) {
				g.fillImage(image, 0, 0, w, h);
			}
		};
	}

	/**
	 * renders a full sized image overlaid with a translucent white rounded rect,
	 * giving a "pushed" appearance
	 */
	public static IGLRenderer pushedImage(final Texture image) {
		return new IGLRenderer() {
			@Override
			public void render(GLGraphics g, float w, float h, GLElement parent) {
				g.fillImage(image, 0, 0, w, h);
				g.gl.glEnable(GL.GL_BLEND);
				g.gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA);
				g.gl.glEnable(GL.GL_LINE_SMOOTH);
				g.color(new Color(1, 1, 1, 0.5f)).fillRoundedRect(0, 0, w, h, Math.min(w, h) * 0.25f);
				g.gl.glBlendFunc(GL.GL_ONE, GL.GL_ONE_MINUS_SRC_ALPHA);
			}
		};
	}

	/** the primitive to render for {@link SimpleRenderer} */
	private enum EWhat {
		FILL_RECT, DRAW_RECT, DRAW_DIAGONAL_LINE, DRAW_ROUNDED_RECT, FILL_ROUNDED_RECT
	}

	/** renders one fixed primitive, optionally with a fixed color */
	private static class SimpleRenderer implements IGLRenderer {
		private final EWhat what;
		private final Color color;

		public SimpleRenderer(EWhat what, Color color) {
			this.what = what;
			this.color = color;
		}

		@Override
		public void render(GLGraphics g, float w, float h, GLElement parent) {
			if (color != null)
				g.color(color);
			switch (what) {
			case DRAW_DIAGONAL_LINE:
				g.drawDiagonalLine(0, 0, w, h);
				break;
			case DRAW_ROUNDED_RECT:
				g.drawRoundedRect(0, 0, w, h, Math.min(w, h) * 0.25f);
				break;
			case DRAW_RECT:
				g.drawRect(0, 0, w, h);
				break;
			case FILL_ROUNDED_RECT:
				g.fillRoundedRect(0, 0, w, h, Math.min(w, h) * 0.25f);
				break;
			case FILL_RECT:
				g.fillRect(0, 0, w, h);
				break;
			}
		}
	}

	/**
	 * renders a horizontal line centered vertically, inset by {@code offset} on both ends
	 */
	public static IGLRenderer drawHorLine(Color color, float lineWidth, float offset) {
		return new LineRenderer(color, lineWidth, offset, true);
	}

	/**
	 * renders a vertical line centered horizontally, inset by {@code offset} on both ends
	 */
	public static IGLRenderer drawVertLine(Color color, float lineWidth, float offset) {
		// BUG FIX: the original passed hor=true here (copy-paste from drawHorLine),
		// which made drawVertLine render a horizontal line.
		return new LineRenderer(color, lineWidth, offset, false);
	}

	/** renders a single horizontal or vertical line with a given width and end inset */
	private static class LineRenderer implements IGLRenderer {
		private final Color color;
		private final float lineWidth;
		private final float offset;
		private final boolean hor;

		public LineRenderer(Color color, float lineWidth, float offset, boolean hor) {
			this.color = color;
			this.lineWidth = lineWidth;
			this.offset = offset;
			this.hor = hor;
		}

		@Override
		public void render(GLGraphics g, float w, float h, GLElement parent) {
			g.color(color);
			if (lineWidth != 1)
				g.lineWidth(lineWidth);
			if (hor)
				g.drawLine(offset, h * 0.5f, w - offset, h * 0.5f);
			else
				g.drawLine(w * 0.5f, offset, w * 0.5f, h - offset);
			if (lineWidth != 1)
				g.lineWidth(1); // restore default line width
		}
	}
}
package com.chanytime.baseproject.log;

import java.net.InetAddress;

import org.apache.log4j.Level;
import org.apache.log4j.MDC;
import org.apache.log4j.spi.LoggerFactory;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import com.chanytime.baseproject.configuration.Properties;
import com.chanytime.baseproject.util.MiscUtil;

/**
 * Logger is an extension of the log4j Logger class with some improvements. The
 * following is a list of such improvements.
 * <ul>
 * <li>Exposed the TRACE level for logging
 * <li>All levels take a Throwable object to print a backtrace for exceptions
 * <li>The ability to send an email log message for production systems
 * <li>The ability to use sprintf semantics
 * <li>sprintf penalties aren't incurred unless that log level is enabled
 * <li>Each logger statement has the method from which the logger was called
 * <li>Each logger statement can be correlated with a session ID
 * </ul>
 * <p>
 * The following is a way to call this logger.
 *
 * <pre>
 * {@code
 * Logger.getLogger().debug("a simple message");
 * Logger.getLogger().debug("a number: %d", 5);
 * try {
 *     ...
 * }
 * catch (Exception caughtException) {
 *     Logger.getLogger().debug(caughtException,
 *         "this will put a backtrace in the logs and be sent via email");
 * }
 * Logger.getLogger().sendEmailOnProduction("this will be sent via email!");
 * }
 * </pre>
 *
 * @author brchan
 * @version %I%, %G%
 * @see org.apache.log4j.Logger
 */
public class Logger extends org.apache.log4j.Logger {
    /**
     * LoggerFactoryImpl serves as a private class to instantiate the base-class
     * Logger (as opposed to the log4j version).
     *
     * @author brchan
     * @version %I%, %G%
     */
    private static class LoggerFactoryImpl implements LoggerFactory {
        /**
         * Implements LoggerFactory to return an instance of the base-class
         * Logger.
         *
         * @param _name
         *            the name of the logger to create (ignored; all instances use
         *            the fixed {@code LOGGER_NAME})
         * @return a new instance of the base-class logger
         */
        @Override
        public Logger makeNewLoggerInstance(String _name) {
            // Direct instantiation. The previous implementation reflectively
            // resolved this same class via Class.forName(...).newInstance(),
            // which added only failure modes (and a null return on error).
            return new Logger();
        }
    }

    private final static LoggerFactory m_loggerFactory = new LoggerFactoryImpl();
    private final static String LOGGER_NAME = "mainLogger";
    private final static String MDC_SESSION_ID_KEY = "sessionID";
    private final static String MDC_CLASS_METHOD_KEY = "classMethod";

    /**
     * Constructs a default Logger.
     */
    public Logger() {
        super(LOGGER_NAME);
    }

    // One-time guard for seeding the MDC session ID key. Declared volatile so the
    // flag is visible across threads; a benign race here at worst repeats the
    // idempotent MDC.put.
    private static volatile boolean m_initialized = false;

    /**
     * Returns the static instance of the logger.
     *
     * @return the static instance of the logger
     */
    public static Logger getLogger() {
        // If we haven't initialized the MDC session ID key, we do so for the
        // first (and only) time. If the logging format wants to display log
        // statements, we must have at least a null key in there to prevent
        // an issue with logger at runtime.
        if (!m_initialized) {
            if (MDC.get(MDC_SESSION_ID_KEY) == null) {
                MDC.put(MDC_SESSION_ID_KEY, "null");
            }
            m_initialized = true;
        }
        return (Logger) org.apache.log4j.Logger.getLogger(LOGGER_NAME, m_loggerFactory);
    }

    /**
     * Sets the session ID to correlate multiple calls to the logger to a single
     * session, which is defined by the client. The client has the responsibility
     * of generating session IDs which do not collide and keeping it across
     * process boundaries.
     *
     * @param _sessionID
     *            the client-generated session ID for correlating this log
     *            statement
     */
    public static void setSessionID(String _sessionID) {
        String loggerIdentifier = Properties.getProperties().getLoggerIdentifier();
        // BUG FIX: the original used (loggerIdentifier != ""), a reference
        // comparison that is true for virtually any string — including empty
        // strings read from configuration. Compare content instead.
        if (loggerIdentifier != null && !loggerIdentifier.isEmpty()) {
            _sessionID += " - " + loggerIdentifier;
        }
        MDC.put(MDC_SESSION_ID_KEY, _sessionID);
    }

    /**
     * Returns the current MDC session ID, or the literal "null" when unset.
     */
    private static String currentSessionId() {
        String session = (String) MDC.get(MDC_SESSION_ID_KEY);
        return session == null ? "null" : session;
    }

    /**
     * Returns the local host name, or the literal "null" when it cannot be
     * resolved. The name is purely informational (used in email subjects), so
     * resolution failures are deliberately swallowed.
     */
    private static String localHostName() {
        try {
            return InetAddress.getLocalHost().getHostName();
        } catch (Exception ignored) {
            return "null";
        }
    }

    /**
     * Sends an email to system operators on production systems. This will work
     * only on production systems, and the email destination is specified in the
     * properties file.
     *
     * @param _message
     *            the message to send in HTML format
     */
    public void sendEmailOnProduction(String _message) {
        if (Properties.getProperties().getProduction()) {
            try {
                MiscUtil.sendHtmlEmail(Properties.getProperties().getLoggerEmailDestination(),
                        "Session (" + currentSessionId() + ") on " + localHostName()
                                + " received a message to send email at "
                                + DateTime.now(DateTimeZone.UTC).toString() + "!",
                        _message, DateTime.now(DateTimeZone.UTC));
            } catch (Exception _e) {
                error("Caught exception trying to send exception email, message: %s", _e.getMessage());
            }
        }
    }

    /**
     * Sends an email with the backtrace in the specified exception.
     *
     * @param _t
     *            the exception with the backtrace information to send
     */
    private void sendExceptionEmail(Throwable _t) {
        if (Properties.getProperties().getProduction()) {
            try {
                MiscUtil.sendHtmlEmail(Properties.getProperties().getLoggerEmailDestination(),
                        "Session (" + currentSessionId() + ") on " + localHostName()
                                + " encountered an exception at "
                                + DateTime.now(DateTimeZone.UTC).toString() + "!",
                        getStackTrace(_t, "<br/>"), DateTime.now(DateTimeZone.UTC));
            } catch (Exception _e) {
                error("Caught exception trying to send exception email, message: %s", _e.getMessage());
            }
        }
    }

    /**
     * Retrieves the class and method that called the logger to log a statement
     * to be output in the logger output.
     */
    private void addClassMethodNameToMDC() {
        // Stack index 2: [0] = this method, [1] = the level method (debug/info/...),
        // [2] = the caller we want to report. Fragile if the call depth changes.
        StackTraceElement s = new Exception().getStackTrace()[2];
        String className = s.getClassName();
        String methodName = s.getMethodName();
        MDC.put(MDC_CLASS_METHOD_KEY, className.substring(className.lastIndexOf('.') + 1) + "." + methodName);
    }

    /**
     * Formats a throwable's message and full stack trace as a single string.
     *
     * @param _t
     *            the throwable to format
     * @param _lineSeparator
     *            separator between lines (e.g. the platform separator, or
     *            "&lt;br/&gt;" for HTML email)
     * @return the formatted backtrace
     */
    private static String getStackTrace(Throwable _t, String _lineSeparator) {
        final StringBuilder result = new StringBuilder();
        result.append("EXCEPTION (message: " + _t.getMessage() + ") -- details:");
        result.append(_lineSeparator);
        result.append("[EXCEPTION] ");
        result.append(_t.toString());
        for (StackTraceElement element : _t.getStackTrace()) {
            result.append(_lineSeparator);
            result.append("[EXCEPTION] at ");
            result.append(element);
        }
        return result.toString();
    }

    /**
     * Logs a message at the FATAL level.
     *
     * @param _string
     *            the message
     */
    public void fatal(String _string) {
        if (isEnabledFor(Level.FATAL)) {
            addClassMethodNameToMDC();
            super.fatal(_string);
        }
    }

    /**
     * Logs a message at the FATAL level.
     *
     * @param _format
     *            the message format
     * @param _args
     *            arguments to the message format
     */
    public void fatal(String _format, Object... _args) {
        if (isEnabledFor(Level.FATAL)) {
            addClassMethodNameToMDC();
            super.fatal(String.format(_format, _args));
        }
    }

    /**
     * Logs a message with an exception backtrace at the FATAL level.
     *
     * @param _t
     *            the exception
     * @param _format
     *            the message format
     * @param _args
     *            arguments to the message format
     */
    public void fatal(Throwable _t, String _format, Object... _args) {
        if (isEnabledFor(Level.FATAL)) {
            addClassMethodNameToMDC();
            super.fatal(String.format(_format, _args));
            super.fatal(getStackTrace(_t, System.getProperty("line.separator")));
        }
        // Note: the email is sent even when the level is disabled, matching the
        // original behavior for all throwable-taking overloads.
        sendExceptionEmail(_t);
    }

    /**
     * Logs a message at the ERROR level.
     *
     * @param _string
     *            the message
     */
    public void error(String _string) {
        if (isEnabledFor(Level.ERROR)) {
            addClassMethodNameToMDC();
            super.error(_string);
        }
    }

    /**
     * Logs a message at the ERROR level.
     *
     * @param _format
     *            the message format
     * @param _args
     *            arguments to the message format
     */
    public void error(String _format, Object... _args) {
        if (isEnabledFor(Level.ERROR)) {
            addClassMethodNameToMDC();
            super.error(String.format(_format, _args));
        }
    }

    /**
     * Logs a message with an exception backtrace at the ERROR level.
     *
     * @param _t
     *            the exception
     * @param _format
     *            the message format
     * @param _args
     *            arguments to the message format
     */
    public void error(Throwable _t, String _format, Object... _args) {
        if (isEnabledFor(Level.ERROR)) {
            addClassMethodNameToMDC();
            super.error(String.format(_format, _args));
            super.error(getStackTrace(_t, System.getProperty("line.separator")));
        }
        sendExceptionEmail(_t);
    }

    /**
     * Logs a message at the WARN level.
     *
     * @param _string
     *            the message
     */
    public void warn(String _string) {
        if (isEnabledFor(Level.WARN)) {
            addClassMethodNameToMDC();
            super.warn(_string);
        }
    }

    /**
     * Logs a message at the WARN level.
     *
     * @param _format
     *            the message format
     * @param _args
     *            arguments to the message format
     */
    public void warn(String _format, Object... _args) {
        if (isEnabledFor(Level.WARN)) {
            addClassMethodNameToMDC();
            super.warn(String.format(_format, _args));
        }
    }

    /**
     * Logs a message with an exception backtrace at the WARN level.
     *
     * @param _t
     *            the exception
     * @param _format
     *            the message format
     * @param _args
     *            arguments to the message format
     */
    public void warn(Throwable _t, String _format, Object... _args) {
        if (isEnabledFor(Level.WARN)) {
            addClassMethodNameToMDC();
            super.warn(String.format(_format, _args));
            super.warn(getStackTrace(_t, System.getProperty("line.separator")));
        }
        sendExceptionEmail(_t);
    }

    /**
     * Logs a message at the DEBUG level.
     *
     * @param _string
     *            the message
     */
    public void debug(String _string) {
        if (isDebugEnabled()) {
            addClassMethodNameToMDC();
            super.debug(_string);
        }
    }

    /**
     * Logs a message at the DEBUG level.
     *
     * @param _format
     *            the message format
     * @param _args
     *            arguments to the message format
     */
    public void debug(String _format, Object... _args) {
        if (isDebugEnabled()) {
            addClassMethodNameToMDC();
            super.debug(String.format(_format, _args));
        }
    }

    /**
     * Logs a message with an exception backtrace at the DEBUG level.
     *
     * @param _t
     *            the exception
     * @param _format
     *            the message format
     * @param _args
     *            arguments to the message format
     */
    public void debug(Throwable _t, String _format, Object... _args) {
        if (isDebugEnabled()) {
            addClassMethodNameToMDC();
            super.debug(String.format(_format, _args));
            super.debug(getStackTrace(_t, System.getProperty("line.separator")));
        }
        sendExceptionEmail(_t);
    }

    /**
     * Logs a message at the INFO level.
     *
     * @param _string
     *            the message
     */
    public void info(String _string) {
        if (isInfoEnabled()) {
            addClassMethodNameToMDC();
            super.info(_string);
        }
    }

    /**
     * Logs a message at the INFO level.
     *
     * @param _format
     *            the message format
     * @param _args
     *            arguments to the message format
     */
    public void info(String _format, Object... _args) {
        if (isInfoEnabled()) {
            addClassMethodNameToMDC();
            super.info(String.format(_format, _args));
        }
    }

    /**
     * Logs a message with an exception backtrace at the INFO level.
     *
     * @param _t
     *            the exception
     * @param _format
     *            the message format
     * @param _args
     *            arguments to the message format
     */
    public void info(Throwable _t, String _format, Object... _args) {
        if (isInfoEnabled()) {
            addClassMethodNameToMDC();
            super.info(String.format(_format, _args));
            super.info(getStackTrace(_t, System.getProperty("line.separator")));
        }
        sendExceptionEmail(_t);
    }

    /**
     * Logs a message at the TRACE level.
     *
     * @param _string
     *            the message
     */
    public void trace(String _string) {
        if (isTraceEnabled()) {
            addClassMethodNameToMDC();
            super.trace(_string);
        }
    }

    /**
     * Logs a message at the TRACE level.
     *
     * @param _format
     *            the message format
     * @param _args
     *            arguments to the message format
     */
    public void trace(String _format, Object... _args) {
        if (isTraceEnabled()) {
            addClassMethodNameToMDC();
            super.trace(String.format(_format, _args));
        }
    }

    /**
     * Logs a message with an exception backtrace at the TRACE level.
     *
     * @param _t
     *            the exception
     * @param _format
     *            the message format
     * @param _args
     *            arguments to the message format
     */
    public void trace(Throwable _t, String _format, Object... _args) {
        if (isTraceEnabled()) {
            addClassMethodNameToMDC();
            super.trace(String.format(_format, _args));
            super.trace(getStackTrace(_t, System.getProperty("line.separator")));
        }
        sendExceptionEmail(_t);
    }
}
package com.sfsu.db; import android.content.ContentValues; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteException; import android.support.v7.appcompat.BuildConfig; import android.util.Log; import com.sfsu.entities.Entity; import com.sfsu.entities.Tick; import java.util.ArrayList; import java.util.List; /** * Dao for Ticks related DB operation and data manipulation. * Created by Pavitra on 10/8/2015. */ public class TickDao implements EntityDao { // Singleton pattern private static final TickDao mInstance = new TickDao(); private static final String TAG = "~!@#$TickDao"; private SQLiteDatabase db; private String[] tickEntryArray = new String[]{ EntityTable.TicksTable.COLUMN_ID, EntityTable.TicksTable.COLUMN_TICK_NAME, EntityTable.TicksTable.COLUMN_TICK_SCIENTIFIC_NAME, EntityTable.TicksTable.COLUMN_TICK_SPECIES, EntityTable.TicksTable.COLUMN_KNOWN_FOR, EntityTable.TicksTable.COLUMN_DESCRIPTION, EntityTable.TicksTable.COLUMN_FOUND_NEAR, EntityTable.TicksTable.COLUMN_IMAGE, EntityTable.TicksTable.COLUMN_CREATED_AT, EntityTable.TicksTable.COLUMN_UPDATED_AT}; /** * Required */ private TickDao() { } /** * Returns static singleton instance of TickDao * * @return */ public static TickDao getInstance() { return mInstance; } @Override public void setDatabase(SQLiteDatabase db) { this.db = db; } /** * save(Tick) method is used to save the entries (field values) in to Tick Database table * * @param ticks * @return */ public long save(Entity entity) { long isSaved = 0; try { Tick tick = (Tick) entity; ContentValues contentValues = getContentValues(tick); // save the entity isSaved = db.insert(EntityTable.TicksTable.TABLENAME, null, contentValues); } catch (SQLiteException se) { if (BuildConfig.DEBUG) Log.e(TAG, "save: ", se); isSaved = -1; } catch (Exception e) { if (BuildConfig.DEBUG) Log.e(TAG, "save: ", e); isSaved = -1; } return isSaved; } /** * save(Tick) method is used to save the entries (field 
values) in to Tick Database table * * @param ticks * @return */ public long save(List<Tick> tickList) { long isSaved = 0; try { for (int i = 0; i < tickList.size(); i++) { Tick tick = tickList.get(i); ContentValues contentValues = getContentValues(tick); db.insert(EntityTable.TicksTable.TABLENAME, null, contentValues); } isSaved = tickList.size(); } catch (SQLiteException se) { if (BuildConfig.DEBUG) Log.e(TAG, "saveList: ", se); isSaved = -1; } catch (Exception e) { if (BuildConfig.DEBUG) Log.e(TAG, "saveList: ", e); isSaved = -1; } return isSaved; } /** * Returns ContentValue object for the current TickDao * * @param tick * @return */ private ContentValues getContentValues(Tick tick) { ContentValues contentValues = new ContentValues(); try { contentValues.put(EntityTable.TicksTable.COLUMN_ID, tick.getId()); contentValues.put(EntityTable.TicksTable.COLUMN_TICK_NAME, tick.getTickName()); contentValues.put(EntityTable.TicksTable.COLUMN_TICK_SCIENTIFIC_NAME, tick.getScientific_name()); contentValues.put(EntityTable.TicksTable.COLUMN_TICK_SPECIES, tick.getSpecies()); contentValues.put(EntityTable.TicksTable.COLUMN_KNOWN_FOR, tick.getKnown_for()); contentValues.put(EntityTable.TicksTable.COLUMN_DESCRIPTION, tick.getDescription()); contentValues.put(EntityTable.TicksTable.COLUMN_FOUND_NEAR, tick.getFound_near_habitat()); contentValues.put(EntityTable.TicksTable.COLUMN_IMAGE, tick.getImageUrl()); contentValues.put(EntityTable.TicksTable.COLUMN_CREATED_AT, tick.getCreated_at()); contentValues.put(EntityTable.TicksTable.COLUMN_UPDATED_AT, tick.getUpdated_at()); return contentValues; } catch (SQLiteException se) { if (BuildConfig.DEBUG) Log.e(TAG, "getContentValues: ", se); return null; } catch (Exception e) { if (BuildConfig.DEBUG) Log.e(TAG, "getContentValues: ", e); return null; } } /** * update(Tick) method to update the entries in Tick Table * * @param Ticks * @return */ @Override public boolean update(String id, Entity entity) { boolean isUpdated = false; try { Tick 
tick = (Tick) entity; ContentValues contentValues = getContentValues(tick); // update the record. isUpdated = db.update( EntityTable.TicksTable.TABLENAME, contentValues, EntityTable.TicksTable.COLUMN_ID + "=?", new String[]{tick.getId() + ""}) > 0; } catch (SQLiteException se) { if (BuildConfig.DEBUG) Log.e(TAG, "update: ", se); } catch (Exception e) { if (BuildConfig.DEBUG) Log.e(TAG, "update: ", e); } return isUpdated; } /** * Build the {@link Tick} object from Cursor. * * @param c * @return */ public Tick buildFromCursor(Cursor c) { Tick tickItem = null; try { if (c != null) { tickItem = new Tick(); tickItem.setId(c.getString(0)); tickItem.setTickName(c.getString(1)); tickItem.setScientific_name(c.getString(2)); tickItem.setSpecies(c.getString(3)); tickItem.setKnown_for(c.getString(4)); tickItem.setDescription(c.getString(5)); tickItem.setFound_near_habitat(c.getString(6)); tickItem.setImageUrl(c.getString(7)); tickItem.setCreated_at(c.getLong(8)); tickItem.setUpdated_at(c.getLong(9)); } return tickItem; } catch (SQLiteException se) { if (BuildConfig.DEBUG) Log.e(TAG, "buildFromCursor: ", se); return null; } catch (Exception e) { if (BuildConfig.DEBUG) Log.e(TAG, "buildFromCursor: ", e); return null; } } /** * Delete the Tick entry from the Table. * * @param Ticks * @return */ @Override public boolean delete(String id) { boolean isDeleted = false; try { isDeleted = db.delete( EntityTable.TicksTable.TABLENAME, EntityTable.TicksTable.COLUMN_ID + "=?", new String[]{id + ""}) > 0; } catch (SQLiteException se) { if (BuildConfig.DEBUG) Log.e(TAG, "buildFromCursor: ", se); } catch (Exception e) { if (BuildConfig.DEBUG) Log.e(TAG, "buildFromCursor: ", e); } return isDeleted; } /** * Returns Tick for corresponding Id. 
* * @param id * @return */ @Override public Tick get(String id) { Tick tickItem = null; Cursor c = null; try { c = db.query(true, EntityTable.TicksTable.TABLENAME, tickEntryArray, EntityTable.TicksTable.COLUMN_ID + "=?", new String[]{id + ""}, null, null, null, null); if (c != null && c.moveToFirst()) { tickItem = buildFromCursor(c); } } catch (SQLiteException se) { if (BuildConfig.DEBUG) Log.e(TAG, "get: ", se); } catch (Exception e) { if (BuildConfig.DEBUG) Log.e(TAG, "get: ", e); } finally { if (!c.isClosed()) { c.close(); } } return tickItem; } /** * Returns Tick for corresponding Name. * * @param id * @return */ @Override public Tick getByName(String name) { Tick tickItem = null; Cursor c = null; try { c = db.query(true, EntityTable.TicksTable.TABLENAME, tickEntryArray, EntityTable.TicksTable.COLUMN_TICK_NAME + "=?", new String[]{name + ""}, null, null, null, null); if (c != null && c.moveToFirst()) { tickItem = buildFromCursor(c); } } catch (SQLiteException se) { if (BuildConfig.DEBUG) Log.e(TAG, "getByName: ", se); } catch (Exception e) { if (BuildConfig.DEBUG) Log.e(TAG, "getByName: ", e); } finally { if (!c.isClosed()) { c.close(); } } return tickItem; } /** * Returns list of {@link Tick} stored in database. * * @return */ public List<Tick> getAll() { List<Tick> ticksList = new ArrayList<Tick>(); Cursor c = null; try { // Query the Database to get all the records. c = db.query( EntityTable.TicksTable.TABLENAME, tickEntryArray, null, null, null, null, null); // get all Ticks from Cursor if (c != null && c.moveToFirst()) { // loop until the end of Cursor and add each entry to Ticks ArrayList. do { Tick tickItem = buildFromCursor(c); if (tickItem != null) { ticksList.add(tickItem); } } while (c.moveToNext()); } } catch (SQLiteException se) { if (BuildConfig.DEBUG) Log.e(TAG, "getAll: ", se); } catch (Exception e) { if (BuildConfig.DEBUG) Log.e(TAG, "getAll: ", e); } finally { if (!c.isClosed()) { c.close(); } } return ticksList; } @Override public List<? 
extends Entity> getAll(String id) { return null; } }
package twg2.meta.test;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Test-fixture data types for field/metadata lookup tests. The member names,
 * modifiers, and inheritance structure of the nested types are themselves the
 * data under test (presumably inspected via reflection by the twg2.meta test
 * suite — TODO confirm against the tests), so this file should not be
 * restructured or renamed casually.
 *
 * @author TeamworkGuy2
 * @since 2015-9-7
 */
public class FieldGetData {

	// ==== Base, Tree, Leaf! twg2.meta.test data hierarchy ====

	// Interface with one int bean property (getter/setter pair).
	public static interface BaseI {
		public int getBaseI_Field1();
		public void setBaseI_Field1(int val);
	}

	public static interface Tree1I {
		public int getTree1I_Field1();
		public void setTree1I_Field1(int val);
	}

	public static interface Tree2I {
		public int getTree2I_Field1();
		public void setTree2I_Field1(int val);
	}

	// Leaf interfaces extend a tree interface and add a non-bean method
	// (no get/set prefix) — exercises non-property method handling.
	public static interface Leaf1I extends Tree1I {
		public void blowInTheWind_Leaf1();
	}

	public static interface Leaf2I extends Tree2I {
		public void blowInTheWind_Leaf2();
	}

	// Concrete implementation of BaseI with a private backing field.
	public static class Base implements BaseI {
		private int baseI_Field1;

		@Override
		public int getBaseI_Field1() {
			return baseI_Field1;
		}

		@Override
		public void setBaseI_Field1(int baseI_Field1) {
			this.baseI_Field1 = baseI_Field1;
		}
	}

	public static class Tree1 implements Tree1I {
		private int tree1I_Field1;

		@Override
		public int getTree1I_Field1() {
			return tree1I_Field1;
		}

		@Override
		public void setTree1I_Field1(int tree1i_Field1) {
			tree1I_Field1 = tree1i_Field1;
		}
	}

	public static class Tree2 implements Tree2I {
		private int tree2I_Field1;

		@Override
		public int getTree2I_Field1() {
			return tree2I_Field1;
		}

		@Override
		public void setTree2I_Field1(int tree2i_Field1) {
			tree2I_Field1 = tree2i_Field1;
		}
	}

	// Subclass adding its own property (double) on top of the inherited one.
	public static class Leaf1 extends Tree1 implements Leaf1I {
		private double leaf1_Field1;

		public double getLeaf1_Field1() {
			return leaf1_Field1;
		}

		public void setLeaf1_Field1(double leaf1_Field1) {
			this.leaf1_Field1 = leaf1_Field1;
		}

		@Override
		public void blowInTheWind_Leaf1() {
			System.out.println("blow in the wind 1");
		}
	}

	// Subclass with Object and enum-typed properties (reference-type fields).
	public static class Leaf2 extends Tree2 implements Leaf2I {
		private Object leaf2_Field1;
		private java.lang.annotation.ElementType leaf2_Field2;

		public Object getLeaf2_Field1() {
			return leaf2_Field1;
		}

		public void setLeaf2_Field1(Object leaf2_Field1) {
			this.leaf2_Field1 = leaf2_Field1;
		}

		public java.lang.annotation.ElementType getLeaf2_Field2() {
			return leaf2_Field2;
		}

		public void setLeaf2_Field2(java.lang.annotation.ElementType leaf2_Field2) {
			this.leaf2_Field2 = leaf2_Field2;
		}

		@Override
		public void blowInTheWind_Leaf2() {
			System.out.println("blow in the wind 2");
		}
	}

	// Subclass of a concrete class; note the PUBLIC field (accessibility case)
	// alongside the conventional getter/setter pair.
	public static class BaseLeaf extends Base implements BaseI {
		public String baseLeaf_Field1;

		public String getBaseLeaf_Field1() {
			return baseLeaf_Field1;
		}

		public void setBaseLeaf_Field1(String baseLeaf_Field1) {
			this.baseLeaf_Field1 = baseLeaf_Field1;
		}
	}

	// ==== Bugs! twg2.meta.test classes ====

	// Root of a three-level concrete hierarchy (Bug -> ColonyBug -> Termite)
	// with full equals/hashCode/toString; note the boolean property uses the
	// "isT" accessor convention.
	public static class Bug {
		private int count;
		private boolean t;

		public Bug() {
		}

		public Bug(int count, boolean t) {
			this.count = count;
			this.t = t;
		}

		public int getCount() {
			return count;
		}

		public void setCount(int count) {
			this.count = count;
		}

		public boolean isT() {
			return t;
		}

		public void setT(boolean t) {
			this.t = t;
		}

		@Override
		public int hashCode() {
			final int prime = 31;
			int result = 1;
			result = prime * result + count;
			result = prime * result + (t ? 1231 : 1237);
			return result;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			Bug other = (Bug) obj;
			if (count != other.count)
				return false;
			if (t != other.t)
				return false;
			return true;
		}

		@Override
		public String toString() {
			return "Bug [count=" + count + ", t=" + t + "]";
		}
	}

	public static class ColonyBug extends Bug {
		private boolean manager;
		private String id;

		public ColonyBug() {
			super();
		}

		public ColonyBug(int count, boolean t, boolean manager, String id) {
			super(count, t);
			this.manager = manager;
			this.id = id;
		}

		public boolean isManager() {
			return manager;
		}

		public void setManager(boolean manager) {
			this.manager = manager;
		}

		public String getId() {
			return id;
		}

		public void setId(String id) {
			this.id = id;
		}

		@Override
		public int hashCode() {
			final int prime = 31;
			int result = super.hashCode();
			result = prime * result + ((id == null) ? 0 : id.hashCode());
			result = prime * result + (manager ? 1231 : 1237);
			return result;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (!super.equals(obj))
				return false;
			if (getClass() != obj.getClass())
				return false;
			ColonyBug other = (ColonyBug) obj;
			if (id == null) {
				if (other.id != null)
					return false;
			} else if (!id.equals(other.id))
				return false;
			if (manager != other.manager)
				return false;
			return true;
		}

		@Override
		public String toString() {
			return "ColonyBug [manager=" + manager + ", id=" + id + "]";
		}
	}

	public static class Termite extends ColonyBug {
		private int colonyNum;
		private String termiteName;
		private StringBuilder colonyNotesBuf;

		public Termite() {
			super();
		}

		// NOTE: the `awesome` parameter is forwarded to ColonyBug's `manager` slot.
		public Termite(int colonyNum, String termiteName, int count, boolean t, boolean awesome, String id) {
			super(count, t, awesome, id);
			this.colonyNum = colonyNum;
			this.termiteName = termiteName;
			this.colonyNotesBuf = new StringBuilder();
		}

		public int getColonyNum() {
			return colonyNum;
		}

		public void setColonyNum(int colonyNum) {
			this.colonyNum = colonyNum;
		}

		public String getTermiteName() {
			return termiteName;
		}

		public void setTermiteName(String termiteName) {
			this.termiteName = termiteName;
		}

		public StringBuilder getColonyNotesBuf() {
			return colonyNotesBuf;
		}

		public void setColonyNotesBuf(StringBuilder colonyNotesBuf) {
			this.colonyNotesBuf = colonyNotesBuf;
		}

		@Override
		public int hashCode() {
			final int prime = 31;
			int result = super.hashCode();
			// StringBuilder uses identity hashCode — equal notes in different
			// builders hash differently (inherent to the fixture, kept as-is)
			result = prime * result + ((colonyNotesBuf == null) ? 0 : colonyNotesBuf.hashCode());
			result = prime * result + colonyNum;
			result = prime * result + ((termiteName == null) ? 0 : termiteName.hashCode());
			return result;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (!super.equals(obj))
				return false;
			if (getClass() != obj.getClass())
				return false;
			Termite other = (Termite) obj;
			if (colonyNotesBuf == null) {
				if (other.colonyNotesBuf != null)
					return false;
			} else if (!colonyNotesBuf.equals(other.colonyNotesBuf))
				return false;
			if (colonyNum != other.colonyNum)
				return false;
			if (termiteName == null) {
				if (other.termiteName != null)
					return false;
			} else if (!termiteName.equals(other.termiteName))
				return false;
			return true;
		}

		@Override
		public String toString() {
			return "Termite [colonyNum=" + colonyNum + ", termiteName=" + termiteName + ", colonyNotesBuf="
					+ colonyNotesBuf + "]";
		}
	}

	// Aggregate type: nested object, generic List, primitive long, char[] —
	// exercises field lookup across varied field types.
	public static class TermiteColony {
		private Termite boss;
		private List<Termite> termites;
		private long memPool;
		private char[] colonyName;

		public TermiteColony() {
		}

		public TermiteColony(Termite boss, List<Termite> termites, long memPool, String colonyName) {
			this.boss = boss;
			this.termites = termites;
			this.memPool = memPool;
			this.colonyName = colonyName.toCharArray();
		}

		public Termite getBoss() {
			return boss;
		}

		public void setBoss(Termite boss) {
			this.boss = boss;
		}

		public List<Termite> getTermites() {
			return termites;
		}

		public void setTermites(List<Termite> termites) {
			this.termites = termites;
		}

		public long getMemPool() {
			return memPool;
		}

		public void setMemPool(long memPool) {
			this.memPool = memPool;
		}

		public char[] getColonyName() {
			return colonyName;
		}

		public void setColonyName(char[] colonyName) {
			this.colonyName = colonyName;
		}

		@Override
		public int hashCode() {
			final int prime = 31;
			int result = 1;
			result = prime * result + ((boss == null) ? 0 : boss.hashCode());
			result = prime * result + Arrays.hashCode(colonyName);
			result = prime * result + (int) (memPool ^ (memPool >>> 32));
			result = prime * result + ((termites == null) ? 0 : termites.hashCode());
			return result;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			TermiteColony other = (TermiteColony) obj;
			if (boss == null) {
				if (other.boss != null)
					return false;
			} else if (!boss.equals(other.boss))
				return false;
			if (!Arrays.equals(colonyName, other.colonyName))
				return false;
			if (memPool != other.memPool)
				return false;
			if (termites == null) {
				if (other.termites != null)
					return false;
			} else if (!termites.equals(other.termites))
				return false;
			return true;
		}

		@Override
		public String toString() {
			return "TermiteColony [boss=" + boss + ", termites=" + termites + ", memPool=" + memPool + ", colonyName="
					+ Arrays.toString(colonyName) + "]";
		}
	}

	// Factory methods producing pre-populated fixture instances.
	public static class Dummy {

		public static Termite newTermite1() {
			Termite bug = new Termite(42, "termite-42", 3, false, true, "id2332");
			bug.setColonyNotesBuf(new StringBuilder("fastest termite ever!"));
			return bug;
		}

		public static Termite newTermite2() {
			Termite bug = new Termite(19, "first termite!", 12345, false, true, "IDs, IDs, IDS for all!");
			bug.setColonyNotesBuf(new StringBuilder("witty termite description -HERE-"));
			return bug;
		}

		public static Termite newTermiteBoss() {
			Termite bug = new Termite(423, "termite-1", 83, false, true, "id423");
			bug.setColonyNotesBuf(new StringBuilder("bossiest termite ever!"));
			return bug;
		}

		public static TermiteColony newTermiteColony1() {
			Termite boss = newTermiteBoss();
			Termite bug1 = newTermite1();
			Termite bug2 = newTermite2();
			TermiteColony colony = new TermiteColony(boss, new ArrayList<>(Arrays.asList(bug1, bug2)), 1234567890,
					"names, tames");
			return colony;
		}
	}

	// Type with no public accessors at all — presumably the negative case for
	// accessibility checks (TODO confirm against the tests).
	public static class NotAccessible {
		private List<String> myPasswords;

		public NotAccessible() {
			this.myPasswords = new ArrayList<>();
			this.myPasswords.add("what does the fox say?");
		}
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.client.solrj.impl; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.http.HttpResponse; import org.apache.solr.SolrJettyTestBase; import org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.util.SolrjNamedThreadFactory; import org.apache.solr.util.ExternalPaths; import org.junit.BeforeClass; import org.junit.Test; public class ConcurrentUpdateSolrServerTest extends SolrJettyTestBase { /** * Mock endpoint where the CUSS being tested in this class sends requests. 
*/ public static class TestServlet extends HttpServlet implements JavaBinUpdateRequestCodec.StreamingUpdateHandler { private static final long serialVersionUID = 1L; public static void clear() { lastMethod = null; headers = null; parameters = null; errorCode = null; numReqsRcvd.set(0); numDocsRcvd.set(0); } public static Integer errorCode = null; public static String lastMethod = null; public static HashMap<String,String> headers = null; public static Map<String,String[]> parameters = null; public static AtomicInteger numReqsRcvd = new AtomicInteger(0); public static AtomicInteger numDocsRcvd = new AtomicInteger(0); public static void setErrorCode(Integer code) { errorCode = code; } private void setHeaders(HttpServletRequest req) { Enumeration<String> headerNames = req.getHeaderNames(); headers = new HashMap<>(); while (headerNames.hasMoreElements()) { final String name = headerNames.nextElement(); headers.put(name, req.getHeader(name)); } } private void setParameters(HttpServletRequest req) { //parameters = req.getParameterMap(); } @Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { numReqsRcvd.incrementAndGet(); lastMethod = "post"; recordRequest(req, resp); InputStream reqIn = req.getInputStream(); JavaBinUpdateRequestCodec javabin = new JavaBinUpdateRequestCodec(); for (;;) { try { javabin.unmarshal(reqIn, this); } catch (EOFException e) { break; // this is expected } } } private void recordRequest(HttpServletRequest req, HttpServletResponse resp) { setHeaders(req); setParameters(req); if (null != errorCode) { try { resp.sendError(errorCode); } catch (IOException e) { throw new RuntimeException("sendError IO fail in TestServlet", e); } } } @Override public void update(SolrInputDocument document, UpdateRequest req, Integer commitWithin, Boolean override) { numDocsRcvd.incrementAndGet(); } } // end TestServlet @BeforeClass public static void beforeTest() throws Exception { 
createJetty(ExternalPaths.EXAMPLE_HOME, null, null); jetty.getDispatchFilter().getServletHandler() .addServletWithMapping(TestServlet.class, "/cuss/*"); } @Test public void testConcurrentUpdate() throws Exception { TestServlet.clear(); String serverUrl = jetty.getBaseUrl().toString() + "/cuss/foo"; int cussThreadCount = 2; int cussQueueSize = 100; // for tracking callbacks from CUSS final AtomicInteger successCounter = new AtomicInteger(0); final AtomicInteger errorCounter = new AtomicInteger(0); final StringBuilder errors = new StringBuilder(); @SuppressWarnings("serial") ConcurrentUpdateSolrServer cuss = new ConcurrentUpdateSolrServer(serverUrl, cussQueueSize, cussThreadCount) { @Override public void handleError(Throwable ex) { errorCounter.incrementAndGet(); errors.append(" "+ex); } @Override public void onSuccess(HttpResponse resp) { successCounter.incrementAndGet(); } }; cuss.setParser(new BinaryResponseParser()); cuss.setRequestWriter(new BinaryRequestWriter()); cuss.setPollQueueTime(0); // ensure it doesn't block where there's nothing to do yet cuss.blockUntilFinished(); int poolSize = 5; ExecutorService threadPool = Executors.newFixedThreadPool(poolSize, new SolrjNamedThreadFactory("testCUSS")); int numDocs = 100; int numRunnables = 5; for (int r=0; r < numRunnables; r++) threadPool.execute(new SendDocsRunnable(String.valueOf(r), numDocs, cuss)); // ensure all docs are sent threadPool.awaitTermination(5, TimeUnit.SECONDS); threadPool.shutdown(); // wait until all requests are processed by CUSS cuss.blockUntilFinished(); cuss.shutdownNow(); assertEquals("post", TestServlet.lastMethod); // expect all requests to be successful int expectedSuccesses = TestServlet.numReqsRcvd.get(); assertTrue(expectedSuccesses > 0); // at least one request must have been sent assertTrue("Expected no errors but got "+errorCounter.get()+ ", due to: "+errors.toString(), errorCounter.get() == 0); assertTrue("Expected "+expectedSuccesses+" successes, but got "+successCounter.get(), 
successCounter.get() == expectedSuccesses); int expectedDocs = numDocs * numRunnables; assertTrue("Expected CUSS to send "+expectedDocs+" but got "+TestServlet.numDocsRcvd.get(), TestServlet.numDocsRcvd.get() == expectedDocs); } class SendDocsRunnable implements Runnable { private String id; private int numDocs; private ConcurrentUpdateSolrServer cuss; SendDocsRunnable(String id, int numDocs, ConcurrentUpdateSolrServer cuss) { this.id = id; this.numDocs = numDocs; this.cuss = cuss; } @Override public void run() { for (int d=0; d < numDocs; d++) { SolrInputDocument doc = new SolrInputDocument(); String docId = id+"_"+d; doc.setField("id", docId); UpdateRequest req = new UpdateRequest(); req.add(doc); try { cuss.request(req); } catch (Throwable t) { t.printStackTrace(); } } } } }
/* * Copyright 2012 Udo Klimaschewski * * http://UdoJava.com/ * http://about.me/udo.klimaschewski * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * */ package com.gdgvietnam.calculator; import java.math.BigDecimal; import java.math.BigInteger; import java.math.MathContext; import java.math.RoundingMode; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Stack; /** * <h1>EvalEx - Java Expression Evaluator</h1> * * <h2>Introduction</h2> EvalEx is a handy expression evaluator for Java, that * allows to evaluate simple mathematical and boolean expressions. 
<br> * Key Features: * <ul> * <li>Uses BigDecimal for calculation and result</li> * <li>Single class implementation, very compact</li> * <li>No dependencies to external libraries</li> * <li>Precision and rounding mode can be set</li> * <li>Supports variables</li> * <li>Standard boolean and mathematical operators</li> * <li>Standard basic mathematical and boolean functions</li> * <li>Custom functions and operators can be added at runtime</li> * </ul> * <br> * <h2>Examples</h2> * * <pre> * BigDecimal result = null; * * Expression expression = new Expression("1+1/3"); * result = expression.eval(): * expression.setPrecision(2); * result = expression.eval(): * * result = new Expression("(3.4 + -4.1)/2").eval(); * * result = new Expression("SQRT(a^2 + b^2").with("a","2.4").and("b","9.253").eval(); * * BigDecimal a = new BigDecimal("2.4"); * BigDecimal b = new BigDecimal("9.235"); * result = new Expression("SQRT(a^2 + b^2").with("a",a).and("b",b).eval(); * * result = new Expression("2.4/PI").setPrecision(128).setRoundingMode(RoundingMode.UP).eval(); * * result = new Expression("random() > 0.5").eval(); * * result = new Expression("not(x<7 || sqrt(max(x,9)) <= 3))").with("x","22.9").eval(); * </pre> * * <br> * <h2>Supported Operators</h2> * <table> * <tr> * <th>Mathematical Operators</th> * </tr> * <tr> * <th>Operator</th> * <th>Description</th> * </tr> * <tr> * <td>+</td> * <td>Additive operator</td> * </tr> * <tr> * <td>-</td> * <td>Subtraction operator</td> * </tr> * <tr> * <td>*</td> * <td>Multiplication operator</td> * </tr> * <tr> * <td>/</td> * <td>Division operator</td> * </tr> * <tr> * <td>%</td> * <td>Remainder operator (Modulo)</td> * </tr> * <tr> * <td>^</td> * <td>Power operator</td> * </tr> * </table> * <br> * <table> * <tr> * <th>Boolean Operators<sup>*</sup></th> * </tr> * <tr> * <th>Operator</th> * <th>Description</th> * </tr> * <tr> * <td>=</td> * <td>Equals</td> * </tr> * <tr> * <td>==</td> * <td>Equals</td> * </tr> * <tr> * <td>!=</td> * <td>Not 
equals</td> * </tr> * <tr> * <td>&lt;&gt;</td> * <td>Not equals</td> * </tr> * <tr> * <td>&lt;</td> * <td>Less than</td> * </tr> * <tr> * <td>&lt;=</td> * <td>Less than or equal to</td> * </tr> * <tr> * <td>&gt;</td> * <td>Greater than</td> * </tr> * <tr> * <td>&gt;=</td> * <td>Greater than or equal to</td> * </tr> * <tr> * <td>&amp;&amp;</td> * <td>Boolean and</td> * </tr> * <tr> * <td>||</td> * <td>Boolean or</td> * </tr> * </table> * *Boolean operators result always in a BigDecimal value of 1 or 0 (zero). Any * non-zero value is treated as a _true_ value. Boolean _not_ is implemented by * a function. <br> * <h2>Supported Functions</h2> * <table> * <tr> * <th>Function<sup>*</sup></th> * <th>Description</th> * </tr> * <tr> * <td>NOT(<i>expression</i>)</td> * <td>Boolean negation, 1 (means true) if the expression is not zero</td> * </tr> * <tr> * <td>IF(<i>condition</i>,<i>value_if_true</i>,<i>value_if_false</i>)</td> * <td>Returns one value if the condition evaluates to true or the other if it * evaluates to false</td> * </tr> * <tr> * <td>RANDOM()</td> * <td>Produces a random number between 0 and 1</td> * </tr> * <tr> * <td>MIN(<i>e1</i>,<i>e2</i>, <i>...</i>)</td> * <td>Returns the smallest of the given expressions</td> * </tr> * <tr> * <td>MAX(<i>e1</i>,<i>e2</i>, <i>...</i>)</td> * <td>Returns the biggest of the given expressions</td> * </tr> * <tr> * <td>ABS(<i>expression</i>)</td> * <td>Returns the absolute (non-negative) value of the expression</td> * </tr> * <tr> * <td>ROUND(<i>expression</i>,precision)</td> * <td>Rounds a value to a certain number of digits, uses the current rounding * mode</td> * </tr> * <tr> * <td>FLOOR(<i>expression</i>)</td> * <td>Rounds the value down to the nearest integer</td> * </tr> * <tr> * <td>CEILING(<i>expression</i>)</td> * <td>Rounds the value up to the nearest integer</td> * </tr> * <tr> * <td>LOG(<i>expression</i>)</td> * <td>Returns the natural logarithm (base e) of an expression</td> * </tr> * <tr> * 
/**
 * <td>LOG10(<i>expression</i>)</td>
 * <td>Returns the common logarithm (base 10) of an expression</td>
 * </tr>
 * <tr>
 * <td>SQRT(<i>expression</i>)</td>
 * <td>Returns the square root of an expression</td>
 * </tr>
 * <tr>
 * <td>SIN(<i>expression</i>)</td>
 * <td>Returns the trigonometric sine of an angle (in degrees)</td>
 * </tr>
 * <tr>
 * <td>COS(<i>expression</i>)</td>
 * <td>Returns the trigonometric cosine of an angle (in degrees)</td>
 * </tr>
 * <tr>
 * <td>TAN(<i>expression</i>)</td>
 * <td>Returns the trigonometric tangent of an angle (in degrees)</td>
 * </tr>
 * <tr>
 * <td>ASIN(<i>expression</i>)</td>
 * <td>Returns the arc sine (in degrees)</td>
 * </tr>
 * <tr>
 * <td>ACOS(<i>expression</i>)</td>
 * <td>Returns the arc cosine (in degrees)</td>
 * </tr>
 * <tr>
 * <td>ATAN(<i>expression</i>)</td>
 * <td>Returns the arc tangent (in degrees)</td>
 * </tr>
 * <tr>
 * <td>SINH(<i>expression</i>)</td>
 * <td>Returns the hyperbolic sine of a value</td>
 * </tr>
 * <tr>
 * <td>COSH(<i>expression</i>)</td>
 * <td>Returns the hyperbolic cosine of a value</td>
 * </tr>
 * <tr>
 * <td>TANH(<i>expression</i>)</td>
 * <td>Returns the hyperbolic tangent of a value</td>
 * </tr>
 * <tr>
 * <td>RAD(<i>expression</i>)</td>
 * <td>Converts an angle measured in degrees to an approximately equivalent
 * angle measured in radians</td>
 * </tr>
 * <tr>
 * <td>DEG(<i>expression</i>)</td>
 * <td>Converts an angle measured in radians to an approximately equivalent
 * angle measured in degrees</td>
 * </tr>
 * </table>
 *
 * Function names are case-insensitive.
 * <br>
 * <h2>Supported Constants</h2>
 * <table>
 * <tr>
 * <th>Constant</th>
 * <th>Description</th>
 * </tr>
 * <tr>
 * <td>PI</td>
 * <td>The value of <i>PI</i>, exact to 100 digits</td>
 * </tr>
 * <tr>
 * <td>TRUE</td>
 * <td>The value one</td>
 * </tr>
 * <tr>
 * <td>FALSE</td>
 * <td>The value zero</td>
 * </tr>
 * </table>
 *
 * <h2>Add Custom Operators</h2>
 *
 * Custom operators can be added easily; simply create an instance of
 * {@code Expression.Operator} and add it to the expression. Parameters are the
 * operator string, its precedence and whether it is left associative. The
 * operator's {@code eval()} method will be called with the BigDecimal values of
 * the operands. All existing operators can also be overridden.
 * <br>
 * For example, add an operator {@code x >> n}, that moves the decimal point of
 * <i>x</i> <i>n</i> digits to the right:
 *
 * <pre>
 * Expression e = new Expression("2.1234 &gt;&gt; 2");
 *
 * e.addOperator(e.new Operator("&gt;&gt;", 30, true) {
 *     {@literal @}Override
 *     public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
 *         return v1.movePointRight(v2.toBigInteger().intValue());
 *     }
 * });
 *
 * e.eval(); // returns 212.34
 * </pre>
 *
 * <br>
 * <h2>Add Custom Functions</h2>
 *
 * Adding custom functions is as easy as adding custom operators. Create an
 * instance of {@code Expression.Function} and add it to the expression.
 * Parameters are the function name and the count of required parameters; a
 * <code>-1</code> as the number of parameters denotes a variable number of
 * arguments. The function's {@code eval()} method will be called with a list
 * of the BigDecimal parameters. All existing functions can also be overridden.
 * <br>
 * For example, add a function {@code average(a,b,c)}, that will calculate the
 * average value of a, b and c:
 *
 * <pre>
 * Expression e = new Expression("2 * average(12,4,8)");
 *
 * e.addFunction(e.new Function("average", 3) {
 *     {@literal @}Override
 *     public BigDecimal eval(List&lt;BigDecimal&gt; parameters) {
 *         BigDecimal sum = parameters.get(0).add(parameters.get(1)).add(parameters.get(2));
 *         return sum.divide(new BigDecimal(3));
 *     }
 * });
 *
 * e.eval(); // returns 16
 * </pre>
 *
 * The software is licensed under the MIT Open Source license (see LICENSE
 * file).
 * <br>
 * <ul>
 * <li>The <i>power of</i> operator (^) implementation was copied from
 * <a href="http://stackoverflow.com/questions/3579779/how-to-do-a-fractional-power-on-bigdecimal-in-java">
 * Stack Overflow</a>. Thanks to Gene Marin.</li>
 * <li>The SQRT() function implementation was taken from the book <i>The Java
 * Programmers Guide To numerical Computing</i> (Ronald Mak, 2002).</li>
 * </ul>
 *
 * @author Udo Klimaschewski (http://about.me/udo.klimaschewski)
 */
public class Expression {

    /**
     * Definition of PI as a constant, can be used in expressions as variable.
     */
    public static final BigDecimal PI = new BigDecimal(
            "3.1415926535897932384626433832795028841971693993751058209749445923078164062862089986280348253421170679");

    /**
     * The {@link MathContext} to use for calculations; replaced by
     * {@link #setPrecision(int)} and {@link #setRoundingMode(RoundingMode)}.
     */
    private MathContext mc;

    /**
     * The original infix expression; may be rewritten by
     * {@link #setVariable(String, String)} for non-numeric substitutions.
     */
    private String expression;

    /**
     * The cached RPN (Reverse Polish Notation) of the expression; built
     * lazily by {@link #getRPN()}, reset to <code>null</code> when the
     * expression text changes.
     */
    private List<String> rpn = null;

    /**
     * All defined operators with name and implementation.
     */
    private final Map<String, Operator> operators = new HashMap<String, Operator>();

    /**
     * All defined functions with name (upper-cased) and implementation.
     */
    private final Map<String, Function> functions = new HashMap<String, Function>();

    /**
     * All defined variables with name and value. Variable names are
     * case-sensitive, unlike function names.
     */
    private final Map<String, BigDecimal> variables = new HashMap<String, BigDecimal>();

    /**
     * What character to use for decimal separators.
     */
    private static final char decimalSeparator = '.';

    /**
     * What character to use for minus sign (negative values).
     */
    private static final char minusSign = '-';

    /**
     * Sentinel marking the start of a function's parameter list on the
     * evaluation stack, used for parsing varying numbers of function
     * parameters. Compared by identity (<code>==</code>), never by value.
     */
    private static final BigDecimal PARAMS_START = new BigDecimal(0);

    /**
     * The expression evaluators exception class.
     */
    public static class ExpressionException extends RuntimeException {
        private static final long serialVersionUID = 1118142866870779047L;

        public ExpressionException(String message) {
            super(message);
        }
    }

    /**
     * Abstract definition of a supported expression function. A function is
     * defined by a name, the number of parameters and the actual processing
     * implementation.
     */
    public abstract class Function {
        /**
         * Name of this function (stored upper-cased; lookups are
         * case-insensitive).
         */
        private String name;

        /**
         * Number of parameters expected for this function.
         * <code>-1</code> denotes a variable number of parameters.
         */
        private int numParams;

        /**
         * Creates a new function with given name and parameter count.
         *
         * @param name
         *            The name of the function.
         * @param numParams
         *            The number of parameters for this function.
         *            <code>-1</code> denotes a variable number of parameters.
         */
        public Function(String name, int numParams) {
            this.name = name.toUpperCase(Locale.ROOT);
            this.numParams = numParams;
        }

        public String getName() {
            return name;
        }

        public int getNumParams() {
            return numParams;
        }

        public boolean numParamsVaries() {
            return numParams < 0;
        }

        /**
         * Implementation for this function.
         *
         * @param parameters
         *            Parameters will be passed by the expression evaluator as a
         *            {@link List} of {@link BigDecimal} values.
         * @return The function must return a new {@link BigDecimal} value as a
         *         computing result.
         */
        public abstract BigDecimal eval(List<BigDecimal> parameters);
    }

    /**
     * Abstract definition of a supported operator. An operator is defined by
     * its name (pattern), precedence and if it is left- or right associative.
     */
    public abstract class Operator {
        /**
         * This operators name (pattern).
         */
        private String oper;

        /**
         * Operators precedence.
         */
        private int precedence;

        /**
         * Operator is left associative.
         */
        private boolean leftAssoc;

        /**
         * Creates a new operator.
         *
         * @param oper
         *            The operator name (pattern).
         * @param precedence
         *            The operators precedence.
         * @param leftAssoc
         *            <code>true</code> if the operator is left associative,
         *            else <code>false</code>.
         */
        public Operator(String oper, int precedence, boolean leftAssoc) {
            this.oper = oper;
            this.precedence = precedence;
            this.leftAssoc = leftAssoc;
        }

        public String getOper() {
            return oper;
        }

        public int getPrecedence() {
            return precedence;
        }

        public boolean isLeftAssoc() {
            return leftAssoc;
        }

        /**
         * Implementation for this operator.
         *
         * @param v1
         *            Operand 1.
         * @param v2
         *            Operand 2.
         * @return The result of the operation.
         */
        public abstract BigDecimal eval(BigDecimal v1, BigDecimal v2);
    }

    /**
     * Expression tokenizer that allows to iterate over a {@link String}
     * expression token by token. Blank characters will be skipped.
     */
    private class Tokenizer implements Iterator<String> {

        /**
         * Actual position in expression string.
         */
        private int pos = 0;

        /**
         * The original input expression.
         */
        private String input;

        /**
         * The previous token or <code>null</code> if none. Needed to decide
         * whether a '-' is a unary minus.
         */
        private String previousToken;

        /**
         * Creates a new tokenizer for an expression.
         *
         * @param input
         *            The expression string.
         */
        public Tokenizer(String input) {
            this.input = input.trim();
        }

        @Override
        public boolean hasNext() {
            return (pos < input.length());
        }

        /**
         * Peek at the next character, without advancing the iterator.
         *
         * @return The next character or character 0, if at end of string.
         */
        private char peekNextChar() {
            if (pos < (input.length() - 1)) {
                return input.charAt(pos + 1);
            } else {
                return 0;
            }
        }

        @Override
        public String next() {
            StringBuilder token = new StringBuilder();
            if (pos >= input.length()) {
                return previousToken = null;
            }
            char ch = input.charAt(pos);
            while (Character.isWhitespace(ch) && pos < input.length()) {
                ch = input.charAt(++pos);
            }
            if (Character.isDigit(ch)) {
                // Number literal; accepts scientific notation ('e'/'E' with
                // optional sign immediately after the exponent marker).
                while ((Character.isDigit(ch) || ch == decimalSeparator
                        || ch == 'e' || ch == 'E'
                        || (ch == minusSign && token.length() > 0
                            && ('e' == token.charAt(token.length() - 1)
                                || 'E' == token.charAt(token.length() - 1)))
                        || (ch == '+' && token.length() > 0
                            && ('e' == token.charAt(token.length() - 1)
                                || 'E' == token.charAt(token.length() - 1)))
                        ) && (pos < input.length())) {
                    token.append(input.charAt(pos++));
                    ch = pos == input.length() ? 0 : input.charAt(pos);
                }
            } else if (ch == minusSign
                    && Character.isDigit(peekNextChar())
                    && ("(".equals(previousToken) || ",".equals(previousToken)
                            || previousToken == null || operators
                                .containsKey(previousToken))) {
                // Unary minus: fold the sign into the following number token.
                token.append(minusSign);
                pos++;
                token.append(next());
            } else if (Character.isLetter(ch) || (ch == '_')) {
                // Identifier: function or variable name.
                while ((Character.isLetter(ch) || Character.isDigit(ch)
                        || (ch == '_')) && (pos < input.length())) {
                    token.append(input.charAt(pos++));
                    ch = pos == input.length() ? 0 : input.charAt(pos);
                }
            } else if (ch == '(' || ch == ')' || ch == ',') {
                token.append(ch);
                pos++;
            } else {
                // Operator: greedily consume symbol characters, but stop at a
                // '-' so that e.g. "<-3" splits into "<" and "-3".
                while (!Character.isLetter(ch) && !Character.isDigit(ch)
                        && ch != '_' && !Character.isWhitespace(ch)
                        && ch != '(' && ch != ')' && ch != ','
                        && (pos < input.length())) {
                    token.append(input.charAt(pos));
                    pos++;
                    ch = pos == input.length() ? 0 : input.charAt(pos);
                    if (ch == minusSign) {
                        break;
                    }
                }
                if (!operators.containsKey(token.toString())) {
                    throw new ExpressionException("Unknown operator '" + token
                            + "' at position " + (pos - token.length() + 1));
                }
            }
            return previousToken = token.toString();
        }

        @Override
        public void remove() {
            throw new ExpressionException("remove() not supported");
        }

        /**
         * Get the actual character position in the string.
         *
         * @return The actual character position.
         */
        public int getPos() {
            return pos;
        }
    }

    /**
     * Creates a new expression instance from an expression string with a given
     * default match context of {@link MathContext#DECIMAL32}.
     *
     * @param expression
     *            The expression. E.g. <code>"2.4*sin(3)/(2-4)"</code> or
     *            <code>"sin(y)&gt;0 &amp; max(z, 3)&gt;3"</code>
     */
    public Expression(String expression) {
        this(expression, MathContext.DECIMAL32);
    }

    /**
     * Creates a new expression instance from an expression string with a given
     * default match context.
     *
     * @param expression
     *            The expression. E.g. <code>"2.4*sin(3)/(2-4)"</code> or
     *            <code>"sin(y)&gt;0 &amp; max(z, 3)&gt;3"</code>
     * @param defaultMathContext
     *            The {@link MathContext} to use by default.
     */
    public Expression(String expression, MathContext defaultMathContext) {
        this.mc = defaultMathContext;
        this.expression = expression;
        addOperator(new Operator("+", 20, true) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return v1.add(v2, mc);
            }
        });
        addOperator(new Operator("-", 20, true) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return v1.subtract(v2, mc);
            }
        });
        addOperator(new Operator("*", 30, true) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return v1.multiply(v2, mc);
            }
        });
        addOperator(new Operator("/", 30, true) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return v1.divide(v2, mc);
            }
        });
        addOperator(new Operator("%", 30, true) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return v1.remainder(v2, mc);
            }
        });
        addOperator(new Operator("^", 40, false) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                /*-
                 * Thanks to Gene Marin:
                 * http://stackoverflow.com/questions/3579779/how-to-do-a-fractional-power-on-bigdecimal-in-java
                 */
                int signOf2 = v2.signum();
                double dn1 = v1.doubleValue();
                v2 = v2.multiply(new BigDecimal(signOf2)); // n2 is now positive
                BigDecimal remainderOf2 = v2.remainder(BigDecimal.ONE);
                BigDecimal n2IntPart = v2.subtract(remainderOf2);
                // Integer part exactly, fractional part via double math.
                BigDecimal intPow = v1.pow(n2IntPart.intValueExact(), mc);
                BigDecimal doublePow = new BigDecimal(Math.pow(dn1,
                        remainderOf2.doubleValue()));
                BigDecimal result = intPow.multiply(doublePow, mc);
                if (signOf2 == -1) {
                    result = BigDecimal.ONE.divide(result, mc.getPrecision(),
                            RoundingMode.HALF_UP);
                }
                return result;
            }
        });
        addOperator(new Operator("&&", 4, false) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                // compareTo, not equals: equals() is scale-sensitive, so
                // "0.0" would otherwise be treated as true.
                boolean b1 = v1.compareTo(BigDecimal.ZERO) != 0;
                boolean b2 = v2.compareTo(BigDecimal.ZERO) != 0;
                return b1 && b2 ? BigDecimal.ONE : BigDecimal.ZERO;
            }
        });
        addOperator(new Operator("||", 2, false) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                // compareTo, not equals: see "&&".
                boolean b1 = v1.compareTo(BigDecimal.ZERO) != 0;
                boolean b2 = v2.compareTo(BigDecimal.ZERO) != 0;
                return b1 || b2 ? BigDecimal.ONE : BigDecimal.ZERO;
            }
        });
        addOperator(new Operator(">", 10, false) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return v1.compareTo(v2) == 1 ? BigDecimal.ONE : BigDecimal.ZERO;
            }
        });
        addOperator(new Operator(">=", 10, false) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return v1.compareTo(v2) >= 0 ? BigDecimal.ONE : BigDecimal.ZERO;
            }
        });
        addOperator(new Operator("<", 10, false) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return v1.compareTo(v2) == -1 ? BigDecimal.ONE
                        : BigDecimal.ZERO;
            }
        });
        addOperator(new Operator("<=", 10, false) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return v1.compareTo(v2) <= 0 ? BigDecimal.ONE : BigDecimal.ZERO;
            }
        });
        addOperator(new Operator("=", 7, false) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return v1.compareTo(v2) == 0 ? BigDecimal.ONE : BigDecimal.ZERO;
            }
        });
        addOperator(new Operator("==", 7, false) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return operators.get("=").eval(v1, v2);
            }
        });
        addOperator(new Operator("!=", 7, false) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return v1.compareTo(v2) != 0 ? BigDecimal.ONE : BigDecimal.ZERO;
            }
        });
        addOperator(new Operator("<>", 7, false) {
            @Override
            public BigDecimal eval(BigDecimal v1, BigDecimal v2) {
                return operators.get("!=").eval(v1, v2);
            }
        });

        addFunction(new Function("NOT", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                boolean zero = parameters.get(0).compareTo(BigDecimal.ZERO) == 0;
                return zero ? BigDecimal.ONE : BigDecimal.ZERO;
            }
        });
        addFunction(new Function("IF", 3) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                // compareTo, not equals: see "&&".
                boolean isTrue = parameters.get(0).compareTo(BigDecimal.ZERO) != 0;
                return isTrue ? parameters.get(1) : parameters.get(2);
            }
        });
        addFunction(new Function("RANDOM", 0) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.random();
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("SIN", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.sin(Math.toRadians(parameters.get(0)
                        .doubleValue()));
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("COS", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.cos(Math.toRadians(parameters.get(0)
                        .doubleValue()));
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("TAN", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.tan(Math.toRadians(parameters.get(0)
                        .doubleValue()));
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("ASIN", 1) { // added by av
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.toDegrees(Math.asin(parameters.get(0)
                        .doubleValue()));
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("ACOS", 1) { // added by av
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.toDegrees(Math.acos(parameters.get(0)
                        .doubleValue()));
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("ATAN", 1) { // added by av
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.toDegrees(Math.atan(parameters.get(0)
                        .doubleValue()));
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("SINH", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.sinh(parameters.get(0).doubleValue());
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("COSH", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.cosh(parameters.get(0).doubleValue());
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("TANH", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.tanh(parameters.get(0).doubleValue());
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("RAD", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.toRadians(parameters.get(0).doubleValue());
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("DEG", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.toDegrees(parameters.get(0).doubleValue());
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("MAX", -1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                if (parameters.size() == 0) {
                    throw new ExpressionException(
                            "MAX requires at least one parameter");
                }
                BigDecimal max = null;
                for (BigDecimal parameter : parameters) {
                    if (max == null || parameter.compareTo(max) > 0) {
                        max = parameter;
                    }
                }
                return max;
            }
        });
        addFunction(new Function("MIN", -1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                if (parameters.size() == 0) {
                    throw new ExpressionException(
                            "MIN requires at least one parameter");
                }
                BigDecimal min = null;
                for (BigDecimal parameter : parameters) {
                    if (min == null || parameter.compareTo(min) < 0) {
                        min = parameter;
                    }
                }
                return min;
            }
        });
        addFunction(new Function("ABS", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                return parameters.get(0).abs(mc);
            }
        });
        addFunction(new Function("LOG", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.log(parameters.get(0).doubleValue());
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("LOG10", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                double d = Math.log10(parameters.get(0).doubleValue());
                return new BigDecimal(d, mc);
            }
        });
        addFunction(new Function("ROUND", 2) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                BigDecimal toRound = parameters.get(0);
                int precision = parameters.get(1).intValue();
                return toRound.setScale(precision, mc.getRoundingMode());
            }
        });
        addFunction(new Function("FLOOR", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                BigDecimal toRound = parameters.get(0);
                return toRound.setScale(0, RoundingMode.FLOOR);
            }
        });
        addFunction(new Function("CEILING", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                BigDecimal toRound = parameters.get(0);
                return toRound.setScale(0, RoundingMode.CEILING);
            }
        });
        addFunction(new Function("SQRT", 1) {
            @Override
            public BigDecimal eval(List<BigDecimal> parameters) {
                /*
                 * Newton iteration on BigInteger; from The Java Programmers
                 * Guide To numerical Computing (Ronald Mak, 2003).
                 */
                BigDecimal x = parameters.get(0);
                if (x.compareTo(BigDecimal.ZERO) == 0) {
                    return new BigDecimal(0);
                }
                if (x.signum() < 0) {
                    throw new ExpressionException(
                            "Argument to SQRT() function must not be negative");
                }
                BigInteger n = x.movePointRight(mc.getPrecision() << 1)
                        .toBigInteger();

                int bits = (n.bitLength() + 1) >> 1;
                BigInteger ix = n.shiftRight(bits);
                BigInteger ixPrev;

                do {
                    ixPrev = ix;
                    ix = ix.add(n.divide(ix)).shiftRight(1);
                    // Give other threads a chance to work;
                    Thread.yield();
                } while (ix.compareTo(ixPrev) != 0);

                return new BigDecimal(ix, mc.getPrecision());
            }
        });

        variables.put("PI", PI);
        variables.put("TRUE", BigDecimal.ONE);
        variables.put("FALSE", BigDecimal.ZERO);
    }

    /**
     * Is the string a number?
     *
     * @param st
     *            The string.
     * @return <code>true</code>, if the input string is a number.
     */
    private boolean isNumber(String st) {
        if (st.charAt(0) == minusSign && st.length() == 1)
            return false;
        if (st.charAt(0) == '+' && st.length() == 1)
            return false;
        if (st.charAt(0) == 'e' || st.charAt(0) == 'E')
            return false;
        for (char ch : st.toCharArray()) {
            if (!Character.isDigit(ch) && ch != minusSign
                    && ch != decimalSeparator && ch != 'e' && ch != 'E'
                    && ch != '+')
                return false;
        }
        return true;
    }

    /**
     * Implementation of the <i>Shunting Yard</i> algorithm to transform an
     * infix expression to a RPN expression.
     *
     * @param expression
     *            The input expression in infix.
     * @return A RPN representation of the expression, with each token as a list
     *         member.
     * @throws ExpressionException
     *             on mismatched parentheses, misplaced commas or unknown
     *             operators/functions.
     */
    private List<String> shuntingYard(String expression) {
        List<String> outputQueue = new ArrayList<String>();
        Stack<String> stack = new Stack<String>();

        Tokenizer tokenizer = new Tokenizer(expression);

        String lastFunction = null;
        String previousToken = null;
        while (tokenizer.hasNext()) {
            String token = tokenizer.next();
            if (isNumber(token)) {
                outputQueue.add(token);
            } else if (variables.containsKey(token)) {
                outputQueue.add(token);
            } else if (functions.containsKey(token.toUpperCase(Locale.ROOT))) {
                stack.push(token);
                lastFunction = token;
            } else if (Character.isLetter(token.charAt(0))) {
                stack.push(token);
            } else if (",".equals(token)) {
                // Parameter separator: flush operators up to the function's
                // opening parenthesis.
                while (!stack.isEmpty() && !"(".equals(stack.peek())) {
                    outputQueue.add(stack.pop());
                }
                if (stack.isEmpty()) {
                    throw new ExpressionException("Parse error for function '"
                            + lastFunction + "'");
                }
            } else if (operators.containsKey(token)) {
                Operator o1 = operators.get(token);
                String token2 = stack.isEmpty() ? null : stack.peek();
                while (operators.containsKey(token2)
                        && ((o1.isLeftAssoc() && o1.getPrecedence() <= operators
                                .get(token2).getPrecedence()) || (o1
                                .getPrecedence() < operators.get(token2)
                                .getPrecedence()))) {
                    outputQueue.add(stack.pop());
                    token2 = stack.isEmpty() ? null : stack.peek();
                }
                stack.push(token);
            } else if ("(".equals(token)) {
                if (previousToken != null) {
                    if (isNumber(previousToken)) {
                        throw new ExpressionException(
                                "Missing operator at character position "
                                        + tokenizer.getPos());
                    }
                    // if the ( is preceded by a valid function, then it
                    // denotes the start of a parameter list
                    if (functions.containsKey(previousToken
                            .toUpperCase(Locale.ROOT))) {
                        outputQueue.add(token);
                    }
                }
                stack.push(token);
            } else if (")".equals(token)) {
                while (!stack.isEmpty() && !"(".equals(stack.peek())) {
                    outputQueue.add(stack.pop());
                }
                if (stack.isEmpty()) {
                    // ExpressionException (not raw RuntimeException) for
                    // consistency with the rest of the parser.
                    throw new ExpressionException("Mismatched parentheses");
                }
                stack.pop();
                if (!stack.isEmpty()
                        && functions.containsKey(stack.peek().toUpperCase(
                                Locale.ROOT))) {
                    outputQueue.add(stack.pop());
                }
            }
            previousToken = token;
        }
        while (!stack.isEmpty()) {
            String element = stack.pop();
            if ("(".equals(element) || ")".equals(element)) {
                throw new ExpressionException("Mismatched parentheses");
            }
            if (!operators.containsKey(element)) {
                throw new ExpressionException(
                        "Unknown operator or function: " + element);
            }
            outputQueue.add(element);
        }
        return outputQueue;
    }

    /**
     * Evaluates the expression.
     *
     * @return The result of the expression.
     */
    public BigDecimal eval() {

        Stack<BigDecimal> stack = new Stack<BigDecimal>();

        for (String token : getRPN()) {
            if (operators.containsKey(token)) {
                BigDecimal v1 = stack.pop();
                BigDecimal v2 = stack.pop();
                stack.push(operators.get(token).eval(v2, v1));
            } else if (variables.containsKey(token)) {
                stack.push(variables.get(token).round(mc));
            } else if (functions.containsKey(token.toUpperCase(Locale.ROOT))) {
                Function f = functions.get(token.toUpperCase(Locale.ROOT));
                ArrayList<BigDecimal> p = new ArrayList<BigDecimal>(
                        !f.numParamsVaries() ? f.getNumParams() : 0);
                // pop parameters off the stack until we hit the start of
                // this function's parameter list
                while (!stack.isEmpty() && stack.peek() != PARAMS_START) {
                    p.add(0, stack.pop());
                }
                // Guard against an empty stack before peeking, otherwise an
                // EmptyStackException would escape instead of a clean error.
                if (!stack.isEmpty() && stack.peek() == PARAMS_START) {
                    stack.pop();
                }
                if (!f.numParamsVaries() && p.size() != f.getNumParams()) {
                    throw new ExpressionException("Function " + token
                            + " expected " + f.getNumParams()
                            + " parameters, got " + p.size());
                }
                BigDecimal fResult = f.eval(p);
                stack.push(fResult);
            } else if ("(".equals(token)) {
                stack.push(PARAMS_START);
            } else {
                stack.push(new BigDecimal(token, mc));
            }
        }
        return stack.pop().stripTrailingZeros();
    }

    /**
     * Sets the precision for expression evaluation.
     *
     * @param precision
     *            The new precision.
     *
     * @return The expression, allows to chain methods.
     */
    public Expression setPrecision(int precision) {
        this.mc = new MathContext(precision);
        return this;
    }

    /**
     * Sets the rounding mode for expression evaluation.
     *
     * @param roundingMode
     *            The new rounding mode.
     * @return The expression, allows to chain methods.
     */
    public Expression setRoundingMode(RoundingMode roundingMode) {
        this.mc = new MathContext(mc.getPrecision(), roundingMode);
        return this;
    }

    /**
     * Adds an operator to the list of supported operators.
     *
     * @param operator
     *            The operator to add.
     * @return The previous operator with that name, or <code>null</code> if
     *         there was none.
     */
    public Operator addOperator(Operator operator) {
        return operators.put(operator.getOper(), operator);
    }

    /**
     * Adds a function to the list of supported functions
     *
     * @param function
     *            The function to add.
     * @return The previous operator with that name, or <code>null</code> if
     *         there was none.
     */
    public Function addFunction(Function function) {
        return functions.put(function.getName(), function);
    }

    /**
     * Sets a variable value.
     *
     * @param variable
     *            The variable name.
     * @param value
     *            The variable value.
     * @return The expression, allows to chain methods.
     */
    public Expression setVariable(String variable, BigDecimal value) {
        variables.put(variable, value);
        return this;
    }

    /**
     * Sets a variable value. A numeric value is stored directly; any other
     * value is substituted into the expression text, which invalidates the
     * cached RPN.
     *
     * @param variable
     *            The variable to set.
     * @param value
     *            The variable value.
     * @return The expression, allows to chain methods.
     */
    public Expression setVariable(String variable, String value) {
        if (isNumber(value))
            variables.put(variable, new BigDecimal(value));
        else {
            // NOTE(review): variable/value are used as raw regex pattern and
            // replacement here — names containing regex metacharacters would
            // misbehave; confirm callers only use plain identifiers.
            expression = expression.replaceAll("\\b" + variable + "\\b", "("
                    + value + ")");
            rpn = null;
        }
        return this;
    }

    /**
     * Sets a variable value.
     *
     * @param variable
     *            The variable to set.
     * @param value
     *            The variable value.
     * @return The expression, allows to chain methods.
     */
    public Expression with(String variable, BigDecimal value) {
        return setVariable(variable, value);
    }

    /**
     * Sets a variable value.
     *
     * @param variable
     *            The variable to set.
     * @param value
     *            The variable value.
     * @return The expression, allows to chain methods.
     */
    public Expression and(String variable, String value) {
        return setVariable(variable, value);
    }

    /**
     * Sets a variable value.
     *
     * @param variable
     *            The variable to set.
     * @param value
     *            The variable value.
     * @return The expression, allows to chain methods.
     */
    public Expression and(String variable, BigDecimal value) {
        return setVariable(variable, value);
    }

    /**
     * Sets a variable value.
     *
     * @param variable
     *            The variable to set.
     * @param value
     *            The variable value.
     * @return The expression, allows to chain methods.
     */
    public Expression with(String variable, String value) {
        return setVariable(variable, value);
    }

    /**
     * Get an iterator for this expression, allows iterating over an expression
     * token by token.
     *
     * @return A new iterator instance for this expression.
     */
    public Iterator<String> getExpressionTokenizer() {
        return new Tokenizer(this.expression);
    }

    /**
     * Cached access to the RPN notation of this expression, ensures only one
     * calculation of the RPN per expression instance. If no cached instance
     * exists, a new one will be created and put to the cache.
     *
     * @return The cached RPN instance.
     */
    private List<String> getRPN() {
        if (rpn == null) {
            rpn = shuntingYard(this.expression);
            validate(rpn);
        }
        return rpn;
    }

    /**
     * Check that the expression have enough numbers and variables to fit the
     * requirements of the operators and functions, also check
     * for only 1 result stored at the end of the evaluation.
     */
    private void validate(List<String> rpn) {
        /*-
         * Thanks to Norman Ramsey:
         * http://stackoverflow.com/questions/789847/postfix-notation-validation
         */
        int counter = 0;
        Stack<Integer> params = new Stack<Integer>();
        for (String token : rpn) {
            if ("(".equals(token)) {
                // is this a nested function call?
                if (!params.isEmpty()) {
                    // increment the current function's param count
                    // (the return of the nested function call
                    // will be a parameter for the current function)
                    params.set(params.size() - 1, params.peek() + 1);
                }
                // start a new parameter count
                params.push(0);
            } else if (!params.isEmpty()) {
                if (functions.containsKey(token.toUpperCase(Locale.ROOT))) {
                    // remove the parameters and the ( from the counter
                    counter -= params.pop() + 1;
                } else {
                    // increment the current function's param count
                    params.set(params.size() - 1, params.peek() + 1);
                }
            } else if (operators.containsKey(token)) {
                // we only have binary operators
                counter -= 2;
            }
            if (counter < 0) {
                throw new ExpressionException("Too many operators or functions at: "
                        + token);
            }
            counter++;
        }
        if (counter > 1) {
            throw new ExpressionException("Too many numbers or variables");
        } else if (counter < 1) {
            throw new ExpressionException("Empty expression");
        }
    }

    /**
     * Get a string representation of the RPN (Reverse Polish Notation) for this
     * expression.
     *
     * @return A string with the RPN representation for this expression.
     */
    public String toRPN() {
        StringBuilder result = new StringBuilder();
        for (String st : getRPN()) {
            if (result.length() != 0)
                result.append(" ");
            result.append(st);
        }
        return result.toString();
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.usecases;

import static org.junit.Assert.assertTrue;

import java.io.File;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.Vector;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageProducer;
import javax.jms.Session;

import org.apache.activemq.ActiveMQConnection;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.ActiveMQMessageConsumer;
import org.apache.activemq.broker.BrokerFactory;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.region.policy.PolicyEntry;
import org.apache.activemq.broker.region.policy.PolicyMap;
import org.apache.activemq.command.ActiveMQTopic;
import org.apache.activemq.leveldb.LevelDBStore;
import org.apache.activemq.store.kahadb.KahaDBPersistenceAdapter;
import
org.apache.activemq.util.ThreadTracker;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Long-running durable-subscription soak test that sends transacted message
 * batches while clients go on/offline and the broker is optionally restarted.
 * Long form of the test that found AMQ-3805 (see the {@code @Ignore} note on
 * {@link #testProcess()} for the short versions).
 */
public class DurableSubProcessConcurrentCommitActivateNoDuplicateTest {
    private static final Logger LOG = LoggerFactory.getLogger(DurableSubProcessConcurrentCommitActivateNoDuplicateTest.class);
    // Total wall-clock time the scenario runs.
    public static final long RUNTIME = 5 * 60 * 1000;
    public static final int SERVER_SLEEP = 500; // max
    public static final int CARGO_SIZE = 600; // max
    public static final int MAX_CLIENTS = 2;
    // NOTE(review): Random here is a project-local range type (min, max),
    // not java.util.Random — defined elsewhere in this file.
    public static final Random CLIENT_LIFETIME = new Random(30 * 1000, 2 * 60 * 1000);
    public static final Random CLIENT_ONLINE = new Random(30 * 1000, 40 * 1000);
    public static final Random CLIENT_OFFLINE = new Random(1 * 1000, 10 * 1000);
    public static final int CLIENT_OFFLINE_DURING_COMMIT = 2; // random(x) == x
    public static final Persistence PERSISTENT_ADAPTER = Persistence.KAHADB;
    // Negative value disables periodic broker restarts (see testProcess()).
    public static final long BROKER_RESTART = -2 * 60 * 1000;
    public static final boolean ALLOW_SUBSCRIPTION_ABANDONMENT = true;
    public static final boolean CHECK_REDELIVERY = true;

    private BrokerService broker;
    private ActiveMQTopic topic;

    private ClientManager clientManager;
    private Server server;
    private HouseKeeper houseKeeper;

    // Fair lock: writers (broker restart/shutdown) exclude all readers
    // (producers/consumers) so components quiesce before a restart.
    private final ReentrantReadWriteLock processLock = new ReentrantReadWriteLock(
            true);
    private int restartCount = 0;
    private final AtomicInteger onlineCount = new AtomicInteger(0);

    // Failures from any worker thread are collected here and asserted on at
    // the end of the test.
    static final Vector<Throwable> exceptions = new Vector<Throwable>();

    /**
     * Drives the whole scenario: starts the server and client manager, lets
     * them run for {@link #RUNTIME} (restarting the broker periodically when
     * {@link #BROKER_RESTART} is positive), then winds everything down and
     * asserts that no worker thread recorded an exception.
     */
    // long form of test that found https://issues.apache.org/jira/browse/AMQ-3805
    @Ignore ("short version in org.apache.activemq.usecases.DurableSubscriptionOfflineTest.testNoDuplicateOnConcurrentSendTranCommitAndActivate" +
             " and org.apache.activemq.usecases.DurableSubscriptionOfflineTest.testOrderOnActivateDeactivate")
    @Test
    public void testProcess() {
        try {
            server.start();
            clientManager.start();

            if (ALLOW_SUBSCRIPTION_ABANDONMENT)
                houseKeeper.start();

            if (BROKER_RESTART <= 0)
                Thread.sleep(RUNTIME);
            else {
                long end = System.currentTimeMillis() + RUNTIME;
                while (true) {
                    long now = System.currentTimeMillis();
                    if (now > end)
                        break;
                    // Sleep until the next scheduled restart (or the end of
                    // the run, whichever comes first).
                    now = end - now;
                    now = now < BROKER_RESTART ? now : BROKER_RESTART;
                    Thread.sleep(now);
                    restartBroker();
                }
            }
        } catch (Throwable e) {
            exit("ProcessTest.testProcess failed.", e);
        }

        //allow the clients to unsubscribe before finishing
        clientManager.setEnd(true);
        try {
            Thread.sleep(60 * 1000);
        } catch (InterruptedException e) {
            exit("ProcessTest.testProcess failed.", e);
        }

        server.done = true;
        try {
            server.join(60*1000);
        } catch (Exception ignored) {}

        // NOTE(review): write lock is acquired but never released here; it
        // blocks until all readers finish and the test ends immediately
        // after — confirm this is intentional.
        processLock.writeLock().lock();
        assertTrue("no exceptions: " + exceptions, exceptions.isEmpty());
        LOG.info("DONE.");
    }

    /**
     * Restarts the broker under the write lock so no producer/consumer is
     * active during the destroy/start cycle.
     */
    private void restartBroker() throws Exception {
        LOG.info("Broker restart: waiting for components.");

        processLock.writeLock().lock();
        try {
            destroyBroker();
            startBroker(false);

            restartCount++;
            LOG.info("Broker restarted. count: " + restartCount);
        } finally {
            processLock.writeLock().unlock();
        }
    }

    /**
     * Creates batch of messages in a transaction periodically. The last message
     * in the transaction is always a special message that contains info about
     * the whole transaction.
     * <p>
     * Notifies the clients about the created messages also.
     */
    final class Server extends Thread {

        // VM transport URL with test-tuned connection options.
        final String url = "vm://"
                + DurableSubProcessConcurrentCommitActivateNoDuplicateTest.getName()
                + "?"
+ "jms.redeliveryPolicy.maximumRedeliveries=2&jms.redeliveryPolicy.initialRedeliveryDelay=500&" + "jms.producerWindowSize=20971520&jms.prefetchPolicy.all=100&" + "jms.copyMessageOnSend=false&jms.disableTimeStampsByDefault=false&" + "jms.alwaysSyncSend=true&jms.dispatchAsync=false&" + "jms.watchTopicAdvisories=false&" + "waitForStart=200&create=false"; final ConnectionFactory cf = new ActiveMQConnectionFactory(url); final Object sendMutex = new Object(); final String[] cargos = new String[500]; int transRover = 0; int messageRover = 0; public volatile int committingTransaction = -1; public boolean done = false; public Server() { super("Server"); setPriority(Thread.MIN_PRIORITY); setDaemon(true); } @Override public void run() { try { while (!done) { Thread.sleep(1000); processLock.readLock().lock(); try { send(); } finally { processLock.readLock().unlock(); } } } catch (Throwable e) { exit("Server.run failed", e); } } public void send() throws JMSException { // do not create new clients now // ToDo: Test this case later. synchronized (sendMutex) { int trans = ++transRover; boolean relevantTrans = true; //random(2) > 1; ClientType clientType = relevantTrans ? ClientType .randomClientType() : null; // sends this types //int count = random(500, 700); int count = 1000; LOG.info("Sending Trans[id=" + trans + ", count=" + count + ", clientType=" + clientType + ", firstID=" + (messageRover+1) + "]"); Connection con = cf.createConnection(); Session sess = con .createSession(true, Session.SESSION_TRANSACTED); MessageProducer prod = sess.createProducer(null); for (int i = 0; i < count; i++) { Message message = sess.createMessage(); message.setIntProperty("ID", ++messageRover); message.setIntProperty("TRANS", trans); String type = clientType != null ? 
clientType .randomMessageType() : ClientType .randomNonRelevantMessageType(); message.setStringProperty("TYPE", type); if (CARGO_SIZE > 0) message.setStringProperty("CARGO", getCargo(random(CARGO_SIZE))); prod.send(topic, message); clientManager.onServerMessage(message); } Message message = sess.createMessage(); message.setIntProperty("ID", ++messageRover); message.setIntProperty("TRANS", trans); message.setBooleanProperty("COMMIT", true); message.setBooleanProperty("RELEVANT", relevantTrans); prod.send(topic, message); clientManager.onServerMessage(message); committingTransaction = trans; sess.commit(); committingTransaction = -1; LOG.info("Committed Trans[id=" + trans + ", count=" + count + ", clientType=" + clientType + "], ID=" + messageRover); sess.close(); con.close(); } } private String getCargo(int length) { if (length == 0) return null; if (length < cargos.length) { String result = cargos[length]; if (result == null) { result = getCargoImpl(length); cargos[length] = result; } return result; } return getCargoImpl(length); } private String getCargoImpl(int length) { StringBuilder sb = new StringBuilder(length); for (int i = length; --i >= 0;) { sb.append('a'); } return sb.toString(); } } /** * Clients listen on different messages in the topic. The 'TYPE' property * helps the client to select the proper messages. */ private enum ClientType { A("a", "b", "c"), B("c", "d", "e"), C("d", "e", "f"), D("g", "h"); public final String[] messageTypes; public final HashSet<String> messageTypeSet; public final String selector; ClientType(String... 
messageTypes) { this.messageTypes = messageTypes; messageTypeSet = new HashSet<String>(Arrays.asList(messageTypes)); StringBuilder sb = new StringBuilder("TYPE in ("); for (int i = 0; i < messageTypes.length; i++) { if (i > 0) sb.append(", "); sb.append('\'').append(messageTypes[i]).append('\''); } sb.append(')'); selector = sb.toString(); } public static ClientType randomClientType() { return values()[DurableSubProcessConcurrentCommitActivateNoDuplicateTest .random(values().length - 1)]; } public final String randomMessageType() { return messageTypes[DurableSubProcessConcurrentCommitActivateNoDuplicateTest .random(messageTypes.length - 1)]; } public static String randomNonRelevantMessageType() { return Integer .toString(DurableSubProcessConcurrentCommitActivateNoDuplicateTest.random(20)); } public final boolean isRelevant(String messageType) { return messageTypeSet.contains(messageType); } @Override public final String toString() { return this.name() /* + '[' + selector + ']' */; } } /** * Creates new cliens. 
*/ private final class ClientManager extends Thread { private int clientRover = 0; private final CopyOnWriteArrayList<Client> clients = new CopyOnWriteArrayList<Client>(); private boolean end; public ClientManager() { super("ClientManager"); setDaemon(true); } public synchronized void setEnd(boolean end) { this.end = end; } @Override public void run() { try { while (true) { if (clients.size() < MAX_CLIENTS && !end) { processLock.readLock().lock(); try { createNewClient(); } finally { processLock.readLock().unlock(); } } int size = clients.size(); //sleepRandom(1000, 4000); Thread.sleep(100); } } catch (Throwable e) { exit("ClientManager.run failed.", e); } } private void createNewClient() throws JMSException { ClientType type = ClientType.randomClientType(); Client client; synchronized (server.sendMutex) { client = new Client(++clientRover, type, CLIENT_LIFETIME, CLIENT_ONLINE, CLIENT_OFFLINE); clients.add(client); } client.start(); LOG.info(client.toString() + " created. " + this); } public void removeClient(Client client) { clients.remove(client); } public void onServerMessage(Message message) throws JMSException { for (Client client : clients) { client.onServerMessage(message); } } @Override public String toString() { StringBuilder sb = new StringBuilder("ClientManager[count="); sb.append(clients.size()); sb.append(", clients="); boolean sep = false; for (Client client : clients) { if (sep) sb.append(", "); else sep = true; sb.append(client.toString()); } sb.append(']'); return sb.toString(); } } /** * Consumes massages from a durable subscription. Goes online/offline * periodically. Checks the incoming messages against the sent messages of * the server. */ private final class Client extends Thread { String url = "failover:(tcp://localhost:61656?wireFormat.maxInactivityDuration=0)?" 
+ "jms.watchTopicAdvisories=false&" + "jms.alwaysSyncSend=true&jms.dispatchAsync=true&" + "jms.producerWindowSize=20971520&" + "jms.copyMessageOnSend=false&" + "jms.sendAcksAsync=false&" + "initialReconnectDelay=100&maxReconnectDelay=30000&" + "useExponentialBackOff=true"; final ConnectionFactory cf = new ActiveMQConnectionFactory(url); public static final String SUBSCRIPTION_NAME = "subscription"; private final int id; private final String conClientId; private final Random lifetime; private final Random online; private final Random offline; private final ClientType clientType; private final String selector; private final ConcurrentLinkedQueue<Message> waitingList = new ConcurrentLinkedQueue<Message>(); private final HashSet<Integer> processed = CHECK_REDELIVERY ? new HashSet<Integer>( 10000) : null; private ActiveMQMessageConsumer consumer = null; public Client(int id, ClientType clientType, Random lifetime, Random online, Random offline) throws JMSException { super("Client" + id); setDaemon(true); this.id = id; conClientId = "cli" + id; this.clientType = clientType; selector = "(COMMIT = true and RELEVANT = true) or " + clientType.selector; this.lifetime = lifetime; this.online = online; this.offline = offline; subscribe(); } @Override public void run() { long end = System.currentTimeMillis() + 60000; try { boolean sleep = false; while (true) { long max = end - System.currentTimeMillis(); if (max <= 0) break; /* if (sleep) offline.sleepRandom(); else sleep = true; */ Thread.sleep(100); processLock.readLock().lock(); onlineCount.incrementAndGet(); try { process(online.next()); } finally { onlineCount.decrementAndGet(); processLock.readLock().unlock(); } } if (!ALLOW_SUBSCRIPTION_ABANDONMENT || random(1) > 0) unsubscribe(); else { LOG.info("Client abandon the subscription. 
" + this); // housekeeper should sweep these abandoned subscriptions houseKeeper.abandonedSubscriptions.add(conClientId); } } catch (Throwable e) { exit(toString() + " failed.", e); } clientManager.removeClient(this); LOG.info(toString() + " DONE."); } private void process(long millis) throws JMSException { //long end = System.currentTimeMillis() + millis; long end = System.currentTimeMillis() + 200; long hardEnd = end + 20000; // wait to finish the transaction. boolean inTransaction = false; int transCount = 0; Connection con = openConnection(); Session sess = con.createSession(false, Session.CLIENT_ACKNOWLEDGE); consumer = (ActiveMQMessageConsumer) sess.createDurableSubscriber(topic, SUBSCRIPTION_NAME, selector, false); LOG.info(toString() + " ONLINE."); try { do { long max = end - System.currentTimeMillis(); if (max <= 0) { if (!inTransaction) { LOG.info(toString() + " done after no work!"); break; } max = hardEnd - System.currentTimeMillis(); if (max <= 0) exit("" + this + " failed: Transaction is not finished."); } Message message = consumer.receive(max); if (message == null) continue; onClientMessage(message); if (message.propertyExists("COMMIT")) { message.acknowledge(); // CLIENT_ACKNOWLEDGE int trans = message.getIntProperty("TRANS"); LOG.info("Received Trans[id=" + trans + ", count=" + transCount + "] in " + this + "."); inTransaction = false; transCount = 0; int committing = server.committingTransaction; if (committing == trans) { LOG.info("Going offline during transaction commit. messageID=" + message.getIntProperty("ID")); break; } } else { inTransaction = true; transCount++; if (1 == transCount) { LOG.info("In Trans[id=" + message.getIntProperty("TRANS") + "] first ID=" + message.getIntProperty("ID")); } } } while (true); } finally { sess.close(); con.close(); LOG.info(toString() + " OFFLINE."); // Check if the messages are in the waiting // list for long time. 
Message topMessage = waitingList.peek(); if (topMessage != null) checkDeliveryTime(topMessage); } } public void onServerMessage(Message message) throws JMSException { if (Boolean.TRUE.equals(message.getObjectProperty("COMMIT"))) { if (Boolean.TRUE.equals(message.getObjectProperty("RELEVANT"))) waitingList.add(message); } else { String messageType = message.getStringProperty("TYPE"); if (clientType.isRelevant(messageType)) waitingList.add(message); } } public void onClientMessage(Message message) { Message serverMessage = waitingList.poll(); try { Integer receivedId = (Integer) message.getObjectProperty("ID"); if (processed != null && processed.contains(receivedId)) LOG.info("! Message has been processed before. " + this + " redeliveredFlag=" + message.getJMSRedelivered() + ", message = " + message); if (serverMessage == null) exit("" + this + " failed: There is no next server message, but received: " + message); Integer serverId = (Integer) serverMessage .getObjectProperty("ID"); if (receivedId == null || serverId == null) exit("" + this + " failed: message ID not found.\r\n" + " received: " + message + "\r\n" + " server: " + serverMessage); if (!serverId.equals(receivedId)) { StringBuilder missingList = new StringBuilder(); Object lastTrans = null; int transCount = 0; Message nextServerMessage = serverMessage; do { Integer nextServerId = (Integer) nextServerMessage.getObjectProperty("ID"); if (nextServerId.equals(receivedId)) { if (lastTrans != null) missingList.append("Missing TRANS=").append(lastTrans).append(", size=").append(transCount).append("\r\n"); break; } Object trans = nextServerMessage.getObjectProperty("TRANS"); if (!trans.equals(lastTrans)) { if (lastTrans != null) missingList.append("Missing TRANS=").append(lastTrans).append(", size=").append(transCount).append("\r\n"); lastTrans = trans; transCount = 1; } else transCount++; } while ((nextServerMessage = waitingList.poll()) != null); exit("Missing messages!\r\n" + missingList + "Received message: " 
+ message + "\r\n" + "Expected message: " + serverMessage); } checkDeliveryTime(message); if (processed != null) processed.add(receivedId); } catch (Throwable e) { exit("" + this + ".onClientMessage failed.\r\n" + " received: " + message + "\r\n" + " server: " + serverMessage, e); } } /** * Checks if the message was not delivered fast enough. */ public void checkDeliveryTime(Message message) throws JMSException { long creation = message.getJMSTimestamp(); long min = System.currentTimeMillis() - (offline.max + online.min) * (BROKER_RESTART > 0 ? 4 : 1); if (false && min > creation) { SimpleDateFormat df = new SimpleDateFormat("HH:mm:ss.SSS"); exit("" + this + ".checkDeliveryTime failed. Message time: " + df.format(new Date(creation)) + ", min: " + df.format(new Date(min)) + "\r\n" + message); } } private Connection openConnection() throws JMSException { Connection con = cf.createConnection(); con.setClientID(conClientId); ((ActiveMQConnection) con).setCloseTimeout(60000); con.start(); return con; } private void subscribe() throws JMSException { processLock.readLock().lock(); try { Connection con = openConnection(); Session session = con .createSession(false, Session.AUTO_ACKNOWLEDGE); session.createDurableSubscriber(topic, SUBSCRIPTION_NAME, selector, true); session.close(); con.close(); } finally { processLock.readLock().unlock(); } } private void unsubscribe() throws JMSException { processLock.readLock().lock(); LOG.info("Unsubscribe: " + this); try { Connection con = openConnection(); Session session = con .createSession(false, Session.AUTO_ACKNOWLEDGE); session.unsubscribe(SUBSCRIPTION_NAME); session.close(); con.close(); } finally { processLock.readLock().unlock(); } } @Override public String toString() { return "Client[id=" + id + ", type=" + clientType + "] consumerId=" + (consumer != null ? consumer.getConsumerId() : "null"); } } /** * Sweeps out not-used durable subscriptions. 
*/ private final class HouseKeeper extends Thread { private HouseKeeper() { super("HouseKeeper"); setDaemon(true); } public final CopyOnWriteArrayList<String> abandonedSubscriptions = new CopyOnWriteArrayList<String>(); @Override public void run() { while (true) { try { Thread.sleep(3 * 60 * 1000); processLock.readLock().lock(); try { sweep(); } finally { processLock.readLock().unlock(); } } catch (InterruptedException ex) { break; } catch (Throwable e) { Exception log = new Exception("HouseKeeper failed.", e); log.printStackTrace(); } } } private void sweep() throws Exception { LOG.info("Housekeeper sweeping."); int closed = 0; ArrayList<String> sweeped = new ArrayList<String>(); try { for (String clientId : abandonedSubscriptions) { LOG.info("Sweeping out subscription of " + clientId + "."); broker.getAdminView().destroyDurableSubscriber(clientId, Client.SUBSCRIPTION_NAME); sweeped.add(clientId); closed++; } } catch (Exception ignored) { LOG.info("Ex on destroy sub " + ignored); } finally { abandonedSubscriptions.removeAll(sweeped); } LOG.info("Housekeeper sweeped out " + closed + " subscriptions."); } } public static int random(int max) { return (int) (Math.random() * (max + 1)); } public static int random(int min, int max) { return random(max - min) + min; } public static void sleepRandom(int maxMillis) throws InterruptedException { Thread.sleep(random(maxMillis)); } public static void sleepRandom(int minMillis, int maxMillis) throws InterruptedException { Thread.sleep(random(minMillis, maxMillis)); } public static final class Random { final int min; final int max; Random(int min, int max) { this.min = min; this.max = max; } public int next() { return random(min, max); } public void sleepRandom() throws InterruptedException { DurableSubProcessConcurrentCommitActivateNoDuplicateTest.sleepRandom(min, max); } } public static void exit(String message) { exit(message, null); } public static void exit(String message, Throwable e) { Throwable cause = new 
RuntimeException(message, e); LOG.error(message, cause); exceptions.add(cause); ThreadTracker.result(); //fail(cause.toString()); System.exit(-9); } @Before public void setUp() throws Exception { topic = new ActiveMQTopic("TopicT"); startBroker(); clientManager = new ClientManager(); server = new Server(); houseKeeper = new HouseKeeper(); } @After public void tearDown() throws Exception { destroyBroker(); } private enum Persistence { MEMORY, LEVELDB, KAHADB } private void startBroker() throws Exception { startBroker(true); } private void startBroker(boolean deleteAllMessages) throws Exception { if (broker != null) return; broker = BrokerFactory.createBroker("broker:(vm://" + getName() + ")"); broker.setBrokerName(getName()); broker.setAdvisorySupport(false); broker.setDeleteAllMessagesOnStartup(deleteAllMessages); switch (PERSISTENT_ADAPTER) { case MEMORY: broker.setPersistent(false); break; case LEVELDB: File amqData = new File("activemq-data/" + getName() + "-leveldb"); if (deleteAllMessages) delete(amqData); broker.setPersistent(true); LevelDBStore amq = new LevelDBStore(); amq.setDirectory(amqData); broker.setPersistenceAdapter(amq); break; case KAHADB: File kahadbData = new File("activemq-data/" + getName() + "-kahadb"); if (deleteAllMessages) delete(kahadbData); broker.setPersistent(true); KahaDBPersistenceAdapter kahadb = new KahaDBPersistenceAdapter(); kahadb.setDirectory(kahadbData); kahadb.setJournalMaxFileLength(5 * 1024 * 1024); broker.setPersistenceAdapter(kahadb); break; } broker.addConnector("tcp://localhost:61656"); broker.getSystemUsage().getMemoryUsage().setLimit(256 * 1024 * 1024); broker.getSystemUsage().getTempUsage().setLimit(256 * 1024 * 1024); broker.getSystemUsage().getStoreUsage().setLimit(1024 * 1024 * 1024); PolicyMap policyMap = new PolicyMap(); PolicyEntry defaultEntry = new PolicyEntry(); defaultEntry.setMaxAuditDepth(20000); policyMap.setDefaultEntry(defaultEntry); broker.setDestinationPolicy(policyMap); broker.start(); } protected 
static String getName() { return "DurableSubProcessWithRestartTest"; } private static boolean delete(File path) { if (path == null) return true; if (path.isDirectory()) { for (File file : path.listFiles()) { delete(file); } } return path.delete(); } private void destroyBroker() throws Exception { if (broker == null) return; broker.stop(); broker = null; } }
/*
 * Copyright (c) 2015 PocketHub
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.pockethub.android.persistence;

import android.accounts.Account;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;

import com.github.pockethub.android.core.GitHubRequest;
import com.google.auto.factory.AutoFactory;
import com.google.auto.factory.Provided;
import com.meisolsson.githubsdk.core.ServiceGenerator;
import com.meisolsson.githubsdk.model.Page;
import com.meisolsson.githubsdk.model.Permissions;
import com.meisolsson.githubsdk.model.Repository;
import com.meisolsson.githubsdk.model.User;
import com.meisolsson.githubsdk.service.activity.WatchingService;
import com.meisolsson.githubsdk.service.repositories.RepositoryService;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;

import javax.inject.Provider;

import retrofit2.Response;

/**
 * Cache of repositories under a given organization
 */
@AutoFactory
public class OrganizationRepositories implements PersistableResource<Repository> {

    private final User org;

    private final Context context;

    private final Provider<Account> accountProvider;

    /**
     * Create repositories cache for a given organization
     *
     * @param orgs organization (or user) whose repositories are cached
     * @param context context used to build GitHub SDK services
     * @param accountProvider provider of the currently signed-in account
     */
    public OrganizationRepositories(User orgs, @Provided Context context,
            @Provided Provider<Account> accountProvider) {
        this.org = orgs;
        this.context = context;
        this.accountProvider = accountProvider;
    }

    @Override
    public Cursor getCursor(SQLiteDatabase readableDatabase) {
        SQLiteQueryBuilder builder = new SQLiteQueryBuilder();
        builder.setTables("repos JOIN users ON (repos.ownerId = users.id)");
        // Column order here defines the indices that loadFrom() reads.
        return builder.query(readableDatabase, new String[] { "repos.repoId",
                "repos.name", "users.id", "users.name", "users.avatarurl",
                "repos.private", "repos.fork", "repos.description",
                "repos.forks", "repos.watchers", "repos.language",
                "repos.hasIssues", "repos.mirrorUrl",
                "repos.permissions_admin", "repos.permissions_pull",
                "repos.permissions_push" }, "repos.orgId=?",
                new String[] { Integer.toString(org.id().intValue()) }, null,
                null, null);
    }

    @Override
    public Repository loadFrom(Cursor cursor) {
        User owner = User.builder()
                .login(cursor.getString(3))
                .id(cursor.getLong(2))
                .avatarUrl(cursor.getString(4))
                .build();

        // FIX: getCursor() projects permissions_pull at index 14 and
        // permissions_push at index 15; pull/push were previously swapped,
        // so cached repos reported the wrong pull and push permissions.
        Permissions permissions = Permissions.builder()
                .admin(cursor.getInt(13) == 1)
                .pull(cursor.getInt(14) == 1)
                .push(cursor.getInt(15) == 1)
                .build();

        return Repository.builder()
                .owner(owner)
                .name(cursor.getString(1))
                .id(cursor.getLong(0))
                .isPrivate(cursor.getInt(5) == 1)
                .isFork(cursor.getInt(6) == 1)
                .description(cursor.getString(7))
                .forksCount(cursor.getInt(8))
                .watchersCount(cursor.getInt(9))
                .language(cursor.getString(10))
                .hasIssues(cursor.getInt(11) == 1)
                .mirrorUrl(cursor.getString(12))
                .permissions(permissions)
                .build();
    }

    @Override
    public void store(SQLiteDatabase db, List<Repository> repos) {
        // Replace the organization's rows wholesale with the fresh snapshot.
        db.delete("repos", "orgId=?",
                new String[] { Integer.toString(org.id().intValue()) });
        if (repos.isEmpty()) {
            return;
        }

        ContentValues values = new ContentValues(16);
        for (Repository repo : repos) {
            values.clear();

            User owner = repo.owner();

            values.put("repoId", repo.id());
            values.put("name", repo.name());
            values.put("orgId", org.id());
            values.put("ownerId", owner.id());
            values.put("private", repo.isPrivate() ? 1 : 0);
            values.put("fork", repo.isFork() ? 1 : 0);
            values.put("description", repo.description());
            values.put("forks", repo.forksCount());
            values.put("watchers", repo.watchersCount());
            values.put("language", repo.language());
            values.put("hasIssues", repo.hasIssues() ? 1 : 0);
            values.put("mirrorUrl", repo.mirrorUrl());
            values.put("permissions_admin", repo.permissions().admin() ? 1 : 0);
            values.put("permissions_pull", repo.permissions().pull() ? 1 : 0);
            values.put("permissions_push", repo.permissions().push() ? 1 : 0);
            db.replace("repos", null, values);

            // Upsert the owner row referenced by the repos.ownerId join.
            values.clear();
            values.put("id", owner.id());
            values.put("name", owner.login());
            values.put("avatarurl", owner.avatarUrl());
            db.replace("users", null, values);
        }
    }

    @Override
    public List<Repository> request() throws IOException {
        if (isAuthenticatedUser()) {
            // For the signed-in user, merge owned and watched repositories,
            // de-duplicated by repository id.
            Set<Repository> all = new TreeSet<>(
                    (repo1, repo2) -> Long.compare(repo1.id(), repo2.id()));

            all.addAll(getAllItems(page ->
                    ServiceGenerator.createService(context, RepositoryService.class)
                            .getUserRepositories(page)));
            all.addAll(getAllItems(page ->
                    ServiceGenerator.createService(context, WatchingService.class)
                            .getWatchedRepositories(page)));

            return new ArrayList<>(all);
        } else {
            return getAllItems(page ->
                    ServiceGenerator.createService(context, RepositoryService.class)
                            .getOrganizationRepositories(org.login(), page));
        }
    }

    /**
     * Fetches every page of the given paged request.
     * Blocks on each page; intended to run off the main thread.
     */
    private List<Repository> getAllItems(GitHubRequest<Response<Page<Repository>>> request) {
        List<Repository> repos = new ArrayList<>();
        int current = 1;
        int last = -1;

        while (current != last) {
            Page<Repository> page = request.execute(current).blockingGet().body();
            repos.addAll(page.items());
            last = page.last() != null ? page.last() : -1;
            current = page.next() != null ? page.next() : -1;
        }

        return repos;
    }

    /** @return true when the cached org is the currently signed-in account */
    private boolean isAuthenticatedUser() {
        return org.login().equals(accountProvider.get().name);
    }

    @Override
    public String toString() {
        return getClass().getSimpleName() + '[' + org.login() + ']';
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.distributed;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.ignite.Ignite;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.affinity.AffinityFunctionContext;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.cluster.ClusterState;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteInterruptedCheckedException;
import org.apache.ignite.internal.TestRecordingCommunicationSpi;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionsExchangeFuture;
import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.ListeningTestLogger;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;

import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;

/**
 * Base class for exchange-free cellular switch tests: a 6-node topology split
 * into two 3-node cells, with persistence enabled and recording communication SPI.
 */
public abstract class GridExchangeFreeCellularSwitchAbstractTest extends GridCommonAbstractTest {
    /** Partitioned cache name. */
    protected static final String PART_CACHE_NAME = "partitioned";

    /** Replicated cache name. */
    protected static final String REPL_CACHE_NAME = "replicated";

    /** Listening logger, exposed so subclasses can register message listeners. */
    protected final ListeningTestLogger listeningLog = new ListeningTestLogger(log);

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        // Recording SPI allows individual tests to block/inspect messages.
        cfg.setCommunicationSpi(new TestRecordingCommunicationSpi());

        cfg.setCacheConfiguration(cacheConfiguration());
        cfg.setClusterStateOnStart(ClusterState.INACTIVE);

        DataRegionConfiguration dfltRegion = new DataRegionConfiguration();
        dfltRegion.setPersistenceEnabled(true);

        DataStorageConfiguration storage = new DataStorageConfiguration();
        storage.setDefaultDataRegionConfiguration(dfltRegion);

        cfg.setDataStorageConfiguration(storage);
        cfg.setGridLogger(listeningLog);

        return cfg;
    }

    /**
     * @return Configurations for the partitioned (cell-mapped) and replicated caches.
     */
    private CacheConfiguration<?, ?>[] cacheConfiguration() {
        CacheConfiguration<?, ?> partitioned = new CacheConfiguration<>();

        partitioned.setName(PART_CACHE_NAME);
        partitioned.setWriteSynchronizationMode(FULL_SYNC);
        partitioned.setBackups(2);
        partitioned.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);
        partitioned.setAffinity(new Map6PartitionsTo6NodesTo2CellsAffinityFunction());

        CacheConfiguration<?, ?> replicated = new CacheConfiguration<>();

        replicated.setName(REPL_CACHE_NAME);
        replicated.setWriteSynchronizationMode(FULL_SYNC);
        replicated.setCacheMode(CacheMode.REPLICATED);
        replicated.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);

        return new CacheConfiguration[] {partitioned, replicated};
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        super.beforeTest();

        cleanPersistenceDir();
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();
    }

    /**
     * Waits until every surviving node observes the node-left exchange.
     *
     * @param failed Node whose failure triggered the switch (excluded from the check).
     */
    protected void awaitForSwitchOnNodeLeft(Ignite failed) throws IgniteInterruptedCheckedException {
        assertTrue(GridTestUtils.waitForCondition(
            () -> {
                for (Ignite node : G.allGrids()) {
                    if (node == failed)
                        continue;

                    GridDhtPartitionsExchangeFuture lastFut =
                        ((IgniteEx)node).context().cache().context().exchange().lastTopologyFuture();

                    if (!lastFut.exchangeId().isLeft())
                        return false;
                }

                return true;
            }, 5000));
    }

    /**
     * Blocks outgoing tx recovery requests on every node, so recovery can be
     * released at a controlled point in the test.
     */
    protected void blockRecoveryMessages() {
        for (Ignite node : G.allGrids()) {
            TestRecordingCommunicationSpi spi =
                (TestRecordingCommunicationSpi)node.configuration().getCommunicationSpi();

            spi.blockMessages((ClusterNode dst, Message msg) ->
                msg.getClass().equals(GridCacheTxRecoveryRequest.class));
        }
    }

    /**
     * Asserts the expected number of active transactions on each node role.
     * Transactions are counted by distinct near xid version, so a tx originally
     * started at a backup still counts as a single element.
     *
     * @param vers When non-null, only transactions with these versions are counted.
     */
    protected void checkTransactionsCount(
        Ignite orig, int origCnt,
        Ignite primary, int primaryCnt,
        List<Ignite> backupNodes, int backupCnt,
        List<Ignite> nearNodes, int nearCnt,
        Set<GridCacheVersion> vers) {
        Function<Ignite, Collection<GridCacheVersion>> activeVers = node -> {
            Collection<IgniteInternalTx> active =
                ((IgniteEx)node).context().cache().context().tm().activeTransactions();

            return active.stream()
                .map(IgniteInternalTx::nearXidVersion)
                .filter(ver -> vers == null || vers.contains(ver))
                .collect(Collectors.toSet());
        };

        if (orig != null)
            assertEquals(origCnt, activeVers.apply(orig).size());

        if (primary != null && primary != orig)
            assertEquals(primaryCnt, activeVers.apply(primary).size());

        for (Ignite backup : backupNodes)
            if (backup != orig)
                assertEquals(backupCnt, activeVers.apply(backup).size());

        for (Ignite near : nearNodes)
            if (near != orig)
                assertEquals(nearCnt, activeVers.apply(near).size());
    }

    /**
     * Maps 6 partitions onto 6 nodes forming 2 cells of 3 nodes each
     * (partitions 0-2 on nodes 0-2, partitions 3-5 on nodes 3-5), with 2 backups.
     */
    protected static class Map6PartitionsTo6NodesTo2CellsAffinityFunction extends RendezvousAffinityFunction {
        /**
         * Default constructor.
         */
        public Map6PartitionsTo6NodesTo2CellsAffinityFunction() {
            super(false, 6);
        }

        /** {@inheritDoc} */
        @Override public List<List<ClusterNode>> assignPartitions(AffinityFunctionContext affCtx) {
            List<List<ClusterNode>> res = new ArrayList<>(6);

            assert affCtx.backups() == 2;

            List<ClusterNode> top = affCtx.currentTopologySnapshot();

            // Only the exact 6-node topology gets an assignment; otherwise empty.
            if (top.size() == 6) {
                // Per cell (base node b), partition k's owner list is the cell's
                // nodes rotated so it starts at offset (3 - k) % 3:
                //   k=0 -> [b, b+1, b+2], k=1 -> [b+2, b, b+1], k=2 -> [b+1, b+2, b].
                for (int base = 0; base < 6; base += 3) {
                    for (int k = 0; k < 3; k++) {
                        List<ClusterNode> owners = new ArrayList<>(3);

                        int start = (3 - k) % 3;

                        for (int i = 0; i < 3; i++)
                            owners.add(top.get(base + (start + i) % 3));

                        res.add(owners);
                    }
                }
            }

            return res;
        }
    }

    /**
     * Specifies node starts the transaction (originating node).
     */
    protected enum TransactionCoordinatorNode {
        /** Primary. */
        PRIMARY,

        /** Backup. */
        BACKUP,

        /** Near. */
        NEAR,

        /** Client. */
        CLIENT
    }
}
package org.infinispan.loaders.hbase;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.util.Bytes;
import org.infinispan.loaders.hbase.test.HBaseCluster;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

/**
 * Integration tests for {@code HBaseFacade}: table lifecycle, single-row and
 * bulk reads/writes, key scans, and prefix+timestamp range reads used for
 * expiration purging. Runs against an embedded {@link HBaseCluster} (group
 * "manual" — not part of the default suite).
 */
@Test(groups = "manual", testName = "loaders.hbase.HBaseFacadeTest")
public class HBaseFacadeTest {
   // When true an in-process HBase mini-cluster is started in beforeClass().
   // NOTE(review): if this were false, hBaseCluster stays null and
   // hBaseCluster.getZooKeeperPort() below would throw NPE — confirm the
   // external-cluster path is intentional/unused.
   private static final boolean USE_EMBEDDED = true;

   // Facade under test; created once per class.
   private static HBaseFacade HBF;

   // Timestamp suffix keeps the table name unique across runs.
   private static final String TABLE_MESSAGE = "messages_" + System.currentTimeMillis();
   private static final String COL_FAMILY_DATA = "data";
   private static final String COL_FAMILY_METADATA = "metadata";
   private static final String QUANTIFIER_VALUE = "value";

   // Column families used to create the shared test table.
   private static List<String> COL_FAMILIES = new ArrayList<String>();
   // Families queried when reading back message rows (data + metadata only).
   private List<String> COL_FAMILIES_WITH_DATA = new ArrayList<String>();

   // Per-message write payloads: family -> (qualifier -> value).
   private Map<String, Map<String, byte[]>> DATA_MAP_1 = new HashMap<String, Map<String, byte[]>>(1);
   private Map<String, byte[]> DATA_CELLS_1 = new HashMap<String, byte[]>();
   private Map<String, byte[]> METADATA_CELLS_1 = new HashMap<String, byte[]>();
   private Map<String, Map<String, byte[]>> DATA_MAP_2 = new HashMap<String, Map<String, byte[]>>(1);
   private Map<String, byte[]> DATA_CELLS_2 = new HashMap<String, byte[]>();
   private Map<String, Map<String, byte[]>> DATA_MAP_3 = new HashMap<String, Map<String, byte[]>>(1);
   private Map<String, byte[]> DATA_CELLS_3 = new HashMap<String, byte[]>();
   private Map<String, Map<String, byte[]>> DATA_MAP_4 = new HashMap<String, Map<String, byte[]>>(1);
   private Map<String, byte[]> DATA_CELLS_4 = new HashMap<String, byte[]>();

   // data 1 - this one has metadata to go along with it
   private byte[] data1 = "<message><name>data1</name><value>something1</value></message>"
            .getBytes();
   private String metadataStr1 = "<metadata>"
            + "<field><name>field1.1</name><value>blue1</value></field>"
            + "<field><name>field1.2</name><value>orange1</value></field>"
            + "<field><name>field1.3</name><value>purple1</value></field>"
            + "</metadata>";
   private byte[] metadata1 = metadataStr1.getBytes();

   // data 2
   private byte[] data2 = "<message><name>data2</name><value>something2</value></message>"
            .getBytes();

   // data 3
   private byte[] data3 = "<message><name>data3</name><value>something3</value></message>"
            .getBytes();

   // data 4
   private byte[] data4 = "<message><name>data4</name><value>something4</value></message>"
            .getBytes();

   // message keys
   private String messageKey1 = "message1";
   private String messageKey2 = "message2";
   private String messageKey3 = "message3";
   private String messageKey4 = "message4";

   // to simulate maintaining a separate expiration metadata table
   private static final String EXP_KEY_PREFIX = "exp_";
   private static final String COL_FAMILY_EXP = "expiration";
   private static final String EXP_VALUE_FIELD = "v";

   private HBaseCluster hBaseCluster;

   /**
    * Starts the embedded cluster, builds the facade, wires up the test
    * fixtures, and creates the shared message table.
    */
   @BeforeClass
   public void beforeClass() throws Exception {
      if (USE_EMBEDDED)
         hBaseCluster = new HBaseCluster();

      Map<String, String> props = new HashMap<String, String>();
      // Point the facade at the embedded cluster's ZooKeeper port.
      props.put("hbase.zookeeper.property.clientPort",
            Integer.toString(hBaseCluster.getZooKeeperPort()));
      System.out.println("************************");
      HBF = new HBaseFacade(props);

      // prepare data for tests
      COL_FAMILIES_WITH_DATA.add(COL_FAMILY_DATA);
      COL_FAMILIES_WITH_DATA.add(COL_FAMILY_METADATA);

      // Message 1 carries both a data cell and a metadata cell.
      DATA_CELLS_1.put(QUANTIFIER_VALUE, data1);
      DATA_MAP_1.put(COL_FAMILY_DATA, DATA_CELLS_1);
      METADATA_CELLS_1.put(QUANTIFIER_VALUE, metadata1);
      DATA_MAP_1.put(COL_FAMILY_METADATA, METADATA_CELLS_1);

      DATA_CELLS_2.put(QUANTIFIER_VALUE, data2);
      DATA_MAP_2.put(COL_FAMILY_DATA, DATA_CELLS_2);

      DATA_CELLS_3.put(QUANTIFIER_VALUE, data3);
      DATA_MAP_3.put(COL_FAMILY_DATA, DATA_CELLS_3);

      DATA_CELLS_4.put(QUANTIFIER_VALUE, data4);
      DATA_MAP_4.put(COL_FAMILY_DATA, DATA_CELLS_4);

      COL_FAMILIES.add(COL_FAMILY_DATA);
      COL_FAMILIES.add(COL_FAMILY_METADATA);
      COL_FAMILIES.add(COL_FAMILY_EXP);

      // prepare table for tests
      HBF.createTable(TABLE_MESSAGE, COL_FAMILIES);
   }

   /**
    * Drops the shared table and shuts down the embedded cluster.
    */
   @AfterClass
   public void afterClass() throws Exception {
      HBF.deleteTable(TABLE_MESSAGE);
      if (USE_EMBEDDED)
         HBaseCluster.shutdown(hBaseCluster);
   }

   /**
    * Tests creation and deletion of tables, as well as the method to check whether a table exists.
    *
    * @throws HBaseException
    */
   public void tableCreateAndDelete() throws HBaseException {
      String testCF = "testCF";
      String testDataVal = "This is some data.";
      String testTable = TABLE_MESSAGE + "_test";
      String testField = "testField";
      String testKey = "testKey";
      List<String> colFamilies = Collections.singletonList(testCF);

      assert !HBF.tableExists(testTable) : "Table already exists.";

      try {
         HBF.createTable(testTable, colFamilies);
      } catch (HBaseException ex) {
         // A pre-existing table makes the rest of the test meaningless, so
         // it is skipped rather than failed.
         // NOTE(review): this early return skips cleanup of any stale
         // testTable — confirm it can't accumulate across runs.
         if (ex.getCause() instanceof TableExistsException) {
            System.err.println("Cannot test createTable because the " + testTable
                     + " table already exists.");
            return;
         } else {
            throw ex;
         }
      }
      assert HBF.tableExists(testTable) : "Table not created properly.";

      // Round-trip a single cell through the new table.
      Map<String, byte[]> dataCells1 = Collections.singletonMap(testField,
               testDataVal.getBytes());
      Map<String, Map<String, byte[]>> testData = Collections.singletonMap(
               colFamilies.get(0), dataCells1);
      HBF.addRow(testTable, testKey, testData);

      testData = HBF.readRow(testTable, testKey, colFamilies);
      assert Arrays.equals(testData.get(colFamilies.get(0)).get(testField),
               testDataVal.getBytes());

      HBF.removeRow(testTable, testKey);

      HBF.deleteTable(testTable);
      assert !HBF.tableExists(testTable) : "Table not deleted properly.";
   }

   /**
    * Tests writing a row to the table, reading its data, and removing it. The row that is written
    * has two column families - one for data and one for metadata.
    *
    * @throws HBaseException
    */
   public void writeAddRow() throws HBaseException {
      // write the row
      HBF.addRow(TABLE_MESSAGE, messageKey1, DATA_MAP_1);

      // query the row
      Map<String, Map<String, byte[]>> resultMap = HBF.readRow(TABLE_MESSAGE, messageKey1,
               COL_FAMILIES_WITH_DATA);

      assert resultMap.containsKey(COL_FAMILY_DATA);
      Map<String, byte[]> columnFamilyData = resultMap.get(COL_FAMILY_DATA);
      byte[] resultData = columnFamilyData.get(QUANTIFIER_VALUE);
      assert Arrays.equals(resultData, data1);

      assert resultMap.containsKey(COL_FAMILY_METADATA);
      Map<String, byte[]> columnFamilyMetadata = resultMap.get(COL_FAMILY_METADATA);
      byte[] resultMetadata = columnFamilyMetadata.get(QUANTIFIER_VALUE);
      assert Arrays.equals(resultMetadata, metadata1);

      // remove the row and verify that it's gone
      HBF.removeRow(TABLE_MESSAGE, messageKey1);
      resultMap = HBF.readRow(TABLE_MESSAGE, messageKey1, COL_FAMILIES_WITH_DATA);
      assert resultMap.isEmpty();
   }

   /**
    * Tests writing messages, scanning the table, and removing the messages.
    *
    * @throws HBaseException
    */
   public void writeAndScanMessageData() throws HBaseException {
      HBF.addRow(TABLE_MESSAGE, messageKey1, DATA_MAP_1);
      HBF.addRow(TABLE_MESSAGE, messageKey2, DATA_MAP_2);
      HBF.addRow(TABLE_MESSAGE, messageKey3, DATA_MAP_3);
      HBF.addRow(TABLE_MESSAGE, messageKey4, DATA_MAP_4);

      // Scan returns key -> value for the data family's "value" qualifier.
      Map<String, byte[]> dataMap = HBF.scan(TABLE_MESSAGE, 4, COL_FAMILY_DATA, QUANTIFIER_VALUE);
      assert Arrays.equals(dataMap.get(messageKey1), data1);
      assert Arrays.equals(dataMap.get(messageKey2), data2);
      assert Arrays.equals(dataMap.get(messageKey3), data3);
      assert Arrays.equals(dataMap.get(messageKey4), data4);

      // Clean up so later tests start from an empty table.
      HBF.removeRow(TABLE_MESSAGE, messageKey1);
      HBF.removeRow(TABLE_MESSAGE, messageKey2);
      HBF.removeRow(TABLE_MESSAGE, messageKey3);
      HBF.removeRow(TABLE_MESSAGE, messageKey4);
   }

   /**
    * Tests writing messages, scanning the table for the keys, and removing the messages.
    *
    * @throws HBaseException
    */
   public void writeAndScanMessageKeys() throws HBaseException {
      HBF.addRow(TABLE_MESSAGE, messageKey1, DATA_MAP_1);
      HBF.addRow(TABLE_MESSAGE, messageKey2, DATA_MAP_2);
      HBF.addRow(TABLE_MESSAGE, messageKey3, DATA_MAP_3);
      HBF.addRow(TABLE_MESSAGE, messageKey4, DATA_MAP_4);

      Set<Object> keys = HBF.scanForKeys(TABLE_MESSAGE);
      assert keys.contains(messageKey1) : "Did not return a key";
      assert keys.contains(messageKey2) : "Did not return a key";
      assert keys.contains(messageKey3) : "Did not return a key";
      assert keys.contains(messageKey4) : "Did not return a key";

      HBF.removeRow(TABLE_MESSAGE, messageKey1);
      HBF.removeRow(TABLE_MESSAGE, messageKey2);
      HBF.removeRow(TABLE_MESSAGE, messageKey3);
      HBF.removeRow(TABLE_MESSAGE, messageKey4);
   }

   /**
    * Tests writing messages and removing them in a bulk delete.
    *
    * @throws HBaseException
    */
   public void writeAndRemoveRows() throws HBaseException {
      HBF.addRow(TABLE_MESSAGE, messageKey1, DATA_MAP_1);
      HBF.addRow(TABLE_MESSAGE, messageKey2, DATA_MAP_2);
      HBF.addRow(TABLE_MESSAGE, messageKey3, DATA_MAP_3);
      HBF.addRow(TABLE_MESSAGE, messageKey4, DATA_MAP_4);

      Set<Object> keys = new HashSet<Object>();
      keys.add(messageKey1);
      keys.add(messageKey2);
      keys.add(messageKey3);
      keys.add(messageKey4);
      HBF.removeRows(TABLE_MESSAGE, keys);

      // Verify every row is gone after the bulk delete.
      Map<String, Map<String, byte[]>> resultMap = HBF.readRow(TABLE_MESSAGE, messageKey1,
               COL_FAMILIES_WITH_DATA);
      assert resultMap.isEmpty();
      resultMap = HBF.readRow(TABLE_MESSAGE, messageKey2, COL_FAMILIES_WITH_DATA);
      assert resultMap.isEmpty();
      resultMap = HBF.readRow(TABLE_MESSAGE, messageKey3, COL_FAMILIES_WITH_DATA);
      assert resultMap.isEmpty();
      resultMap = HBF.readRow(TABLE_MESSAGE, messageKey4, COL_FAMILIES_WITH_DATA);
      assert resultMap.isEmpty();
   }

   /**
    * Tests reading multiple rows and removing them.
    *
    * @throws HBaseException
    */
   public void readRowsAndDelete() throws HBaseException {
      long tsBase = System.currentTimeMillis();
      Set<Object> expKeys = new HashSet<Object>();
      List<String> expColFamilies = Collections.singletonList(COL_FAMILY_EXP);

      // add some data rows and corresponding expiration metadata rows
      // Expiration row keys embed a timestamp: exp_<ts>, exp_<ts+2>, ...
      HBF.addRow(TABLE_MESSAGE, messageKey1, DATA_MAP_1);
      String expKey1 = EXP_KEY_PREFIX + String.valueOf(tsBase);
      expKeys.add(expKey1);
      HBF.addRow(TABLE_MESSAGE, expKey1, makeExpirationMap(messageKey1));

      HBF.addRow(TABLE_MESSAGE, messageKey2, DATA_MAP_2);
      String expKey2 = EXP_KEY_PREFIX + String.valueOf(tsBase + 2);
      expKeys.add(expKey2);
      HBF.addRow(TABLE_MESSAGE, expKey2, makeExpirationMap(messageKey2));

      HBF.addRow(TABLE_MESSAGE, messageKey3, DATA_MAP_3);
      String expKey3 = EXP_KEY_PREFIX + String.valueOf(tsBase + 4);
      expKeys.add(expKey3);
      HBF.addRow(TABLE_MESSAGE, expKey3, makeExpirationMap(messageKey3));

      HBF.addRow(TABLE_MESSAGE, messageKey4, DATA_MAP_4);
      String expKey4 = EXP_KEY_PREFIX + String.valueOf(tsBase + 6);
      expKeys.add(expKey4);
      HBF.addRow(TABLE_MESSAGE, expKey4, makeExpirationMap(messageKey4));

      // read the rows using the key prefix and a timestamp that is halfway through the entries
      // (should return keys for just 2 previously added items)
      Map<String, Map<String, Map<String, byte[]>>> rowsToPurge = HBF.readRows(TABLE_MESSAGE,
               EXP_KEY_PREFIX, tsBase + 3, COL_FAMILY_EXP, EXP_VALUE_FIELD);

      Set<Object> keysToDelete = new HashSet<Object>();
      Set<Object> expKeysToDelete = new HashSet<Object>();
      for (Entry<String, Map<String, Map<String, byte[]>>> entry : rowsToPurge.entrySet()) {
         assert expKeys.contains(entry.getKey());
         expKeysToDelete.add(entry.getKey());
         // The expiration cell's value is the target message key.
         byte[] targetKeyBytes = entry.getValue().get(COL_FAMILY_EXP).get(EXP_VALUE_FIELD);
         String targetKey = Bytes.toString(targetKeyBytes);
         keysToDelete.add(targetKey);
      }
      assert keysToDelete.contains(messageKey1);
      assert keysToDelete.contains(messageKey2);
      assert !keysToDelete.contains(messageKey3);
      assert !keysToDelete.contains(messageKey4);

      // read the rows using the key prefix and a timestamp greater than the timestamps from the
      // entries
      // (should return keys for all previously added items)
      rowsToPurge = HBF.readRows(TABLE_MESSAGE, EXP_KEY_PREFIX, tsBase + 100, COL_FAMILY_EXP,
               EXP_VALUE_FIELD);
      keysToDelete = new HashSet<Object>();
      expKeysToDelete = new HashSet<Object>();
      for (Entry<String, Map<String, Map<String, byte[]>>> entry : rowsToPurge.entrySet()) {
         assert expKeys.contains(entry.getKey());
         expKeysToDelete.add(entry.getKey());
         byte[] targetKeyBytes = entry.getValue().get(COL_FAMILY_EXP).get(EXP_VALUE_FIELD);
         String targetKey = Bytes.toString(targetKeyBytes);
         keysToDelete.add(targetKey);
      }
      assert keysToDelete.contains(messageKey1);
      assert keysToDelete.contains(messageKey2);
      assert keysToDelete.contains(messageKey3);
      assert keysToDelete.contains(messageKey4);

      // now delete the data and expiration metadata rows
      HBF.removeRows(TABLE_MESSAGE, keysToDelete);
      HBF.removeRows(TABLE_MESSAGE, expKeysToDelete);

      // make sure rows were deleted
      Map<String, Map<String, byte[]>> resultMap = HBF.readRow(TABLE_MESSAGE, messageKey1,
               COL_FAMILIES_WITH_DATA);
      assert resultMap.isEmpty();
      resultMap = HBF.readRow(TABLE_MESSAGE, expKey1, expColFamilies);
      assert resultMap.isEmpty();
      resultMap = HBF.readRow(TABLE_MESSAGE, messageKey2, COL_FAMILIES_WITH_DATA);
      assert resultMap.isEmpty();
      resultMap = HBF.readRow(TABLE_MESSAGE, expKey2, expColFamilies);
      assert resultMap.isEmpty();
      resultMap = HBF.readRow(TABLE_MESSAGE, messageKey3, COL_FAMILIES_WITH_DATA);
      assert resultMap.isEmpty();
      resultMap = HBF.readRow(TABLE_MESSAGE, expKey3, expColFamilies);
      assert resultMap.isEmpty();
      resultMap = HBF.readRow(TABLE_MESSAGE, messageKey4, COL_FAMILIES_WITH_DATA);
      assert resultMap.isEmpty();
      resultMap = HBF.readRow(TABLE_MESSAGE, expKey4, expColFamilies);
      assert resultMap.isEmpty();
   }

   /**
    * Builds an expiration-metadata row body: the "expiration" family with a
    * single cell whose qualifier is {@code v} and whose value is the target
    * message key.
    *
    * @param value the key of the message row this expiration entry points at
    * @return a single-family, single-cell map suitable for addRow()
    */
   private Map<String, Map<String, byte[]>> makeExpirationMap(String value) {
      Map<String, byte[]> expValMap = Collections.singletonMap(EXP_VALUE_FIELD,
               Bytes.toBytes(value));
      return Collections.singletonMap(COL_FAMILY_EXP, expValMap);
   }
}
/*
 * Copyright (c) 2013 Allogy Interactive.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.allogy.app;

import android.app.*;
import android.content.*;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.database.Cursor;
import android.graphics.drawable.BitmapDrawable;
import android.media.ThumbnailUtils;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.preference.PreferenceManager;
import android.telephony.SmsManager;
import android.util.Log;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.*;

import com.allogy.app.R.string;
import com.allogy.app.media.*;
import com.allogy.app.provider.Academic;
import com.allogy.app.provider.Academic.LessonFiles;
import com.allogy.app.provider.Academic.Progress;

import java.io.File;
import java.util.List;

/**
 * Screen that presents a single lesson: its media (audio/video/flash), book
 * and website resources, objectives, an optional quiz, and an "ask a
 * question" dialog that sends the question to a configured SMS gateway.
 *
 * @author Jamie Huson
 */
public class LessonActivity extends BaseActivity {

  // Receiver for the SMS_SENT result of the "ask a question" message.
  private BroadcastReceiver sentBroadCastReceiver;

  // Dialog ids used with showDialog()/dismissDialog().
  private static final int DIALOG_PROGRESS = 0;
  private static final int DIALOG_QUESTION = 1;

  /**
   * Builds the "sending" progress dialog and the question-entry dialog.
   * The question dialog sends "IM:<course>,<lesson>/<text>" via SMS to the
   * gateway number stored in shared preferences.
   */
  @Override
  protected Dialog onCreateDialog(int id, Bundle args) {
    switch (id) {
      case DIALOG_PROGRESS:
        ProgressDialog p = new ProgressDialog(this);
        p.setTitle("Sending");
        p.setMessage("...Please Wait...");
        p.setIndeterminate(true);
        p.setCancelable(false);
        return p;
      case DIALOG_QUESTION:
        final Activity activity = this;
        final View layout = LayoutInflater.from(this).inflate(
            R.layout.dialog_ask_question, null);
        final EditText question = (EditText) layout
            .findViewById(R.id.dialog_question);

        AlertDialog.Builder d = new AlertDialog.Builder(this);
        d.setCancelable(false);
        d.setTitle(getResources().getString(R.string.ask_a_question));
        d.setView(layout);
        d.setPositiveButton(getResources().getString(R.string.send),
            new DialogInterface.OnClickListener() {

              @Override
              public void onClick(DialogInterface dialog, int which) {
                // Swap the question dialog for the progress dialog while
                // the SMS is in flight.
                activity.dismissDialog(DIALOG_QUESTION);
                activity.showDialog(DIALOG_PROGRESS);

                SharedPreferences pref = PreferenceManager
                    .getDefaultSharedPreferences(activity);

                String message = question.getText().toString();
                String destination = pref.getString(SettingsActivity.PREF_GATEWAY, null);

                if (destination != null && message.length() > 0) {
                  // for sending question
                  String SENT = "SMS_SENT";
                  String DELIVERED = "SMS_DELIVERED";

                  PendingIntent sentPI = PendingIntent.getBroadcast(
                      activity, 0, new Intent(SENT), 0);

                  PendingIntent deliveredPI = PendingIntent.getBroadcast(
                      activity, 0, new Intent(DELIVERED), 0);

                  // One-shot receiver: unregisters itself after the first
                  // SENT result.
                  // NOTE(review): if the activity finishes before the
                  // result arrives, this receiver leaks — confirm.
                  sentBroadCastReceiver = new BroadcastReceiver() {
                    @Override
                    public void onReceive(Context context, Intent intent) {
                      switch (getResultCode()) {
                        case Activity.RESULT_OK:
                          activity.dismissDialog(DIALOG_PROGRESS);
                          question.setText("");
                          break;
                        // Generic failure error
                        case SmsManager.RESULT_ERROR_GENERIC_FAILURE:
                          // No service error
                        case SmsManager.RESULT_ERROR_NO_SERVICE:
                          // Null PDU error, no PDU provided
                        case SmsManager.RESULT_ERROR_NULL_PDU:
                          // Radio turned off error
                        case SmsManager.RESULT_ERROR_RADIO_OFF:
                        default:
                          // Any failure: log, toast, and re-open the
                          // question dialog so the user can retry.
                          Log.i("SMSSender", "failed to send: " + getResultCode());
                          Toast.makeText(activity, "Failed...Try Again",
                              Toast.LENGTH_SHORT).show();
                          activity.dismissDialog(DIALOG_PROGRESS);
                          activity.showDialog(DIALOG_QUESTION);
                      }
                      unregisterReceiver(sentBroadCastReceiver);
                    }
                  };

                  registerReceiver(sentBroadCastReceiver, new IntentFilter("SMS_SENT"));

                  // Message format: "IM:<courseId>,<lessonId>/<question>".
                  SmsManager.getDefault().sendTextMessage(
                      destination,
                      null,
                      "IM:" + mCourseID + "," + mLessonID + "/" + message,
                      sentPI, deliveredPI);
                } else {
                  // No gateway configured (or empty message): send the user
                  // to settings instead.
                  activity.dismissDialog(DIALOG_PROGRESS);
                  Toast.makeText(activity, "Enter Gateway Number",
                      Toast.LENGTH_SHORT).show();
                  activity.startActivity(new Intent(activity, SettingsActivity.class));
                }
              }
            });
        d.setNegativeButton(getResources().getString(R.string.cancel),
            new DialogInterface.OnClickListener() {

              @Override
              public void onClick(DialogInterface dialog, int which) {
                activity.dismissDialog(DIALOG_QUESTION);
              }
            });
        return d.create();
    }
    return null;
  }

  // Intent extras identifying which lesson to display.
  public static final String INTENT_EXTRA_PUBLISHER_ID = "LessonActivity.intentextra.publisherid";
  public static final String INTENT_EXTRA_COURSE_ID = "LessonActivity.intentextra.courseid";
  public static final String INTENT_EXTRA_LESSON_ID = "LessonActivity.intentextra.lessonid";

  // Internal media-type discriminator used by mediaClicker.
  private static final int MEDIA_TYPE_AUDIO = 1;
  private static final int MEDIA_TYPE_VIDEO = 2;
  private static final int MEDIA_TYPE_FLASH = 3;

  private int mMediaType = 0;

  // Ids arrive as long extras but are kept as strings for queries.
  private String mPubID;
  private String mCourseID;
  private String mLessonID;

  private int mMediaId;
  private String mMediaUri;
  private String mObjectiveUri;
  private String mQuizUri;

  LinearLayout mResourceList;
  TextView mObjectiveView;
  TextView mQuestionView;
  Button mQuizButton;
  ImageView mMedia;
  ImageView mMediaContentImage;
  BitmapDrawable snippet;

  /**
   * Called when the activity is first created. Reads the publisher/course/
   * lesson ids from the launching intent and populates the quiz, resource
   * list, and media banner; bails out (empty screen) if any extra is missing.
   */
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    System.gc();
    setContentView(R.layout.activity_lesson);

    Intent i = getIntent();

    if (i.hasExtra(INTENT_EXTRA_PUBLISHER_ID)
        && i.hasExtra(INTENT_EXTRA_COURSE_ID)
        && i.hasExtra(INTENT_EXTRA_LESSON_ID)) {

      mPubID = Long.toString(i.getLongExtra(INTENT_EXTRA_PUBLISHER_ID, -1));
      mCourseID = Long.toString(i.getLongExtra(INTENT_EXTRA_COURSE_ID, -1));
      mLessonID = Long.toString(i.getLongExtra(INTENT_EXTRA_LESSON_ID, -1));

      Log.i("LessonActivity", mPubID + " " + mCourseID + " " + mLessonID);

      mResourceList = (LinearLayout) findViewById(R.id.lesson_resource_list);
      mObjectiveView = (TextView) findViewById(R.id.lesson_objectives_button);
      mQuestionView = (TextView) findViewById(R.id.lesson_question_button);
      mQuizButton = (Button) findViewById(R.id.lesson_quiz_button);
      mMedia = (ImageView) findViewById(R.id.lesson_media_button);
      mMediaContentImage = (ImageView) findViewById(R.id.lesson_media_content);

      loadQuiz();
      loadResourcesBooks();
      loadResourcesWeb();
      loadMedia();
    } else {
      Log.v("LESSON", "MISSING INTENT EXTRAS");
      return;
    }
  }

  /**
   * Queries the lesson's audio/video/flash file (first match wins), sets the
   * media banner image accordingly, and wires the play button.
   */
  private void loadMedia() {
    // NOTE(review): managedQuery cursors are closed by the Activity; this
    // cursor is intentionally not closed here, unlike the loaders below.
    Cursor c = managedQuery(
        Academic.LessonFiles.CONTENT_URI,
        null,
        String.format(" %s=? AND (%s=? OR %s=? OR %s=?)",
            Academic.LessonFiles.LESSON_ID,
            Academic.LessonFiles.MEDIA_TYPE,
            Academic.LessonFiles.MEDIA_TYPE,
            Academic.LessonFiles.MEDIA_TYPE),
        new String[] { mLessonID,
            Integer.toString(Academic.CONTENT_TYPE_AUDIO),
            Integer.toString(Academic.CONTENT_TYPE_VIDEO),
            Integer.toString(Academic.CONTENT_TYPE_FLASH) },
        Academic.LessonFiles.SORT_ORDER_DEFAULT);

    if (c.moveToFirst()) {
      mMediaId = c.getInt(c.getColumnIndex(Academic.LessonFiles._ID));
      mMediaUri = c.getString(c.getColumnIndex(Academic.LessonFiles.URI));
      int type = c.getInt(c.getColumnIndex(Academic.LessonFiles.MEDIA_TYPE));

      switch (type) {
        case Academic.CONTENT_TYPE_AUDIO:
          mMediaType = MEDIA_TYPE_AUDIO;
          // set the media banner to show there is audio content
          mMediaContentImage.setBackgroundDrawable(getApplicationContext()
              .getResources().getDrawable(R.drawable.lesson_audio_media));
          mMedia.setImageResource(R.drawable.play_button);
          break;
        case Academic.CONTENT_TYPE_VIDEO:
          mMediaType = MEDIA_TYPE_VIDEO;
          // set the media banner to a snapshot in the video
          // Thumbnail is generated from the decrypted file on external
          // storage (".mp4" suffix stripped).
          snippet = new BitmapDrawable(
              ThumbnailUtils.createVideoThumbnail(
                  Environment.getExternalStorageDirectory()
                      + "/Allogy/Decrypted/"
                      + mMediaUri.replace(".mp4", "").trim(), 1));
          mMediaContentImage.setBackgroundDrawable(snippet);
          mMedia.setImageResource(R.drawable.play_button);
          break;
        case Academic.CONTENT_TYPE_FLASH:
          mMediaType = MEDIA_TYPE_FLASH;
          // set the media banner to show there is flash content
          mMediaContentImage.setBackgroundDrawable(getApplicationContext()
              .getResources().getDrawable(R.drawable.lesson_flash_media));
          mMedia.setImageResource(R.drawable.play_button);
          break;
      }

      mMedia.setOnClickListener(mediaClicker);
    } else {
      // show the no media available banner
    }
  }

  /**
   * Looks up the lesson's quiz file; disables the quiz button when there is
   * none or when progress records show the quiz was already completed.
   */
  private void loadQuiz() {
    Cursor c = managedQuery(
        Academic.LessonFiles.CONTENT_URI,
        null,
        String.format(" %s=? AND %s=?",
            Academic.LessonFiles.LESSON_ID,
            Academic.LessonFiles.MEDIA_TYPE),
        new String[] { mLessonID,
            Integer.toString(Academic.CONTENT_TYPE_QUIZ) },
        Academic.LessonFiles.SORT_ORDER_DEFAULT);

    if (c.moveToFirst()) {
      String quiz = c.getString(c.getColumnIndex(Academic.LessonFiles.URI));
      c.close();

      if (quiz == null || quiz.compareTo("") == 0) {
        mQuizButton.setEnabled(false);
        mQuizButton.setText(getResources().getString(string.no_quiz));
      } else {
        // A Progress row for this lesson+quiz means it was completed.
        Cursor q = managedQuery(
            Progress.CONTENT_URI,
            null,
            String.format("%s=? AND %s=?", Progress.CONTENT_ID,
                Progress.CONTENT_TYPE),
            new String[] { mLessonID,
                Integer.toString(Academic.CONTENT_TYPE_QUIZ) },
            null);

        if (q != null) {
          if (q.getCount() > 0) {
            mQuizButton.setEnabled(false);
            mQuizButton.setText("Quiz Complete");
          }
          q.close();
        }

        mQuizUri = quiz;
      }
    }
    // there is no quiz
    else {
      mQuizButton.setEnabled(false);
    }
  }

  /**
   * Adds one list row per EPUB/PDF resource attached to the lesson, tagging
   * each row with a ResourceHolder consumed by resourceClicker.
   */
  private void loadResourcesBooks() {
    Cursor c = managedQuery(
        Academic.LessonFiles.CONTENT_URI,
        null,
        String.format(" %s=? AND (%s=? OR %s=?)",
            Academic.LessonFiles.LESSON_ID,
            Academic.LessonFiles.MEDIA_TYPE,
            Academic.LessonFiles.MEDIA_TYPE),
        new String[] { mLessonID,
            Integer.toString(Academic.CONTENT_TYPE_EPUB),
            Integer.toString(Academic.CONTENT_TYPE_PDF) },
        Academic.LessonFiles.SORT_ORDER_DEFAULT);

    // reuse these views
    LinearLayout inflatedView;
    TextView inflatedText;
    TextView inflatedLabel;

    Log.i("LessonActivity", "Book Count: " + c.getCount());

    for (c.moveToFirst(); !c.isAfterLast(); c.moveToNext()) {
      // retrieve the title of the book to display in the list item
      // The LessonFiles.URI column stores a content:// URI into the Book
      // table.
      String book = c.getString(c.getColumnIndex(Academic.LessonFiles.URI));
      Uri bookUri = Uri.parse(book);

      Log.i("LessonActivity", book);

      Cursor bookCursor = this.getContentResolver().query(bookUri,
          new String[] { Academic.Book._ID, Academic.Book.TITLE }, null,
          null, Academic.Book.SORT_ORDER_DEFAULT);

      String title = "Unknown";

      if (bookCursor != null) {
        if (bookCursor.moveToFirst()) {
          title = bookCursor.getString(bookCursor
              .getColumnIndex(Academic.Book.TITLE));
        }
        bookCursor.close();
      }

      // Get our view, create a holder, and set the holder as the tag
      inflatedView = (LinearLayout) View.inflate(this,
          R.layout.list_labelled_item, null);
      inflatedText = (TextView) inflatedView
          .findViewById(R.id.list_labelled_item_text);
      inflatedLabel = (TextView) inflatedView
          .findViewById(R.id.list_labelled_item_label);
      inflatedLabel.setText(LessonActivity.this.getString(R.string.book));

      ResourceHolder holder = new ResourceHolder();
      holder.type = c.getInt(c.getColumnIndex(Academic.LessonFiles.MEDIA_TYPE));
      holder.itemUri = bookUri;
      holder.title = inflatedText;
      holder.title.setText(title);

      inflatedView.setTag(holder);
      inflatedView.setOnClickListener(resourceClicker);

      // Add the view to the list
      mResourceList.addView(inflatedView);
    }

    c.close();
  }

  /**
   * Adds one list row per website resource attached to the lesson; the row
   * text is the raw URL.
   */
  private void loadResourcesWeb() {
    Cursor c = managedQuery(
        Academic.LessonFiles.CONTENT_URI,
        null,
        String.format(" %s=? AND %s=?",
            Academic.LessonFiles.LESSON_ID,
            Academic.LessonFiles.MEDIA_TYPE),
        new String[] { mLessonID,
            Integer.toString(Academic.CONTENT_TYPE_WEBSITE) },
        Academic.LessonFiles.SORT_ORDER_DEFAULT);

    // reuse these views
    LinearLayout inflatedView;
    TextView inflatedText;
    TextView inflatedLabel;

    Log.i("LessonActivity", "Website count: " + c.getCount());

    for (c.moveToFirst(); !c.isAfterLast(); c.moveToNext()) {
      // Get our view, create a holder, and set the holder as the tag
      inflatedView = (LinearLayout) View.inflate(this,
          R.layout.list_labelled_item, null);
      inflatedText = (TextView) inflatedView
          .findViewById(R.id.list_labelled_item_text);
      inflatedLabel = (TextView) inflatedView
          .findViewById(R.id.list_labelled_item_label);
      inflatedLabel.setText(LessonActivity.this.getString(R.string.website));

      String uri = c.getString(c.getColumnIndex(Academic.LessonFiles.URI));

      ResourceHolder holder = new ResourceHolder();
      holder.type = c.getInt(c.getColumnIndex(Academic.LessonFiles.MEDIA_TYPE));
      holder.itemUri = Uri.parse(uri);
      holder.title = inflatedText;
      holder.title.setText(uri);

      inflatedView.setTag(holder);
      inflatedView.setOnClickListener(resourceClicker);

      // Add the view to the list
      mResourceList.addView(inflatedView);
    }

    c.close();
  }

  /**
   * Per-row tag carrying a resource's media type, target URI, and its title
   * TextView; consumed by resourceClicker.
   */
  final class ResourceHolder {
    int type;
    TextView title;
    Uri itemUri;
  }

  // Handles "Objective click"
  /**
   * Opens the lesson's HTML objectives file in HtmlActivity, or toasts when
   * the query itself returns null.
   * NOTE(review): a non-null cursor with zero rows silently does nothing —
   * confirm whether the toast was meant for that case too.
   */
  public void onObjectivesClick(View v) {
    Cursor o = managedQuery(LessonFiles.CONTENT_URI, null,
        String.format("%s=? AND %s=?", LessonFiles.LESSON_ID,
            LessonFiles.MEDIA_TYPE),
        new String[] { mLessonID,
            Integer.toString(Academic.CONTENT_TYPE_HTML) }, null);

    if (o != null) {
      if (o.getCount() > 0) {
        o.moveToFirst();
        Intent i = new Intent(this, HtmlActivity.class);
        i.setData(Uri.fromFile(new File(Environment
            .getExternalStorageDirectory()
            + "/Allogy/Files/"
            + o.getString(o.getColumnIndexOrThrow(LessonFiles.URI)))));
        startActivity(i);
      }
    } else {
      Context context = getApplicationContext();
      CharSequence text = "No Objective for Lesson";

      Toast toast = Toast.makeText(context, text, Toast.LENGTH_SHORT);
      toast.setGravity(Gravity.CENTER_VERTICAL, 0, 0);
      toast.show();
    }
  }

  /** Opens the "ask a question" dialog. */
  public void onAskQuestionClick(View v) {
    showDialog(DIALOG_QUESTION);
  }

  // handles "onTakeQuizClick"
  /** Launches QuizActivity with the quiz path and lesson id. */
  public void onTakeQuizClick(View v) {
    Intent i = new Intent(LessonActivity.this, QuizActivity.class);
    i.putExtra(QuizActivity.INTENT_EXTRA_PATH, mQuizUri);
    i.putExtra(QuizActivity.INTENT_EXTRA_LESSON_ID, Integer.parseInt(mLessonID));
    startActivity(i);
  }

  /**
   * Play-button handler: launches the player activity matching the media
   * type resolved in loadMedia().
   */
  final OnClickListener mediaClicker = new OnClickListener() {
    @Override
    public void onClick(View v) {
      Intent i = new Intent();
      switch (mMediaType) {
        case MEDIA_TYPE_AUDIO:
          i.setClass(LessonActivity.this, AudioPlayerActivity.class);
          i.putExtra(AudioPlayerActivity.INTENT_EXTRA_LESSON_FILE_ID, mMediaId);
          break;
        case MEDIA_TYPE_FLASH:
          i.setClass(LessonActivity.this, FlashViewerActivity.class);
          i.putExtra(FlashViewerActivity.LOADING_INTENT_KEY, "default");
          i.putExtra(FlashViewerActivity.FILE_NAME_KEY, mMediaUri);
          i.putExtra(FlashViewerActivity.DATABASE_ID_KEY, Integer.toString(mMediaId));
          break;
        case MEDIA_TYPE_VIDEO:
          i.setClass(LessonActivity.this, VideoPlayerActivity.class);
          i.putExtra(VideoPlayerActivity.INTENT_EXTRA_LESSONFILEID, mMediaId);
          break;
      }
      startActivity(i);
    }
  };

  /**
   * Resource-row handler: dispatches on the row's ResourceHolder type —
   * EPUB is currently disabled (commented out), PDF is handed to an external
   * viewer if one is installed, websites open via ACTION_VIEW.
   */
  final OnClickListener resourceClicker = new OnClickListener() {
    @Override
    public void onClick(View view) {
      ResourceHolder tag = (ResourceHolder) view.getTag();

      Intent i = new Intent();
      switch (tag.type) {
        case Academic.CONTENT_TYPE_EPUB:
          // i.setClass(LessonActivity.this, EReaderActivity.class);
          // i.putExtra(EReaderActivity.EXTRA_EBOOK_TYPE,
          // EReaderActivity.TYPE_EPUB);
          // i.putExtra(EReaderActivity.EXTRA_FILE_URI,
          // tag.itemUri.toString());
          // startActivity(i);
          break;
        case Academic.CONTENT_TYPE_PDF:
          Cursor c = managedQuery(tag.itemUri, new String[] {
              Academic.Book._ID, Academic.Book.PATH }, null, null,
              Academic.Book.SORT_ORDER_DEFAULT);

          if (c != null) {
            // don't forget to move to the first item!
            c.moveToFirst();

            String bookPath = c.getString(c.getColumnIndex(Academic.Book.PATH));
            Uri path = Uri.fromFile(new File(bookPath));
            i.setDataAndType(path, "application/pdf");
            i.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);

            // Only launch if some installed activity can render PDFs.
            List<ResolveInfo> list = getPackageManager()
                .queryIntentActivities(i, PackageManager.GET_ACTIVITIES);

            // activity exists launch it
            if (list.size() > 0) {
              startActivity(i);
            } else {
              Log.i("LessonActivity", "NO ACTIVITY FOR INTENT");
            }
          }
          break;
        case Academic.CONTENT_TYPE_WEBSITE:
          i.setAction(Intent.ACTION_VIEW);
          i.setData(tag.itemUri);
          startActivity(i);
          break;
      }
    }
  };
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */
// NOTE(review): generated model class — field names and @Key annotations are reflected over by the
// JSON (de)serializer, so do not rename fields by hand; regenerate from the discovery doc instead.

package com.google.api.services.compute.model;

/**
 * Represents an external VPN gateway.
 *
 * External VPN gateway is the on-premises VPN gateway(s) or another cloud provider's VPN gateway
 * that connects to your Google Cloud VPN gateway.
 *
 * To create a highly available VPN from Google Cloud Platform to your VPN gateway or another cloud
 * provider's VPN gateway, you must create an external VPN gateway resource with information about
 * the other gateway.
 *
 * For more information about using external VPN gateways, see Creating an HA VPN gateway and
 * tunnel pair to a peer VPN. (== resource_for {$api_version}.externalVpnGateways ==)
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class ExternalVpnGateway extends com.google.api.client.json.GenericJson {

  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String creationTimestamp;

  /**
   * An optional description of this resource. Provide this property when you create the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String description;

  /**
   * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
   * The value may be {@code null}.
   */
  // NOTE(review): @JsonString keeps the uint64 id as a JSON string on the wire, avoiding precision
  // loss for values beyond the double-safe integer range.
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.math.BigInteger id;

  /**
   * List of interfaces for this external VPN gateway.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<ExternalVpnGatewayInterface> interfaces;

  /**
   * [Output Only] Type of the resource. Always compute#externalVpnGateway for externalVpnGateways.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * A fingerprint for the labels being applied to this ExternalVpnGateway, which is essentially a
   * hash of the labels set used for optimistic locking. The fingerprint is initially generated by
   * Compute Engine and changes after every request to modify or update labels. You must always
   * provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
   * request will fail with error 412 conditionNotMet.
   *
   * To see the latest fingerprint, make a get() request to retrieve an ExternalVpnGateway.
   * The value may be {@code null}.
   */
  // NOTE(review): stored Base64-encoded; see decodeLabelFingerprint()/encodeLabelFingerprint().
  @com.google.api.client.util.Key
  private java.lang.String labelFingerprint;

  /**
   * Labels for this resource. These can only be added or modified by the setLabels method. Each
   * label key/value pair must comply with RFC1035. Label values may be empty.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, java.lang.String> labels;

  /**
   * Name of the resource. Provided by the client when the resource is created. The name must be
   * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
   * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
   * character must be a lowercase letter, and all following characters must be a dash, lowercase
   * letter, or digit, except the last character, which cannot be a dash.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * Indicates the user-supplied redundancy type of this external VPN gateway.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String redundancyType;

  /**
   * [Output Only] Server-defined URL for the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String selfLink;

  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * @return value or {@code null} for none
   */
  public java.lang.String getCreationTimestamp() {
    return creationTimestamp;
  }

  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * @param creationTimestamp creationTimestamp or {@code null} for none
   */
  public ExternalVpnGateway setCreationTimestamp(java.lang.String creationTimestamp) {
    this.creationTimestamp = creationTimestamp;
    return this;
  }

  /**
   * An optional description of this resource. Provide this property when you create the resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getDescription() {
    return description;
  }

  /**
   * An optional description of this resource. Provide this property when you create the resource.
   * @param description description or {@code null} for none
   */
  public ExternalVpnGateway setDescription(java.lang.String description) {
    this.description = description;
    return this;
  }

  /**
   * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
   * @return value or {@code null} for none
   */
  public java.math.BigInteger getId() {
    return id;
  }

  /**
   * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
   * @param id id or {@code null} for none
   */
  public ExternalVpnGateway setId(java.math.BigInteger id) {
    this.id = id;
    return this;
  }

  /**
   * List of interfaces for this external VPN gateway.
   * @return value or {@code null} for none
   */
  public java.util.List<ExternalVpnGatewayInterface> getInterfaces() {
    return interfaces;
  }

  /**
   * List of interfaces for this external VPN gateway.
   * @param interfaces interfaces or {@code null} for none
   */
  public ExternalVpnGateway setInterfaces(java.util.List<ExternalVpnGatewayInterface> interfaces) {
    this.interfaces = interfaces;
    return this;
  }

  /**
   * [Output Only] Type of the resource. Always compute#externalVpnGateway for externalVpnGateways.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * [Output Only] Type of the resource. Always compute#externalVpnGateway for externalVpnGateways.
   * @param kind kind or {@code null} for none
   */
  public ExternalVpnGateway setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * A fingerprint for the labels being applied to this ExternalVpnGateway, which is essentially a
   * hash of the labels set used for optimistic locking. The fingerprint is initially generated by
   * Compute Engine and changes after every request to modify or update labels. You must always
   * provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
   * request will fail with error 412 conditionNotMet.
   *
   * To see the latest fingerprint, make a get() request to retrieve an ExternalVpnGateway.
   * @see #decodeLabelFingerprint()
   * @return value or {@code null} for none
   */
  public java.lang.String getLabelFingerprint() {
    return labelFingerprint;
  }

  /**
   * A fingerprint for the labels being applied to this ExternalVpnGateway, which is essentially a
   * hash of the labels set used for optimistic locking. The fingerprint is initially generated by
   * Compute Engine and changes after every request to modify or update labels. You must always
   * provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
   * request will fail with error 412 conditionNotMet.
   *
   * To see the latest fingerprint, make a get() request to retrieve an ExternalVpnGateway.
   * @see #getLabelFingerprint()
   * @return Base64 decoded value or {@code null} for none
   *
   * @since 1.14
   */
  public byte[] decodeLabelFingerprint() {
    return com.google.api.client.util.Base64.decodeBase64(labelFingerprint);
  }

  /**
   * A fingerprint for the labels being applied to this ExternalVpnGateway, which is essentially a
   * hash of the labels set used for optimistic locking. The fingerprint is initially generated by
   * Compute Engine and changes after every request to modify or update labels. You must always
   * provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
   * request will fail with error 412 conditionNotMet.
   *
   * To see the latest fingerprint, make a get() request to retrieve an ExternalVpnGateway.
   * @see #setLabelFingerprint()
   * @param labelFingerprint labelFingerprint or {@code null} for none
   */
  public ExternalVpnGateway setLabelFingerprint(java.lang.String labelFingerprint) {
    this.labelFingerprint = labelFingerprint;
    return this;
  }

  /**
   * A fingerprint for the labels being applied to this ExternalVpnGateway, which is essentially a
   * hash of the labels set used for optimistic locking. The fingerprint is initially generated by
   * Compute Engine and changes after every request to modify or update labels. You must always
   * provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
   * request will fail with error 412 conditionNotMet.
   *
   * To see the latest fingerprint, make a get() request to retrieve an ExternalVpnGateway.
   * @see #setLabelFingerprint()
   *
   * <p>
   * The value is encoded Base64 or {@code null} for none.
   * </p>
   *
   * @since 1.14
   */
  public ExternalVpnGateway encodeLabelFingerprint(byte[] labelFingerprint) {
    this.labelFingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(labelFingerprint);
    return this;
  }

  /**
   * Labels for this resource. These can only be added or modified by the setLabels method. Each
   * label key/value pair must comply with RFC1035. Label values may be empty.
   * @return value or {@code null} for none
   */
  public java.util.Map<String, java.lang.String> getLabels() {
    return labels;
  }

  /**
   * Labels for this resource. These can only be added or modified by the setLabels method. Each
   * label key/value pair must comply with RFC1035. Label values may be empty.
   * @param labels labels or {@code null} for none
   */
  public ExternalVpnGateway setLabels(java.util.Map<String, java.lang.String> labels) {
    this.labels = labels;
    return this;
  }

  /**
   * Name of the resource. Provided by the client when the resource is created. The name must be
   * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
   * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
   * character must be a lowercase letter, and all following characters must be a dash, lowercase
   * letter, or digit, except the last character, which cannot be a dash.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }

  /**
   * Name of the resource. Provided by the client when the resource is created. The name must be
   * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
   * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
   * character must be a lowercase letter, and all following characters must be a dash, lowercase
   * letter, or digit, except the last character, which cannot be a dash.
   * @param name name or {@code null} for none
   */
  public ExternalVpnGateway setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /**
   * Indicates the user-supplied redundancy type of this external VPN gateway.
   * @return value or {@code null} for none
   */
  public java.lang.String getRedundancyType() {
    return redundancyType;
  }

  /**
   * Indicates the user-supplied redundancy type of this external VPN gateway.
   * @param redundancyType redundancyType or {@code null} for none
   */
  public ExternalVpnGateway setRedundancyType(java.lang.String redundancyType) {
    this.redundancyType = redundancyType;
    return this;
  }

  /**
   * [Output Only] Server-defined URL for the resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getSelfLink() {
    return selfLink;
  }

  /**
   * [Output Only] Server-defined URL for the resource.
   * @param selfLink selfLink or {@code null} for none
   */
  public ExternalVpnGateway setSelfLink(java.lang.String selfLink) {
    this.selfLink = selfLink;
    return this;
  }

  // Covariant overrides so fluent chains keep the concrete type.
  @Override
  public ExternalVpnGateway set(String fieldName, Object value) {
    return (ExternalVpnGateway) super.set(fieldName, value);
  }

  @Override
  public ExternalVpnGateway clone() {
    return (ExternalVpnGateway) super.clone();
  }

}
/**
 */
package substationStandard.LNNodes.LNGroupM.impl;

import org.eclipse.emf.common.notify.Notification;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;

import substationStandard.Dataclasses.CMV;
import substationStandard.Dataclasses.DPL;
import substationStandard.Dataclasses.MV;

import substationStandard.Enumerations.HealthStateKind;

import substationStandard.LNNodes.LNGroupM.LNGroupMPackage;
import substationStandard.LNNodes.LNGroupM.MMXN;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>MMXN</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link substationStandard.LNNodes.LNGroupM.impl.MMXNImpl#getEEName <em>EE Name</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupM.impl.MMXNImpl#getEEHealth <em>EE Health</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupM.impl.MMXNImpl#getAmp <em>Amp</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupM.impl.MMXNImpl#getVol <em>Vol</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupM.impl.MMXNImpl#getWatt <em>Watt</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupM.impl.MMXNImpl#getVolAmpr <em>Vol Ampr</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupM.impl.MMXNImpl#getVolAmp <em>Vol Amp</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupM.impl.MMXNImpl#getPwrFact <em>Pwr Fact</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupM.impl.MMXNImpl#getImp <em>Imp</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupM.impl.MMXNImpl#getHz <em>Hz</em>}</li>
 * </ul>
 *
 * @generated
 */
// NOTE(review): EMF-generated class. Hand edits to members tagged @generated are overwritten on
// regeneration; the reflective eGet/eSet/eUnset/eIsSet switch tables must stay in sync with the
// feature IDs declared in LNGroupMPackage.
public class MMXNImpl extends GroupMImpl implements MMXN {
	/**
	 * The cached value of the '{@link #getEEName() <em>EE Name</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getEEName()
	 * @generated
	 * @ordered
	 */
	protected DPL eeName;

	/**
	 * The default value of the '{@link #getEEHealth() <em>EE Health</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getEEHealth()
	 * @generated
	 * @ordered
	 */
	protected static final HealthStateKind EE_HEALTH_EDEFAULT = HealthStateKind.OK;

	/**
	 * The cached value of the '{@link #getEEHealth() <em>EE Health</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getEEHealth()
	 * @generated
	 * @ordered
	 */
	protected HealthStateKind eeHealth = EE_HEALTH_EDEFAULT;

	/**
	 * The cached value of the '{@link #getAmp() <em>Amp</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getAmp()
	 * @generated
	 * @ordered
	 */
	protected MV amp;

	/**
	 * The cached value of the '{@link #getVol() <em>Vol</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getVol()
	 * @generated
	 * @ordered
	 */
	protected MV vol;

	/**
	 * The cached value of the '{@link #getWatt() <em>Watt</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getWatt()
	 * @generated
	 * @ordered
	 */
	protected MV watt;

	/**
	 * The cached value of the '{@link #getVolAmpr() <em>Vol Ampr</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getVolAmpr()
	 * @generated
	 * @ordered
	 */
	protected MV volAmpr;

	/**
	 * The cached value of the '{@link #getVolAmp() <em>Vol Amp</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getVolAmp()
	 * @generated
	 * @ordered
	 */
	protected MV volAmp;

	/**
	 * The cached value of the '{@link #getPwrFact() <em>Pwr Fact</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getPwrFact()
	 * @generated
	 * @ordered
	 */
	protected MV pwrFact;

	/**
	 * The cached value of the '{@link #getImp() <em>Imp</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getImp()
	 * @generated
	 * @ordered
	 */
	protected CMV imp;

	/**
	 * The cached value of the '{@link #getHz() <em>Hz</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getHz()
	 * @generated
	 * @ordered
	 */
	protected MV hz;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected MMXNImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return LNGroupMPackage.Literals.MMXN;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Resolves a cross-document proxy on first access and notifies adapters of the resolution.
	public DPL getEEName() {
		if (eeName != null && eeName.eIsProxy()) {
			InternalEObject oldEEName = (InternalEObject)eeName;
			eeName = (DPL)eResolveProxy(oldEEName);
			if (eeName != oldEEName) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupMPackage.MMXN__EE_NAME, oldEEName, eeName));
			}
		}
		return eeName;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Raw accessor: returns the cached reference without proxy resolution.
	public DPL basicGetEEName() {
		return eeName;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setEEName(DPL newEEName) {
		DPL oldEEName = eeName;
		eeName = newEEName;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupMPackage.MMXN__EE_NAME, oldEEName, eeName));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public HealthStateKind getEEHealth() {
		return eeHealth;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// A null argument resets the attribute to its default (HealthStateKind.OK).
	public void setEEHealth(HealthStateKind newEEHealth) {
		HealthStateKind oldEEHealth = eeHealth;
		eeHealth = newEEHealth == null ? EE_HEALTH_EDEFAULT : newEEHealth;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupMPackage.MMXN__EE_HEALTH, oldEEHealth, eeHealth));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV getAmp() {
		if (amp != null && amp.eIsProxy()) {
			InternalEObject oldAmp = (InternalEObject)amp;
			amp = (MV)eResolveProxy(oldAmp);
			if (amp != oldAmp) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupMPackage.MMXN__AMP, oldAmp, amp));
			}
		}
		return amp;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV basicGetAmp() {
		return amp;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setAmp(MV newAmp) {
		MV oldAmp = amp;
		amp = newAmp;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupMPackage.MMXN__AMP, oldAmp, amp));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV getVol() {
		if (vol != null && vol.eIsProxy()) {
			InternalEObject oldVol = (InternalEObject)vol;
			vol = (MV)eResolveProxy(oldVol);
			if (vol != oldVol) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupMPackage.MMXN__VOL, oldVol, vol));
			}
		}
		return vol;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV basicGetVol() {
		return vol;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setVol(MV newVol) {
		MV oldVol = vol;
		vol = newVol;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupMPackage.MMXN__VOL, oldVol, vol));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV getWatt() {
		if (watt != null && watt.eIsProxy()) {
			InternalEObject oldWatt = (InternalEObject)watt;
			watt = (MV)eResolveProxy(oldWatt);
			if (watt != oldWatt) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupMPackage.MMXN__WATT, oldWatt, watt));
			}
		}
		return watt;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV basicGetWatt() {
		return watt;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setWatt(MV newWatt) {
		MV oldWatt = watt;
		watt = newWatt;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupMPackage.MMXN__WATT, oldWatt, watt));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV getVolAmpr() {
		if (volAmpr != null && volAmpr.eIsProxy()) {
			InternalEObject oldVolAmpr = (InternalEObject)volAmpr;
			volAmpr = (MV)eResolveProxy(oldVolAmpr);
			if (volAmpr != oldVolAmpr) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupMPackage.MMXN__VOL_AMPR, oldVolAmpr, volAmpr));
			}
		}
		return volAmpr;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV basicGetVolAmpr() {
		return volAmpr;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setVolAmpr(MV newVolAmpr) {
		MV oldVolAmpr = volAmpr;
		volAmpr = newVolAmpr;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupMPackage.MMXN__VOL_AMPR, oldVolAmpr, volAmpr));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV getVolAmp() {
		if (volAmp != null && volAmp.eIsProxy()) {
			InternalEObject oldVolAmp = (InternalEObject)volAmp;
			volAmp = (MV)eResolveProxy(oldVolAmp);
			if (volAmp != oldVolAmp) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupMPackage.MMXN__VOL_AMP, oldVolAmp, volAmp));
			}
		}
		return volAmp;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV basicGetVolAmp() {
		return volAmp;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setVolAmp(MV newVolAmp) {
		MV oldVolAmp = volAmp;
		volAmp = newVolAmp;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupMPackage.MMXN__VOL_AMP, oldVolAmp, volAmp));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV getPwrFact() {
		if (pwrFact != null && pwrFact.eIsProxy()) {
			InternalEObject oldPwrFact = (InternalEObject)pwrFact;
			pwrFact = (MV)eResolveProxy(oldPwrFact);
			if (pwrFact != oldPwrFact) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupMPackage.MMXN__PWR_FACT, oldPwrFact, pwrFact));
			}
		}
		return pwrFact;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV basicGetPwrFact() {
		return pwrFact;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setPwrFact(MV newPwrFact) {
		MV oldPwrFact = pwrFact;
		pwrFact = newPwrFact;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupMPackage.MMXN__PWR_FACT, oldPwrFact, pwrFact));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public CMV getImp() {
		if (imp != null && imp.eIsProxy()) {
			InternalEObject oldImp = (InternalEObject)imp;
			imp = (CMV)eResolveProxy(oldImp);
			if (imp != oldImp) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupMPackage.MMXN__IMP, oldImp, imp));
			}
		}
		return imp;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public CMV basicGetImp() {
		return imp;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setImp(CMV newImp) {
		CMV oldImp = imp;
		imp = newImp;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupMPackage.MMXN__IMP, oldImp, imp));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV getHz() {
		if (hz != null && hz.eIsProxy()) {
			InternalEObject oldHz = (InternalEObject)hz;
			hz = (MV)eResolveProxy(oldHz);
			if (hz != oldHz) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupMPackage.MMXN__HZ, oldHz, hz));
			}
		}
		return hz;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MV basicGetHz() {
		return hz;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setHz(MV newHz) {
		MV oldHz = hz;
		hz = newHz;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupMPackage.MMXN__HZ, oldHz, hz));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Reflective read: dispatches on the feature ID; 'resolve' selects proxy-resolving vs. raw access.
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case LNGroupMPackage.MMXN__EE_NAME:
				if (resolve) return getEEName();
				return basicGetEEName();
			case LNGroupMPackage.MMXN__EE_HEALTH:
				return getEEHealth();
			case LNGroupMPackage.MMXN__AMP:
				if (resolve) return getAmp();
				return basicGetAmp();
			case LNGroupMPackage.MMXN__VOL:
				if (resolve) return getVol();
				return basicGetVol();
			case LNGroupMPackage.MMXN__WATT:
				if (resolve) return getWatt();
				return basicGetWatt();
			case LNGroupMPackage.MMXN__VOL_AMPR:
				if (resolve) return getVolAmpr();
				return basicGetVolAmpr();
			case LNGroupMPackage.MMXN__VOL_AMP:
				if (resolve) return getVolAmp();
				return basicGetVolAmp();
			case LNGroupMPackage.MMXN__PWR_FACT:
				if (resolve) return getPwrFact();
				return basicGetPwrFact();
			case LNGroupMPackage.MMXN__IMP:
				if (resolve) return getImp();
				return basicGetImp();
			case LNGroupMPackage.MMXN__HZ:
				if (resolve) return getHz();
				return basicGetHz();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Reflective write: unknown feature IDs fall through to the superclass.
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case LNGroupMPackage.MMXN__EE_NAME:
				setEEName((DPL)newValue);
				return;
			case LNGroupMPackage.MMXN__EE_HEALTH:
				setEEHealth((HealthStateKind)newValue);
				return;
			case LNGroupMPackage.MMXN__AMP:
				setAmp((MV)newValue);
				return;
			case LNGroupMPackage.MMXN__VOL:
				setVol((MV)newValue);
				return;
			case LNGroupMPackage.MMXN__WATT:
				setWatt((MV)newValue);
				return;
			case LNGroupMPackage.MMXN__VOL_AMPR:
				setVolAmpr((MV)newValue);
				return;
			case LNGroupMPackage.MMXN__VOL_AMP:
				setVolAmp((MV)newValue);
				return;
			case LNGroupMPackage.MMXN__PWR_FACT:
				setPwrFact((MV)newValue);
				return;
			case LNGroupMPackage.MMXN__IMP:
				setImp((CMV)newValue);
				return;
			case LNGroupMPackage.MMXN__HZ:
				setHz((MV)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Reflective reset: references go back to null, attributes to their EMF default.
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case LNGroupMPackage.MMXN__EE_NAME:
				setEEName((DPL)null);
				return;
			case LNGroupMPackage.MMXN__EE_HEALTH:
				setEEHealth(EE_HEALTH_EDEFAULT);
				return;
			case LNGroupMPackage.MMXN__AMP:
				setAmp((MV)null);
				return;
			case LNGroupMPackage.MMXN__VOL:
				setVol((MV)null);
				return;
			case LNGroupMPackage.MMXN__WATT:
				setWatt((MV)null);
				return;
			case LNGroupMPackage.MMXN__VOL_AMPR:
				setVolAmpr((MV)null);
				return;
			case LNGroupMPackage.MMXN__VOL_AMP:
				setVolAmp((MV)null);
				return;
			case LNGroupMPackage.MMXN__PWR_FACT:
				setPwrFact((MV)null);
				return;
			case LNGroupMPackage.MMXN__IMP:
				setImp((CMV)null);
				return;
			case LNGroupMPackage.MMXN__HZ:
				setHz((MV)null);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Reflective "is set" check: a feature is set when it differs from its default value.
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case LNGroupMPackage.MMXN__EE_NAME:
				return eeName != null;
			case LNGroupMPackage.MMXN__EE_HEALTH:
				return eeHealth != EE_HEALTH_EDEFAULT;
			case LNGroupMPackage.MMXN__AMP:
				return amp != null;
			case LNGroupMPackage.MMXN__VOL:
				return vol != null;
			case LNGroupMPackage.MMXN__WATT:
				return watt != null;
			case LNGroupMPackage.MMXN__VOL_AMPR:
				return volAmpr != null;
			case LNGroupMPackage.MMXN__VOL_AMP:
				return volAmp != null;
			case LNGroupMPackage.MMXN__PWR_FACT:
				return pwrFact != null;
			case LNGroupMPackage.MMXN__IMP:
				return imp != null;
			case LNGroupMPackage.MMXN__HZ:
				return hz != null;
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Only non-reference attributes appear in toString; proxies defer to the superclass.
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (EEHealth: ");
		result.append(eeHealth);
		result.append(')');
		return result.toString();
	}

} //MMXNImpl