gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
Derby - Class org.apache.derbyTesting.functionTests.tests.upgradeTests.Changes10_2
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyTesting.functionTests.tests.upgradeTests;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import javax.sql.DataSource;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.derbyTesting.junit.JDBC;
import org.apache.derbyTesting.junit.JDBCDataSource;
/**
* Upgrade test cases for changes made in 10.2.
* If the old version is 10.2 or later then these tests
* will not be run.
* <BR>
* 10.2 Upgrade issues
* <UL>
* <LI> testTriggerInternalVTI - Check internal re-write of triggers
* does not break triggers in soft upgrade mode.
* <LI> testReusableRecordIdSequenceNumber - Test that reusable record
* identifiers do not cause issues in soft upgrade
* <LI> testGrantRevokeStatements - Check G/R not allowed in soft upgrade.
* <LI> testDatabaseOwnerChange - test that on a hard upgrade the database owner is set.
* </UL>
*/
public class Changes10_2 extends UpgradeChange {

    /**
     * Build the suite of 10.2 upgrade test cases.
     * The encryption tests are only added when the VM supports JDBC 3,
     * since encryption requires J2SE.
     *
     * @return suite of applicable upgrade tests
     */
    public static Test suite() {
        TestSuite suite = new TestSuite("Upgrade changes for 10.2");
        suite.addTestSuite(Changes10_2.class);
        // Encryption is only supported on J2SE (JDBC 3) or higher.
        if (JDBC.vmSupportsJDBC3())
        {
            suite.addTest(new Changes10_2("changeEncryptionFromNone"));
            suite.addTest(new Changes10_2("changeEncryptionFromEncryptedDatabase"));
        }
        return suite;
    }

    /**
     * Create a named test case.
     *
     * @param name name of the test method to run
     */
    public Changes10_2(String name) {
        super(name);
    }

    /**
     * Trigger (internal) VTI.
     * 10.2 - Check that a statement trigger created in 10.0
     * or 10.1 can be executed in 10.2 and that a statement
     * trigger created in soft upgrade in 10.2 can be used
     * in older releases.
     *
     * The VTI implementing statement triggers changed in
     * 10.2 from implementations of ResultSet to implementations
     * of PreparedStatement. See DERBY-438. The internal
     * api for the re-written action statement remains the
     * same. The re-compile of the trigger on version changes
     * should automatically switch between the two implementations.
     *
     * @throws SQLException
     */
    public void testTriggerInternalVTI()
        throws SQLException {
        Statement s = createStatement();
        // Variable name suggests the DB2-compatibility "MODE DB2SQL"
        // clause became optional from 10.3 onwards; when the old version
        // is older than 10.3 the clause is included in the trigger DDL.
        boolean modeDb2SqlOptional = oldAtLeast(10, 3);
        switch (getPhase()) {
        case PH_CREATE:
            // Base table plus one target table for the row trigger
            // (T438_T1) and one for the statement trigger (T438_T2);
            // the first pair of triggers is created by the OLD engine.
            s.execute("CREATE TABLE D438.T438(a int, b varchar(20), c int)");
            s.execute("INSERT INTO D438.T438 VALUES(1, 'DERBY-438', 2)");
            s.execute("CREATE TABLE D438.T438_T1(a int, b varchar(20))");
            s.execute("CREATE TABLE D438.T438_T2(a int, c int)");
            s.execute(
                "create trigger D438.T438_ROW_1 after UPDATE on D438.T438 " +
                "referencing new as n old as o " +
                "for each row "+
                (modeDb2SqlOptional?"":"mode db2sql ") +
                "insert into D438.T438_T1(a, b) values (n.a, n.b || '_ROW')");
            s.executeUpdate(
                "create trigger D438.T438_STMT_1 after UPDATE on D438.T438 " +
                "referencing new_table as n " +
                "for each statement "+
                (modeDb2SqlOptional?"":"mode db2sql ") +
                "insert into D438.T438_T1(a, b) select n.a, n.b || '_STMT' from n");
            commit();
            break;
        case PH_SOFT_UPGRADE:
            // A second pair of triggers is created while running the
            // NEW engine in soft-upgrade mode; they populate T438_T2.
            s.execute(
                "create trigger D438.T438_ROW_2 after UPDATE on D438.T438 " +
                "referencing new as n old as o " +
                "for each row "+
                (modeDb2SqlOptional?"":"mode db2sql ") +
                "insert into D438.T438_T2(a, c) values (n.a, n.c + 100)");
            s.executeUpdate(
                "create trigger D438.T438_STMT_2 after UPDATE on D438.T438 " +
                "referencing new_table as n " +
                "for each statement "+
                (modeDb2SqlOptional?"":"mode db2sql ") +
                "insert into D438.T438_T2(a, c) select n.a, n.c + 4000 from n");
            commit();
            break;
        case PH_POST_SOFT_UPGRADE:
            break;
        case PH_HARD_UPGRADE:
            break;
        }
        // Test the firing of the triggers. The UPDATE bumps c by one,
        // so c grows by one each time this method runs (once per phase).
        s.executeUpdate("UPDATE D438.T438 set c = c + 1");
        commit();
        ResultSet rs = s.executeQuery("SELECT a,b from D438.T438_T1 ORDER BY 2");
        JDBC.assertFullResultSet(rs, new String[][]
            {{"1", "DERBY-438_ROW"},
             {"1", "DERBY-438_STMT"}});
        rs.close();
        rs = s.executeQuery("SELECT a,c from D438.T438_T2 ORDER BY 2");
        if (getPhase() == PH_CREATE)
        {
            // expect no rows since the triggers that populate
            // the table are only defined later, in soft upgrade.
            assertFalse(rs.next());
        }
        else
        {
            // c started at 2 and has been incremented once per phase
            // (getPhase() + 1 runs of this method so far); the row
            // trigger adds 100 and the statement trigger adds 4000.
            JDBC.assertFullResultSet(rs, new String[][] {
                {"1", Integer.toString(2 + 100 + getPhase() + 1)},
                {"1", Integer.toString(2 + 4000 + getPhase() + 1)}});
        }
        rs.close();
        // Clear the trigger target tables so the next phase starts clean.
        s.executeUpdate("DELETE FROM D438.T438_T1");
        s.executeUpdate("DELETE FROM D438.T438_T2");
        commit();
        s.close();
    }

    /**
     * In 10.2: We will write a ReusableRecordIdSequenceNumber in the
     * header of a FileContainer.
     *
     * Verify here that a 10.1 Database does not malfunction from this.
     * 10.1 Databases should ignore the field.
     */
    public void testReusableRecordIdSequenceNumber()
        throws SQLException
    {
        // In-place compress is only run when the old version is at
        // least 10.1 (presumably the procedure is unavailable before
        // that release).
        boolean runCompress = oldAtLeast(10, 1);
        switch(getPhase()) {
        case PH_CREATE: {
            Statement s = createStatement();
            s.execute("create table CT1(id int)");
            s.execute("insert into CT1 values 1,2,3,4,5,6,7,8,9,10");
            s.close();
            commit();
            break;
        }
        case PH_SOFT_UPGRADE:
            // Compressing the table in soft upgrade causes the new
            // engine to rewrite the container header.
            if (runCompress) {
                PreparedStatement ps = prepareStatement
                    ("call SYSCS_UTIL.SYSCS_INPLACE_COMPRESS_TABLE(?,?,?,?,?)");
                ps.setString(1, "APP"); // schema
                ps.setString(2, "CT1"); // table name
                ps.setInt(3, 1); // purge
                ps.setInt(4, 1); // defragment rows
                ps.setInt(5, 1); // truncate end
                ps.executeUpdate();
                ps.close();
                commit();
            }
            break;
        case PH_POST_SOFT_UPGRADE: {
            // We are now back on the old version (e.g. 10.1); verify
            // the table is still fully readable and writable.
            Statement s = createStatement();
            ResultSet rs = s.executeQuery("select * from CT1");
            while (rs.next()) {
                rs.getInt(1);
            }
            s.execute("insert into CT1 values 11,12,13,14,15,16,17,18,19");
            s.close();
            commit();
            break;
        }
        case PH_HARD_UPGRADE:
            break;
        }
    }

    /**
     * Simple test of if GRANT/REVOKE statements are handled
     * correctly in terms of being allowed in soft upgrade.
     * @throws SQLException
     *
     */
    public void testGrantRevokeStatements() throws SQLException
    {
        Statement s = createStatement();
        switch(getPhase()) {
        // Expected error state differs by phase: old releases reject
        // the syntax outright, soft upgrade requires a hard upgrade,
        // and hard upgrade fails only because SQL authorization is off.
        case PH_CREATE:
        case PH_POST_SOFT_UPGRADE:
            // was syntax error in 10.0,10.1
            assertStatementError("42X01", s,
                "GRANT SELECT ON TABLE1 TO USER1");
            assertStatementError("42X01", s,
                "REVOKE SELECT ON TABLE1 FROM USER1");
            break;
        case PH_SOFT_UPGRADE:
            // require hard upgrade
            assertStatementError(SQLSTATE_NEED_UPGRADE, s,
                "GRANT SELECT ON TABLE1 TO USER1");
            assertStatementError(SQLSTATE_NEED_UPGRADE, s,
                "REVOKE SELECT ON TABLE1 FROM USER1");
            break;
        case PH_HARD_UPGRADE:
            // not supported because SQL authorization not set
            assertStatementError("42Z60", s,
                "GRANT SELECT ON TABLE1 TO USER1");
            assertStatementError("42Z60", s,
                "REVOKE SELECT ON TABLE1 FROM USER1");
            break;
        }
        s.close();
    }

    /**
     * This method lists the schema names and authorization ids in
     * SYS.SCHEMAS table. This is to test that the owner of system schemas is
     * changed from pseudo user "DBA" to the user invoking upgrade.
     *
     * @throws SQLException
     */
    public void testDatabaseOwnerChange() throws SQLException
    {
        switch (getPhase())
        {
        case PH_CREATE:
        case PH_SOFT_UPGRADE:
        case PH_POST_SOFT_UPGRADE:
            // Before hard upgrade the system schemas are owned by the
            // pseudo user "DBA".
            checkSystemSchemasOwner("DBA");
            break;
        case PH_HARD_UPGRADE:
            // Hard upgrade transfers ownership to the upgrading user.
            checkSystemSchemasOwner(getTestConfiguration().getUserName());
            break;
        }
    }

    /**
     * Assert that every system schema (SYS*, NULLID, SQLJ) is owned by
     * the given authorization id.
     *
     * @param name expected AUTHORIZATIONID for all system schemas
     * @throws SQLException
     */
    private void checkSystemSchemasOwner(String name) throws SQLException
    {
        Statement s = createStatement();
        ResultSet rs = s.executeQuery(
            "select AUTHORIZATIONID, SCHEMANAME from SYS.SYSSCHEMAS " +
            "WHERE SCHEMANAME LIKE 'SYS%' OR " +
            "SCHEMANAME IN ('NULLID', 'SQLJ')");
        while (rs.next()) {
            assertEquals("AUTHORIZATIONID not valid for " + rs.getString(2),
                name, rs.getString(1));
        }
        rs.close();
        s.close();
    }

    /**
     * This method checks that some system routines are granted public access
     * after a full upgrade.
     *
     * @throws SQLException
     */
    public void testSystemRoutinePermissions() throws SQLException
    {
        switch (getPhase())
        {
        case PH_CREATE:
        case PH_SOFT_UPGRADE:
        case PH_POST_SOFT_UPGRADE:
            // No permission rows exist until the hard upgrade runs.
            break;
        case PH_HARD_UPGRADE:
            // After hard upgrade these routines must be granted to
            // PUBLIC by the upgrading user.
            Statement s = createStatement();
            ResultSet rs = s.executeQuery("select A.ALIAS FROM " +
                "SYS.SYSROUTINEPERMS R, SYS.SYSALIASES A " +
                "WHERE R.ALIASID = A.ALIASID AND " +
                "R.GRANTEE = 'PUBLIC' AND " +
                "R.GRANTOR = '"
                + getTestConfiguration().getUserName() + "'" +
                " ORDER BY 1");
            JDBC.assertFullResultSet(rs, new String[][]
                {{"SYSCS_COMPRESS_TABLE"},
                 {"SYSCS_GET_RUNTIMESTATISTICS"},
                 {"SYSCS_INPLACE_COMPRESS_TABLE"},
                 {"SYSCS_SET_RUNTIMESTATISTICS"},
                 {"SYSCS_SET_STATISTICS_TIMING"}}
            );
            rs.close();
            s.close();
            break;
        }
    }

    /**
     * Run the change encryption test against a
     * non-encrypted database. Test that changing the encryption
     * is only allowed if the database has been hard-upgraded.
     * This test assumes it has its own single use database, which
     * will not be booted by the general upgrade test setup.
     * @throws SQLException
     */
    public void changeEncryptionFromNone() throws SQLException
    {
        DataSource ds = JDBCDataSource.getDataSourceLogical("NO_ENCRYPT_10_2");
        switch (getPhase())
        {
        case PH_CREATE:
            // create the database if it was not already created.
            JDBCDataSource.setBeanProperty(ds, "createDatabase", "create");
            ds.getConnection().close();
            break;
        case PH_SOFT_UPGRADE:
            // Attempting to turn on encryption in soft upgrade must fail.
            JDBCDataSource.setBeanProperty(ds, "connectionAttributes",
                "dataEncryption=true;bootPassword=xyz1234abc");
            try {
                ds.getConnection();
                fail("open re-encrypted connection in soft upgrade");
            } catch (SQLException e) {
                // XJ040 (boot failure) wraps XCL47 (operation requires upgrade).
                assertSQLState("XJ040", e);
                e = e.getNextException();
                assertNotNull(e);
                assertSQLState("XCL47", e);
            }
            break;
        case PH_POST_SOFT_UPGRADE:
            // Should be able to successfully connect to it
            // using the old setup.
            ds.getConnection().close();
            break;
        case PH_HARD_UPGRADE:
            // On hard upgrade should be able to connect to it
            // changing the encryption.
            // Note we have to explicitly upgrade additional databases.
            JDBCDataSource.setBeanProperty(ds, "connectionAttributes",
                "upgrade=true;dataEncryption=true;bootPassword=haRD1234upGrAde");
            ds.getConnection().close();
            // Shutdown the database.
            JDBCDataSource.clearStringBeanProperty(ds, "connectionAttributes");
            JDBCDataSource.shutdownDatabase(ds);
            // Reboot with no boot password, should fail
            try {
                ds.getConnection();
                fail("open re-encrypted connection without password");
            } catch (SQLException e) {
                // XJ040 wraps XBM06 (startup failed: wrong boot password).
                assertSQLState("XJ040", e);
                e = e.getNextException();
                assertNotNull(e);
                assertSQLState("XBM06", e);
            }
            // And connect successfully.
            JDBCDataSource.setBeanProperty(ds, "connectionAttributes",
                "bootPassword=haRD1234upGrAde");
            ds.getConnection().close();
            break;
        }
    }

    /**
     * Run the change encryption test against a
     * encrypted database. Test that changing the encryption
     * is only allowed if the database has been hard-upgraded.
     * This test assumes it has its own single use database, which
     * will not be booted by the general upgrade test setup.
     * @throws SQLException
     */
    public void changeEncryptionFromEncryptedDatabase() throws SQLException
    {
        DataSource ds = JDBCDataSource.getDataSourceLogical("ENCRYPT_10_2");
        switch (getPhase())
        {
        case PH_CREATE:
            // create the database encrypted
            JDBCDataSource.setBeanProperty(ds, "createDatabase", "create");
            JDBCDataSource.setBeanProperty(ds, "connectionAttributes",
                "dataEncryption=true;bootPassword=old862phRase");
            ds.getConnection().close();
            break;
        case PH_SOFT_UPGRADE:
            // Changing the boot password in soft upgrade must fail.
            JDBCDataSource.setBeanProperty(ds, "connectionAttributes",
                "bootPassword=old862phRase;newBootPassword=new902pHrAse");
            try {
                ds.getConnection();
                fail("open re-encrypted connection in soft upgrade");
            } catch (SQLException e) {
                // XJ040 (boot failure) wraps XCL47 (operation requires upgrade).
                assertSQLState("XJ040", e);
                e = e.getNextException();
                assertNotNull(e);
                assertSQLState("XCL47", e);
            }
            break;
        case PH_POST_SOFT_UPGRADE:
            // Should be able to successfully connect to it
            // using the old setup.
            JDBCDataSource.setBeanProperty(ds, "connectionAttributes",
                "bootPassword=old862phRase");
            ds.getConnection().close();
            break;
        case PH_HARD_UPGRADE:
            // On hard upgrade should be able to connect to it
            // changing the encryption.
            // Note we have to explicitly upgrade additional databases.
            JDBCDataSource.setBeanProperty(ds, "connectionAttributes",
                "upgrade=true;bootPassword=old862phRase;newBootPassword=hard924pHrAse");
            ds.getConnection().close();
            // Shutdown the database.
            JDBCDataSource.clearStringBeanProperty(ds, "connectionAttributes");
            JDBCDataSource.shutdownDatabase(ds);
            // Reboot with no boot password, should fail
            try {
                ds.getConnection();
                fail("open re-encrypted connection without password");
            } catch (SQLException e) {
                // XJ040 wraps XBM06 (startup failed: wrong boot password).
                assertSQLState("XJ040", e);
                e = e.getNextException();
                assertNotNull(e);
                assertSQLState("XBM06", e);
            }
            // And connect successfully.
            JDBCDataSource.setBeanProperty(ds, "connectionAttributes",
                "bootPassword=hard924pHrAse");
            ds.getConnection().close();
            break;
        }
    }
}
| |
package com.google.api.ads.adwords.jaxws.v201509.cm;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
*
* A biddable (positive) criterion in an adgroup.
*
*
* <p>Java class for BiddableAdGroupCriterion complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="BiddableAdGroupCriterion">
* <complexContent>
* <extension base="{https://adwords.google.com/api/adwords/cm/v201509}AdGroupCriterion">
* <sequence>
* <element name="userStatus" type="{https://adwords.google.com/api/adwords/cm/v201509}UserStatus" minOccurs="0"/>
* <element name="systemServingStatus" type="{https://adwords.google.com/api/adwords/cm/v201509}SystemServingStatus" minOccurs="0"/>
* <element name="approvalStatus" type="{https://adwords.google.com/api/adwords/cm/v201509}ApprovalStatus" minOccurs="0"/>
* <element name="disapprovalReasons" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
* <element name="destinationUrl" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="experimentData" type="{https://adwords.google.com/api/adwords/cm/v201509}BiddableAdGroupCriterionExperimentData" minOccurs="0"/>
* <element name="firstPageCpc" type="{https://adwords.google.com/api/adwords/cm/v201509}Bid" minOccurs="0"/>
* <element name="topOfPageCpc" type="{https://adwords.google.com/api/adwords/cm/v201509}Bid" minOccurs="0"/>
* <element name="qualityInfo" type="{https://adwords.google.com/api/adwords/cm/v201509}QualityInfo" minOccurs="0"/>
* <element name="biddingStrategyConfiguration" type="{https://adwords.google.com/api/adwords/cm/v201509}BiddingStrategyConfiguration" minOccurs="0"/>
* <element name="bidModifier" type="{http://www.w3.org/2001/XMLSchema}double" minOccurs="0"/>
* <element name="finalUrls" type="{https://adwords.google.com/api/adwords/cm/v201509}UrlList" minOccurs="0"/>
* <element name="finalMobileUrls" type="{https://adwords.google.com/api/adwords/cm/v201509}UrlList" minOccurs="0"/>
* <element name="finalAppUrls" type="{https://adwords.google.com/api/adwords/cm/v201509}AppUrlList" minOccurs="0"/>
* <element name="trackingUrlTemplate" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="urlCustomParameters" type="{https://adwords.google.com/api/adwords/cm/v201509}CustomParameters" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "BiddableAdGroupCriterion", propOrder = {
    "userStatus",
    "systemServingStatus",
    "approvalStatus",
    "disapprovalReasons",
    "destinationUrl",
    "experimentData",
    "firstPageCpc",
    "topOfPageCpc",
    "qualityInfo",
    "biddingStrategyConfiguration",
    "bidModifier",
    "finalUrls",
    "finalMobileUrls",
    "finalAppUrls",
    "trackingUrlTemplate",
    "urlCustomParameters"
})
public class BiddableAdGroupCriterion
    extends AdGroupCriterion
{

    @XmlSchemaType(name = "string")
    protected UserStatus userStatus;
    @XmlSchemaType(name = "string")
    protected SystemServingStatus systemServingStatus;
    @XmlSchemaType(name = "string")
    protected ApprovalStatus approvalStatus;
    protected List<String> disapprovalReasons;
    protected String destinationUrl;
    protected BiddableAdGroupCriterionExperimentData experimentData;
    protected Bid firstPageCpc;
    protected Bid topOfPageCpc;
    protected QualityInfo qualityInfo;
    protected BiddingStrategyConfiguration biddingStrategyConfiguration;
    protected Double bidModifier;
    protected UrlList finalUrls;
    protected UrlList finalMobileUrls;
    protected AppUrlList finalAppUrls;
    protected String trackingUrlTemplate;
    protected CustomParameters urlCustomParameters;

    /** @return the userStatus, possibly {@code null} */
    public UserStatus getUserStatus() {
        return this.userStatus;
    }

    /** @param value the {@link UserStatus} to set; may be {@code null} */
    public void setUserStatus(UserStatus value) {
        userStatus = value;
    }

    /** @return the systemServingStatus, possibly {@code null} */
    public SystemServingStatus getSystemServingStatus() {
        return this.systemServingStatus;
    }

    /** @param value the {@link SystemServingStatus} to set; may be {@code null} */
    public void setSystemServingStatus(SystemServingStatus value) {
        systemServingStatus = value;
    }

    /** @return the approvalStatus, possibly {@code null} */
    public ApprovalStatus getApprovalStatus() {
        return this.approvalStatus;
    }

    /** @param value the {@link ApprovalStatus} to set; may be {@code null} */
    public void setApprovalStatus(ApprovalStatus value) {
        approvalStatus = value;
    }

    /**
     * Returns the live, lazily-created list of disapproval reasons.
     * Mutations write straight through to this JAXB object, which is
     * why no corresponding setter exists. Never returns {@code null}.
     *
     * @return the live list of {@link String} reasons
     */
    public List<String> getDisapprovalReasons() {
        if (this.disapprovalReasons == null) {
            this.disapprovalReasons = new ArrayList<String>();
        }
        return this.disapprovalReasons;
    }

    /** @return the destinationUrl, possibly {@code null} */
    public String getDestinationUrl() {
        return this.destinationUrl;
    }

    /** @param value the destination URL to set; may be {@code null} */
    public void setDestinationUrl(String value) {
        destinationUrl = value;
    }

    /** @return the experimentData, possibly {@code null} */
    public BiddableAdGroupCriterionExperimentData getExperimentData() {
        return this.experimentData;
    }

    /** @param value the {@link BiddableAdGroupCriterionExperimentData} to set; may be {@code null} */
    public void setExperimentData(BiddableAdGroupCriterionExperimentData value) {
        experimentData = value;
    }

    /** @return the firstPageCpc, possibly {@code null} */
    public Bid getFirstPageCpc() {
        return this.firstPageCpc;
    }

    /** @param value the first-page {@link Bid} to set; may be {@code null} */
    public void setFirstPageCpc(Bid value) {
        firstPageCpc = value;
    }

    /** @return the topOfPageCpc, possibly {@code null} */
    public Bid getTopOfPageCpc() {
        return this.topOfPageCpc;
    }

    /** @param value the top-of-page {@link Bid} to set; may be {@code null} */
    public void setTopOfPageCpc(Bid value) {
        topOfPageCpc = value;
    }

    /** @return the qualityInfo, possibly {@code null} */
    public QualityInfo getQualityInfo() {
        return this.qualityInfo;
    }

    /** @param value the {@link QualityInfo} to set; may be {@code null} */
    public void setQualityInfo(QualityInfo value) {
        qualityInfo = value;
    }

    /** @return the biddingStrategyConfiguration, possibly {@code null} */
    public BiddingStrategyConfiguration getBiddingStrategyConfiguration() {
        return this.biddingStrategyConfiguration;
    }

    /** @param value the {@link BiddingStrategyConfiguration} to set; may be {@code null} */
    public void setBiddingStrategyConfiguration(BiddingStrategyConfiguration value) {
        biddingStrategyConfiguration = value;
    }

    /** @return the bidModifier, possibly {@code null} */
    public Double getBidModifier() {
        return this.bidModifier;
    }

    /** @param value the bid modifier to set; may be {@code null} */
    public void setBidModifier(Double value) {
        bidModifier = value;
    }

    /** @return the finalUrls, possibly {@code null} */
    public UrlList getFinalUrls() {
        return this.finalUrls;
    }

    /** @param value the final {@link UrlList} to set; may be {@code null} */
    public void setFinalUrls(UrlList value) {
        finalUrls = value;
    }

    /** @return the finalMobileUrls, possibly {@code null} */
    public UrlList getFinalMobileUrls() {
        return this.finalMobileUrls;
    }

    /** @param value the final mobile {@link UrlList} to set; may be {@code null} */
    public void setFinalMobileUrls(UrlList value) {
        finalMobileUrls = value;
    }

    /** @return the finalAppUrls, possibly {@code null} */
    public AppUrlList getFinalAppUrls() {
        return this.finalAppUrls;
    }

    /** @param value the final {@link AppUrlList} to set; may be {@code null} */
    public void setFinalAppUrls(AppUrlList value) {
        finalAppUrls = value;
    }

    /** @return the trackingUrlTemplate, possibly {@code null} */
    public String getTrackingUrlTemplate() {
        return this.trackingUrlTemplate;
    }

    /** @param value the tracking URL template to set; may be {@code null} */
    public void setTrackingUrlTemplate(String value) {
        trackingUrlTemplate = value;
    }

    /** @return the urlCustomParameters, possibly {@code null} */
    public CustomParameters getUrlCustomParameters() {
        return this.urlCustomParameters;
    }

    /** @param value the {@link CustomParameters} to set; may be {@code null} */
    public void setUrlCustomParameters(CustomParameters value) {
        urlCustomParameters = value;
    }

}
| |
package au.edu.unsw.cse.soc.federatedcloud.deployers.github.repository;
/*
* Copyright (c) 2014, Denis Weerasiri All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*******************************************************************************
* Copyright (c) 2011 GitHub Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Kevin Sawicki (GitHub Inc.) - initial API and implementation
* Christian Trutz - HttpClient 4.1
*******************************************************************************/
import static com.google.gson.stream.JsonToken.BEGIN_ARRAY;
import static java.net.HttpURLConnection.HTTP_ACCEPTED;
import static java.net.HttpURLConnection.HTTP_BAD_REQUEST;
import static java.net.HttpURLConnection.HTTP_CONFLICT;
import static java.net.HttpURLConnection.HTTP_CREATED;
import static java.net.HttpURLConnection.HTTP_FORBIDDEN;
import static java.net.HttpURLConnection.HTTP_GONE;
import static java.net.HttpURLConnection.HTTP_INTERNAL_ERROR;
import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
import static java.net.HttpURLConnection.HTTP_NO_CONTENT;
import static java.net.HttpURLConnection.HTTP_OK;
import static java.net.HttpURLConnection.HTTP_UNAUTHORIZED;
import static org.eclipse.egit.github.core.client.IGitHubConstants.AUTH_TOKEN;
import static org.eclipse.egit.github.core.client.IGitHubConstants.CHARSET_UTF8;
import static org.eclipse.egit.github.core.client.IGitHubConstants.CONTENT_TYPE_JSON;
import static org.eclipse.egit.github.core.client.IGitHubConstants.HOST_API;
import static org.eclipse.egit.github.core.client.IGitHubConstants.HOST_DEFAULT;
import static org.eclipse.egit.github.core.client.IGitHubConstants.HOST_GISTS;
import static org.eclipse.egit.github.core.client.IGitHubConstants.PROTOCOL_HTTPS;
import static org.eclipse.egit.github.core.client.IGitHubConstants.SEGMENT_V3_API;
import com.google.gson.Gson;
import com.google.gson.JsonParseException;
import com.google.gson.stream.JsonReader;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Type;
import java.net.HttpURLConnection;
import java.net.URL;
import org.eclipse.egit.github.core.RequestError;
import org.eclipse.egit.github.core.util.EncodingUtils;
/**
* Client class for interacting with GitHub HTTP/JSON API.
*/
public class GitHubClient {
/**
 * Create an API v3 client for the given URL.
 * <p>
 * The client is HTTPS-based; when the URL points at github.com or
 * gist.github.com, the host is replaced by the 'api'-prefixed API host.
 *
 * @param url repository or site URL to derive the host from
 * @return configured client
 * @throws IllegalArgumentException if the URL cannot be parsed
 */
public static GitHubClient createClient(String url) {
    try {
        final String host = new URL(url).getHost();
        final boolean publicGitHub = HOST_DEFAULT.equals(host)
                || HOST_GISTS.equals(host);
        return new GitHubClient(publicGitHub ? HOST_API : host);
    } catch (IOException e) {
        throw new IllegalArgumentException(e);
    }
}
/**
 * Content-Type header
 */
protected static final String HEADER_CONTENT_TYPE = "Content-Type"; //$NON-NLS-1$

/**
 * Accept header
 */
protected static final String HEADER_ACCEPT = "Accept"; //$NON-NLS-1$

/**
 * Authorization header
 */
protected static final String HEADER_AUTHORIZATION = "Authorization"; //$NON-NLS-1$

/**
 * User-Agent header
 */
protected static final String HEADER_USER_AGENT = "User-Agent"; //$NON-NLS-1$

/**
 * METHOD_GET
 */
protected static final String METHOD_GET = "GET"; //$NON-NLS-1$

/**
 * METHOD_PUT
 */
protected static final String METHOD_PUT = "PUT"; //$NON-NLS-1$

/**
 * METHOD_POST
 */
protected static final String METHOD_POST = "POST"; //$NON-NLS-1$

/**
 * METHOD_DELETE
 */
protected static final String METHOD_DELETE = "DELETE"; //$NON-NLS-1$

/**
 * Default user agent request header value
 */
protected static final String USER_AGENT = "GitHubJava/2.1.0"; //$NON-NLS-1$

/**
 * 422 status code for unprocessable entity
 */
protected static final int HTTP_UNPROCESSABLE_ENTITY = 422;

/**
 * Base URI (scheme://host[:port]) built by the constructor
 */
protected final String baseUri;

/**
 * Prefix to apply to base URI; null when talking to the standard API host
 */
protected final String prefix;

/**
 * {@link Gson} instance used for request/response (de)serialization
 */
protected Gson gson = GsonUtils.getGson();

// Login of the currently-configured user; null when unauthenticated.
private String user;

// Pre-computed Authorization header value, or null when no
// credentials/token are configured.
private String credentials;

// User-Agent header value sent with every request.
private String userAgent = USER_AGENT;

// Buffer size used when sending requests and reading responses.
private int bufferSize = 8192;

// Rate-limit counters; -1 until known (presumably updated from
// rate-limit response headers — confirm against the rest of the class).
private int requestLimit = -1;
private int remainingRequests = -1;
/**
 * Create default client, pointed at the standard API host.
 */
public GitHubClient() {
    this(HOST_API);
}
/**
 * Create client for host name, using HTTPS and the default port.
 *
 * @param hostname host to connect to
 */
public GitHubClient(String hostname) {
    this(hostname, -1, PROTOCOL_HTTPS);
}
/**
 * Create client for host, port, and scheme.
 * <p>
 * The port is only included in the base URI when positive. A URI prefix
 * is applied for non-standard host names.
 *
 * @param hostname host to connect to
 * @param port port to connect to, or a non-positive value for the default
 * @param scheme URI scheme (e.g. https)
 */
public GitHubClient(final String hostname, final int port,
        final String scheme) {
    final StringBuilder uri = new StringBuilder(scheme)
            .append("://") //$NON-NLS-1$
            .append(hostname);
    if (port > 0) {
        uri.append(':').append(port);
    }
    baseUri = uri.toString();
    // Only non-standard host names need the API segment prefix.
    prefix = HOST_API.equals(hostname) ? null : SEGMENT_V3_API;
}
/**
 * Set whether or not serialized data should include fields that are null.
 *
 * @param serializeNulls true to serialize null fields, false to omit them
 * @return this client
 */
public GitHubClient setSerializeNulls(boolean serializeNulls) {
    gson = GsonUtils.getGson(serializeNulls);
    return this;
}
/**
 * Set the value used as the User-Agent header on every request created.
 * Passing a null or empty agent resets this client to the default
 * user agent header value.
 *
 * @param agent agent value, or null/empty to restore the default
 * @return this client
 */
public GitHubClient setUserAgent(final String agent) {
    userAgent = (agent == null || agent.length() == 0) ? USER_AGENT : agent;
    return this;
}
/**
 * Configure request with the standard headers: User-Agent, Accept,
 * and (when credentials are set) Authorization.
 *
 * @param request connection to configure
 * @return the same request, configured
 */
protected HttpURLConnection configureRequest(final HttpURLConnection request) {
    request.setRequestProperty(HEADER_USER_AGENT, userAgent);
    request.setRequestProperty(HEADER_ACCEPT,
            "application/vnd.github.beta+json"); //$NON-NLS-1$
    if (credentials != null) {
        request.setRequestProperty(HEADER_AUTHORIZATION, credentials);
    }
    return request;
}
/**
 * Configure request URI by applying the client's prefix when needed.
 *
 * @param uri request URI
 * @return URI with the prefix applied unless it is absent or already present
 */
protected String configureUri(final String uri) {
    final boolean noPrefixNeeded = prefix == null || uri.startsWith(prefix);
    return noPrefixNeeded ? uri : prefix + uri;
}
/**
 * Open an HTTP connection to the given URI.
 *
 * @param uri request URI
 * @return open, unconfigured connection
 * @throws IOException on malformed URL or connection failure
 */
protected HttpURLConnection createConnection(String uri) throws IOException {
    final URL target = new URL(createUri(uri));
    return (HttpURLConnection) target.openConnection();
}
/**
 * Open a connection to the given URI with the given HTTP method and the
 * standard headers applied.
 *
 * @param uri request URI
 * @param method HTTP method name (GET, POST, ...)
 * @return configured connection
 * @throws IOException on connection failure
 */
protected HttpURLConnection createConnection(String uri, String method)
        throws IOException {
    final HttpURLConnection connection = createConnection(uri);
    connection.setRequestMethod(method);
    return configureRequest(connection);
}
/**
 * Create a GET request connection to the URI.
 *
 * @param uri request URI
 * @return configured connection
 * @throws IOException on connection failure
 */
protected HttpURLConnection createGet(String uri) throws IOException {
    return createConnection(uri, METHOD_GET);
}
/**
 * Create a POST request connection to the URI.
 *
 * @param uri request URI
 * @return configured connection
 * @throws IOException on connection failure
 */
protected HttpURLConnection createPost(String uri) throws IOException {
    return createConnection(uri, METHOD_POST);
}
/**
 * Create a PUT request connection to the URI.
 *
 * @param uri request URI
 * @return configured connection
 * @throws IOException on connection failure
 */
protected HttpURLConnection createPut(String uri) throws IOException {
    return createConnection(uri, METHOD_PUT);
}
/**
 * Create a DELETE request connection to the URI.
 *
 * @param uri request URI
 * @return configured connection
 * @throws IOException on connection failure
 */
protected HttpURLConnection createDelete(String uri) throws IOException {
    return createConnection(uri, METHOD_DELETE);
}
/**
* Set credentials
*
* @param user
* @param password
* @return this client
*/
public GitHubClient setCredentials(final String user, final String password) {
this.user = user;
if (user != null && user.length() > 0 && password != null
&& password.length() > 0)
credentials = "Basic " //$NON-NLS-1$
+ EncodingUtils.toBase64(user + ':' + password);
else
credentials = null;
return this;
}
/**
* Set OAuth2 token
*
* @param token
* @return this client
*/
public GitHubClient setOAuth2Token(String token) {
if (token != null && token.length() > 0)
credentials = AUTH_TOKEN + ' ' + token;
else
credentials = null;
return this;
}
/**
* Set buffer size used to send the request and read the response
*
* @param bufferSize
* @return this client
*/
public GitHubClient setBufferSize(int bufferSize) {
if (bufferSize < 1)
throw new IllegalArgumentException(
"Buffer size must be greater than zero"); //$NON-NLS-1$
this.bufferSize = bufferSize;
return this;
}
	/**
	 * Get the user that this client is currently authenticating as
	 *
	 * @return user login or null if not authenticated
	 */
	public String getUser() {
		return user;
	}
/**
* Convert object to a JSON string
*
* @param object
* @return JSON string
* @throws IOException
*/
protected String toJson(Object object) throws IOException {
try {
return gson.toJson(object);
} catch (JsonParseException jpe) {
IOException ioe = new IOException(
"Parse exception converting object to JSON"); //$NON-NLS-1$
ioe.initCause(jpe);
throw ioe;
}
}
	/**
	 * Parse JSON to specified type. The stream is closed before returning.
	 *
	 * @param <V>
	 *            type to parse to
	 * @param stream
	 *            stream to read JSON from
	 * @param type
	 *            type to bind the response to
	 * @return parsed type
	 * @throws IOException
	 */
	protected <V> V parseJson(InputStream stream, Type type) throws IOException {
		return parseJson(stream, type, null);
	}
	/**
	 * Parse JSON to specified type. The stream is closed before returning.
	 *
	 * @param <V>
	 *            type to parse to
	 * @param stream
	 *            stream to read JSON from
	 * @param type
	 *            type to bind a non-array response to
	 * @param listType
	 *            type to bind an array response to, may be null when no array
	 *            response is expected
	 * @return parsed type
	 * @throws IOException
	 *             if the stream cannot be read or the JSON cannot be bound
	 */
	protected <V> V parseJson(InputStream stream, Type type, Type listType)
			throws IOException {
		BufferedReader reader = new BufferedReader(new InputStreamReader(
				stream, CHARSET_UTF8), bufferSize);
		if (listType == null)
			try {
				return gson.fromJson(reader, type);
			} catch (JsonParseException jpe) {
				// Wrap as IOException so callers handle one exception type
				IOException ioe = new IOException(
						"Parse exception converting JSON to object"); //$NON-NLS-1$
				ioe.initCause(jpe);
				throw ioe;
			} finally {
				try {
					reader.close();
				} catch (IOException ignored) {
					// Ignored
				}
			}
		else {
			// Peek at the first token to decide between the array and the
			// single-object binding
			JsonReader jsonReader = new JsonReader(reader);
			try {
				if (jsonReader.peek() == BEGIN_ARRAY)
					return gson.fromJson(jsonReader, listType);
				else
					return gson.fromJson(jsonReader, type);
			} catch (JsonParseException jpe) {
				IOException ioe = new IOException(
						"Parse exception converting JSON to object"); //$NON-NLS-1$
				ioe.initCause(jpe);
				throw ioe;
			} finally {
				try {
					jsonReader.close();
				} catch (IOException ignored) {
					// Ignored
				}
			}
		}
	}
/**
* Does status code denote an error
*
* @param code
* @return true if error, false otherwise
*/
protected boolean isError(final int code) {
switch (code) {
case HTTP_BAD_REQUEST:
case HTTP_UNAUTHORIZED:
case HTTP_FORBIDDEN:
case HTTP_NOT_FOUND:
case HTTP_CONFLICT:
case HTTP_GONE:
case HTTP_UNPROCESSABLE_ENTITY:
case HTTP_INTERNAL_ERROR:
return true;
default:
return false;
}
}
/**
* Does status code denote a non-error response?
*
* @param code
* @return true if okay, false otherwise
*/
protected boolean isOk(final int code) {
switch (code) {
case HTTP_OK:
case HTTP_CREATED:
case HTTP_ACCEPTED:
return true;
default:
return false;
}
}
/**
* Is the response empty?
*
* @param code
* @return true if empty, false otherwise
*/
protected boolean isEmpty(final int code) {
return HTTP_NO_CONTENT == code;
}
	/**
	 * Parse error from response
	 *
	 * @param response
	 *            response stream to read the error body from
	 * @return request error
	 * @throws IOException
	 */
	protected RequestError parseError(InputStream response) throws IOException {
		return parseJson(response, RequestError.class);
	}
/**
* Get body from response inputs stream
*
* @param request
* @param stream
* @return parsed body
* @throws IOException
*/
protected Object getBody(GitHubRequest request, InputStream stream)
throws IOException {
Type type = request.getType();
if (type != null)
return parseJson(stream, type, request.getArrayType());
else
return null;
}
	/**
	 * Create error exception from response and throw it
	 *
	 * @param response
	 *            response stream; consumed by error parsing or closed here
	 * @param code
	 *            HTTP status code
	 * @param status
	 *            HTTP status message, may be null or empty
	 * @return non-null newly created {@link IOException}
	 */
	protected IOException createException(InputStream response, int code,
			String status) {
		if (isError(code)) {
			final RequestError error;
			try {
				error = parseError(response);
			} catch (IOException e) {
				// Parsing the error body itself failed, report that instead
				return e;
			}
			if (error != null)
				return new RequestException(error, code);
		} else
			try {
				// Non-error status: the body is not parsed, just release it
				response.close();
			} catch (IOException ignored) {
				// Ignored
			}
		// Fall back to a generic message built from the status line
		String message;
		if (status != null && status.length() > 0)
			message = status + " (" + code + ')'; //$NON-NLS-1$
		else
			message = "Unknown error occurred (" + code + ')'; //$NON-NLS-1$
		return new IOException(message);
	}
	/**
	 * Post to URI with no body and no expected response
	 *
	 * @param uri
	 *            resource URI
	 * @throws IOException
	 */
	public void post(String uri) throws IOException {
		post(uri, null, null);
	}

	/**
	 * Put to URI with no body and no expected response
	 *
	 * @param uri
	 *            resource URI
	 * @throws IOException
	 */
	public void put(String uri) throws IOException {
		put(uri, null, null);
	}

	/**
	 * Delete resource at URI. This method will throw an {@link IOException}
	 * when the response status is not a 204 (No Content).
	 *
	 * @param uri
	 *            resource URI
	 * @throws IOException
	 */
	public void delete(String uri) throws IOException {
		delete(uri, null);
	}
	/**
	 * Send parameters to output stream of request
	 *
	 * @param request
	 *            connection to write to
	 * @param params
	 *            body to serialize as JSON, may be null for an empty body
	 * @throws IOException
	 */
	protected void sendParams(HttpURLConnection request, Object params)
			throws IOException {
		request.setDoOutput(true);
		if (params != null) {
			request.setRequestProperty(HEADER_CONTENT_TYPE, CONTENT_TYPE_JSON
					+ "; charset=" + CHARSET_UTF8); //$NON-NLS-1$
			byte[] data = toJson(params).getBytes(CHARSET_UTF8);
			// Fixed-length streaming avoids buffering the whole body in the
			// connection before sending
			request.setFixedLengthStreamingMode(data.length);
			BufferedOutputStream output = new BufferedOutputStream(
					request.getOutputStream(), bufferSize);
			try {
				output.write(data);
				output.flush();
			} finally {
				try {
					output.close();
				} catch (IOException ignored) {
					// Ignored
				}
			}
		} else {
			// No body: advertise a zero content length explicitly
			request.setFixedLengthStreamingMode(0);
			request.setRequestProperty("Content-Length", "0");
		}
	}
	/**
	 * Send parameters as JSON body and parse the response to the given type
	 *
	 * @param <V>
	 *            response type
	 * @param request
	 *            connection to send on
	 * @param params
	 *            request body, may be null
	 * @param type
	 *            expected response type, may be null when no body is expected
	 * @return parsed response, or null for an empty response or a null type
	 * @throws IOException
	 *             if the response denotes a failure
	 */
	private <V> V sendJson(final HttpURLConnection request,
			final Object params, final Type type) throws IOException {
		sendParams(request, params);
		final int code = request.getResponseCode();
		updateRateLimits(request);
		if (isOk(code))
			if (type != null)
				return parseJson(getStream(request), type);
			else
				return null;
		if (isEmpty(code))
			return null;
		throw createException(getStream(request), code,
				request.getResponseMessage());
	}
	/**
	 * Create full URI from path
	 *
	 * @param path
	 *            path resolved against the base URI, prefix applied if needed
	 * @return uri
	 */
	protected String createUri(final String path) {
		return baseUri + configureUri(path);
	}
	/**
	 * Get response stream from GET to URI. It is the responsibility of the
	 * calling method to close the returned stream.
	 *
	 * @param request
	 *            request describing the URI to fetch
	 * @return stream
	 * @throws IOException
	 */
	public InputStream getStream(final GitHubRequest request)
			throws IOException {
		return getResponseStream(createGet(request.generateUri()));
	}
/**
* Get response stream from POST to URI. It is the responsibility of the
* calling method to close the returned stream.
*
* @param uri
* @param params
* @return stream
* @throws IOException
*/
public InputStream postStream(final String uri, final Object params)
throws IOException {
HttpURLConnection connection = createPost(uri);
sendParams(connection, params);
return getResponseStream(connection);
}
/**
* Get response stream for request
*
* @param request
* @return stream
* @throws IOException
*/
protected InputStream getResponseStream(final HttpURLConnection request)
throws IOException {
InputStream stream = getStream(request);
int code = request.getResponseCode();
updateRateLimits(request);
if (isOk(code))
return stream;
else
throw createException(stream, code, request.getResponseMessage());
}
/**
* Get stream from request
*
* @param request
* @return stream
* @throws IOException
*/
protected InputStream getStream(HttpURLConnection request)
throws IOException {
if (request.getResponseCode() < HTTP_BAD_REQUEST)
return request.getInputStream();
else {
InputStream stream = request.getErrorStream();
return stream != null ? stream : request.getInputStream();
}
}
	/**
	 * Get response from URI and bind to specified type
	 *
	 * @param request
	 *            request describing the URI, response type and content type
	 * @return response
	 * @throws IOException
	 *             if the response denotes a failure
	 */
	public GitHubResponse get(GitHubRequest request) throws IOException {
		HttpURLConnection httpRequest = createGet(request.generateUri());
		// Allow the request to override the default Accept header
		String accept = request.getResponseContentType();
		if (accept != null)
			httpRequest.setRequestProperty(HEADER_ACCEPT, accept);
		final int code = httpRequest.getResponseCode();
		updateRateLimits(httpRequest);
		if (isOk(code))
			return new GitHubResponse(httpRequest, getBody(request,
					getStream(httpRequest)));
		if (isEmpty(code))
			// 204 No Content: there is no body to parse
			return new GitHubResponse(httpRequest, null);
		throw createException(getStream(httpRequest), code,
				httpRequest.getResponseMessage());
	}
/**
* Post data to URI
*
* @param <V>
* @param uri
* @param params
* @param type
* @return response
* @throws IOException
*/
public <V> V post(final String uri, final Object params, final Type type)
throws IOException {
HttpURLConnection request = createPost(uri);
return sendJson(request, params, type);
}
/**
* Put data to URI
*
* @param <V>
* @param uri
* @param params
* @param type
* @return response
* @throws IOException
*/
public <V> V put(final String uri, final Object params, final Type type)
throws IOException {
HttpURLConnection request = createPut(uri);
return sendJson(request, params, type);
}
	/**
	 * Delete resource at URI. This method will throw an {@link IOException}
	 * when the response status is not a 204 (No Content).
	 *
	 * @param uri
	 *            resource URI
	 * @param params
	 *            request body, may be null to send no body
	 * @throws IOException
	 */
	public void delete(final String uri, final Object params)
			throws IOException {
		HttpURLConnection request = createDelete(uri);
		if (params != null)
			sendParams(request, params);
		final int code = request.getResponseCode();
		updateRateLimits(request);
		// Any status other than 204 is treated as a failure
		if (!isEmpty(code))
			throw new RequestException(parseError(getStream(request)), code);
	}
/**
* Update rate limits present in response headers
*
* @param request
* @return this client
*/
protected GitHubClient updateRateLimits(HttpURLConnection request) {
String limit = request.getHeaderField("X-RateLimit-Limit");
if (limit != null && limit.length() > 0)
try {
requestLimit = Integer.parseInt(limit);
} catch (NumberFormatException nfe) {
requestLimit = -1;
}
else
requestLimit = -1;
String remaining = request.getHeaderField("X-RateLimit-Remaining");
if (remaining != null && remaining.length() > 0)
try {
remainingRequests = Integer.parseInt(remaining);
} catch (NumberFormatException nfe) {
remainingRequests = -1;
}
else
remainingRequests = -1;
return this;
}
	/**
	 * Get number of requests remaining before rate limiting occurs
	 * <p>
	 * This will be the value of the 'X-RateLimit-Remaining' header from the
	 * last request made
	 *
	 * @return remaining requests, or -1 if not present in the response
	 */
	public int getRemainingRequests() {
		return remainingRequests;
	}

	/**
	 * Get number of requests that {@link #getRemainingRequests()} counts down
	 * from as each request is made
	 * <p>
	 * This will be the value of the 'X-RateLimit-Limit' header from the last
	 * request made
	 *
	 * @return request limit, or -1 if not present in the response
	 */
	public int getRequestLimit() {
		return requestLimit;
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.pool;
import java.lang.Thread.UncaughtExceptionHandler;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.configuration.ExecutorConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.failure.FailureContext;
import org.apache.ignite.failure.FailureType;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteComponentType;
import org.apache.ignite.internal.managers.communication.GridIoPolicy;
import org.apache.ignite.internal.processors.GridProcessorAdapter;
import org.apache.ignite.internal.processors.plugin.IgnitePluginProcessor;
import org.apache.ignite.internal.processors.security.IgniteSecurity;
import org.apache.ignite.internal.processors.security.thread.SecurityAwareIoPool;
import org.apache.ignite.internal.processors.security.thread.SecurityAwareStripedExecutor;
import org.apache.ignite.internal.processors.security.thread.SecurityAwareStripedThreadPoolExecutor;
import org.apache.ignite.internal.processors.security.thread.SecurityAwareThreadPoolExecutor;
import org.apache.ignite.internal.util.StripedExecutor;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.internal.util.worker.GridWorkerListener;
import org.apache.ignite.internal.worker.WorkersRegistry;
import org.apache.ignite.lang.IgniteInClosure;
import org.apache.ignite.plugin.extensions.communication.IoPool;
import org.apache.ignite.thread.IgniteStripedThreadPoolExecutor;
import org.apache.ignite.thread.IgniteThreadPoolExecutor;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.configuration.IgniteConfiguration.DFLT_THREAD_KEEP_ALIVE_TIME;
import static org.apache.ignite.failure.FailureType.SYSTEM_WORKER_TERMINATION;
/**
* Processor which abstracts out thread pool management.
*/
public class PoolProcessor extends GridProcessorAdapter {
    /** Executor service. */
    @GridToStringExclude
    private ThreadPoolExecutor execSvc;
    /** Executor service for services. */
    @GridToStringExclude
    private ThreadPoolExecutor svcExecSvc;
    /** System executor service. */
    @GridToStringExclude
    private ThreadPoolExecutor sysExecSvc;
    /** Striped executor service for internal system messages. */
    @GridToStringExclude
    private StripedExecutor stripedExecSvc;
    /** Management executor service. */
    @GridToStringExclude
    private ThreadPoolExecutor mgmtExecSvc;
    /** P2P executor service. */
    @GridToStringExclude
    private ThreadPoolExecutor p2pExecSvc;
    /** Data streamer executor service. */
    @GridToStringExclude
    private StripedExecutor dataStreamerExecSvc;
    /** REST requests executor service. */
    @GridToStringExclude
    private ThreadPoolExecutor restExecSvc;
    /** Utility cache executor service. */
    private ThreadPoolExecutor utilityCacheExecSvc;
    /** Affinity executor service. */
    @GridToStringExclude
    private ThreadPoolExecutor affExecSvc;
    /** Indexing pool. */
    @GridToStringExclude
    private ThreadPoolExecutor idxExecSvc;
    /** Thread pool for create/rebuild indexes. */
    @GridToStringExclude
    private ThreadPoolExecutor buildIdxExecSvc;
    /** Continuous query executor service. */
    @GridToStringExclude
    private IgniteStripedThreadPoolExecutor callbackExecSvc;
    /** Query executor service. */
    @GridToStringExclude
    private ThreadPoolExecutor qryExecSvc;
    /** Schema change executor service. */
    @GridToStringExclude
    private ThreadPoolExecutor schemaExecSvc;
    /** Rebalance executor service. */
    @GridToStringExclude
    private ThreadPoolExecutor rebalanceExecSvc;
    /** Rebalance striped executor service. */
    @GridToStringExclude
    private IgniteStripedThreadPoolExecutor rebalanceStripedExecSvc;
    /** Map of {@link IoPool}-s injected by Ignite plugins, indexed by pool ID. */
    private final IoPool[] extPools = new IoPool[128];
    /** Custom named pools. */
    private Map<String, ThreadPoolExecutor> customExecs;
/**
* Constructor.
*
* @param ctx Kernal context.
*/
public PoolProcessor(GridKernalContext ctx) {
super(ctx);
IgnitePluginProcessor plugins = ctx.plugins();
if (plugins != null) {
// Process custom IO messaging pool extensions:
final IoPool[] executorExtensions = ctx.plugins().extensions(IoPool.class);
if (executorExtensions != null) {
// Store it into the map and check for duplicates:
for (IoPool ex : executorExtensions) {
final byte id = ex.id();
// 1. Check the pool id is non-negative:
if (id < 0)
throw new IgniteException("Failed to register IO executor pool because its ID is " +
"negative: " + id);
// 2. Check the pool id is in allowed range:
if (GridIoPolicy.isReservedGridIoPolicy(id))
throw new IgniteException("Failed to register IO executor pool because its ID in in the " +
"reserved range: " + id);
// 3. Check the pool for duplicates:
if (extPools[id] != null)
throw new IgniteException("Failed to register IO executor pool because its ID as " +
"already used: " + id);
extPools[id] = ctx.security().enabled() ? new SecurityAwareIoPool(ctx.security(), ex) : ex;
}
}
}
}
    /** {@inheritDoc} */
    @Override public void start() throws IgniteCheckedException {
        super.start();
        IgniteConfiguration cfg = ctx.config();
        // Handler used by most pools for uncaught errors (e.g. OOME).
        UncaughtExceptionHandler oomeHnd = ctx.uncaughtExceptionHandler();
        // Handler that routes uncaught exceptions into the failure processor.
        UncaughtExceptionHandler excHnd = new UncaughtExceptionHandler() {
            @Override public void uncaughtException(Thread t, Throwable e) {
                ctx.failure().process(new FailureContext(FailureType.CRITICAL_ERROR, e));
            }
        };
        // Public pool: job execution requests and user messages.
        validateThreadPoolSize(cfg.getPublicThreadPoolSize(), "public");
        execSvc = createExecutorService(
            "pub",
            cfg.getIgniteInstanceName(),
            cfg.getPublicThreadPoolSize(),
            cfg.getPublicThreadPoolSize(),
            DFLT_THREAD_KEEP_ALIVE_TIME,
            new LinkedBlockingQueue<>(),
            GridIoPolicy.PUBLIC_POOL,
            oomeHnd);
        execSvc.allowCoreThreadTimeOut(true);
        // Service pool: service proxy invocations.
        validateThreadPoolSize(cfg.getServiceThreadPoolSize(), "service");
        svcExecSvc = createExecutorService(
            "svc",
            cfg.getIgniteInstanceName(),
            cfg.getServiceThreadPoolSize(),
            cfg.getServiceThreadPoolSize(),
            DFLT_THREAD_KEEP_ALIVE_TIME,
            new LinkedBlockingQueue<>(),
            GridIoPolicy.SERVICE_POOL,
            oomeHnd);
        svcExecSvc.allowCoreThreadTimeOut(true);
        // System pool: internal system messages.
        validateThreadPoolSize(cfg.getSystemThreadPoolSize(), "system");
        sysExecSvc = createExecutorService(
            "sys",
            cfg.getIgniteInstanceName(),
            cfg.getSystemThreadPoolSize(),
            cfg.getSystemThreadPoolSize(),
            DFLT_THREAD_KEEP_ALIVE_TIME,
            new LinkedBlockingQueue<>(),
            GridIoPolicy.SYSTEM_POOL,
            oomeHnd);
        sysExecSvc.allowCoreThreadTimeOut(true);
        // Striped system pool; worker termination is treated as a failure.
        validateThreadPoolSize(cfg.getStripedPoolSize(), "stripedPool");
        WorkersRegistry workerRegistry = ctx.workersRegistry();
        stripedExecSvc = createStripedExecutor(
            cfg.getStripedPoolSize(),
            cfg.getIgniteInstanceName(),
            "sys",
            log,
            new IgniteInClosure<Throwable>() {
                @Override public void apply(Throwable t) {
                    ctx.failure().process(new FailureContext(SYSTEM_WORKER_TERMINATION, t));
                }
            },
            false,
            workerRegistry,
            cfg.getFailureDetectionTimeout());
        // Note that since we use 'LinkedBlockingQueue', number of
        // maximum threads has no effect.
        // Note, that we do not pre-start threads here as management pool may
        // not be needed.
        validateThreadPoolSize(cfg.getManagementThreadPoolSize(), "management");
        mgmtExecSvc = createExecutorService(
            "mgmt",
            cfg.getIgniteInstanceName(),
            cfg.getManagementThreadPoolSize(),
            cfg.getManagementThreadPoolSize(),
            DFLT_THREAD_KEEP_ALIVE_TIME,
            new LinkedBlockingQueue<>(),
            GridIoPolicy.MANAGEMENT_POOL,
            oomeHnd);
        mgmtExecSvc.allowCoreThreadTimeOut(true);
        // Note that since we use 'LinkedBlockingQueue', number of
        // maximum threads has no effect.
        // Note, that we do not pre-start threads here as class loading pool may
        // not be needed.
        validateThreadPoolSize(cfg.getPeerClassLoadingThreadPoolSize(), "peer class loading");
        p2pExecSvc = createExecutorService(
            "p2p",
            cfg.getIgniteInstanceName(),
            cfg.getPeerClassLoadingThreadPoolSize(),
            cfg.getPeerClassLoadingThreadPoolSize(),
            DFLT_THREAD_KEEP_ALIVE_TIME,
            new LinkedBlockingQueue<>(),
            GridIoPolicy.P2P_POOL,
            oomeHnd);
        p2pExecSvc.allowCoreThreadTimeOut(true);
        // Striped pool for data streamer messages.
        dataStreamerExecSvc = createStripedExecutor(
            cfg.getDataStreamerThreadPoolSize(),
            cfg.getIgniteInstanceName(),
            "data-streamer",
            log,
            new IgniteInClosure<Throwable>() {
                @Override public void apply(Throwable t) {
                    ctx.failure().process(new FailureContext(SYSTEM_WORKER_TERMINATION, t));
                }
            },
            true,
            workerRegistry,
            cfg.getFailureDetectionTimeout());
        // Note that we do not pre-start threads here as this pool may not be needed.
        validateThreadPoolSize(cfg.getAsyncCallbackPoolSize(), "async callback");
        callbackExecSvc = new IgniteStripedThreadPoolExecutor(
            cfg.getAsyncCallbackPoolSize(),
            cfg.getIgniteInstanceName(),
            "callback",
            oomeHnd,
            false,
            0);
        // REST pool is created only when the connector is configured.
        if (cfg.getConnectorConfiguration() != null) {
            validateThreadPoolSize(cfg.getConnectorConfiguration().getThreadPoolSize(), "connector");
            restExecSvc = createExecutorService(
                "rest",
                cfg.getIgniteInstanceName(),
                cfg.getConnectorConfiguration().getThreadPoolSize(),
                cfg.getConnectorConfiguration().getThreadPoolSize(),
                DFLT_THREAD_KEEP_ALIVE_TIME,
                new LinkedBlockingQueue<>(),
                GridIoPolicy.UNDEFINED,
                oomeHnd
            );
            restExecSvc.allowCoreThreadTimeOut(true);
        }
        validateThreadPoolSize(cfg.getUtilityCacheThreadPoolSize(), "utility cache");
        utilityCacheExecSvc = createExecutorService(
            "utility",
            cfg.getIgniteInstanceName(),
            cfg.getUtilityCacheThreadPoolSize(),
            cfg.getUtilityCacheThreadPoolSize(),
            cfg.getUtilityCacheKeepAliveTime(),
            new LinkedBlockingQueue<>(),
            GridIoPolicy.UTILITY_CACHE_POOL,
            oomeHnd);
        utilityCacheExecSvc.allowCoreThreadTimeOut(true);
        // Affinity pool is intentionally single-threaded.
        affExecSvc = createExecutorService(
            "aff",
            cfg.getIgniteInstanceName(),
            1,
            1,
            DFLT_THREAD_KEEP_ALIVE_TIME,
            new LinkedBlockingQueue<>(),
            GridIoPolicy.AFFINITY_POOL,
            oomeHnd);
        affExecSvc.allowCoreThreadTimeOut(true);
        // Indexing pools exist only when the indexing module is on the classpath.
        if (IgniteComponentType.INDEXING.inClassPath()) {
            int cpus = Runtime.getRuntime().availableProcessors();
            idxExecSvc = createExecutorService(
                "idx",
                cfg.getIgniteInstanceName(),
                cpus,
                cpus * 2,
                3000L,
                new LinkedBlockingQueue<>(1000),
                GridIoPolicy.IDX_POOL,
                oomeHnd
            );
            int buildIdxThreadPoolSize = cfg.getBuildIndexThreadPoolSize();
            validateThreadPoolSize(buildIdxThreadPoolSize, "build-idx");
            buildIdxExecSvc = createExecutorService(
                "build-idx-runner",
                cfg.getIgniteInstanceName(),
                buildIdxThreadPoolSize,
                buildIdxThreadPoolSize,
                DFLT_THREAD_KEEP_ALIVE_TIME,
                new LinkedBlockingQueue<>(),
                GridIoPolicy.UNDEFINED,
                oomeHnd
            );
            buildIdxExecSvc.allowCoreThreadTimeOut(true);
        }
        validateThreadPoolSize(cfg.getQueryThreadPoolSize(), "query");
        qryExecSvc = createExecutorService(
            "query",
            cfg.getIgniteInstanceName(),
            cfg.getQueryThreadPoolSize(),
            cfg.getQueryThreadPoolSize(),
            DFLT_THREAD_KEEP_ALIVE_TIME,
            new LinkedBlockingQueue<>(),
            GridIoPolicy.QUERY_POOL,
            oomeHnd);
        qryExecSvc.allowCoreThreadTimeOut(true);
        schemaExecSvc = createExecutorService(
            "schema",
            cfg.getIgniteInstanceName(),
            2,
            2,
            DFLT_THREAD_KEEP_ALIVE_TIME,
            new LinkedBlockingQueue<>(),
            GridIoPolicy.SCHEMA_POOL,
            oomeHnd);
        schemaExecSvc.allowCoreThreadTimeOut(true);
        // Rebalance pools use the failure-processor exception handler.
        validateThreadPoolSize(cfg.getRebalanceThreadPoolSize(), "rebalance");
        rebalanceExecSvc = createExecutorService(
            "rebalance",
            cfg.getIgniteInstanceName(),
            cfg.getRebalanceThreadPoolSize(),
            cfg.getRebalanceThreadPoolSize(),
            DFLT_THREAD_KEEP_ALIVE_TIME,
            new LinkedBlockingQueue<>(),
            GridIoPolicy.UNDEFINED,
            excHnd);
        rebalanceExecSvc.allowCoreThreadTimeOut(true);
        rebalanceStripedExecSvc = createStripedThreadPoolExecutor(
            cfg.getRebalanceThreadPoolSize(),
            cfg.getIgniteInstanceName(),
            "rebalance-striped",
            excHnd,
            true,
            DFLT_THREAD_KEEP_ALIVE_TIME);
        // User-defined named pools from ExecutorConfiguration.
        if (!F.isEmpty(cfg.getExecutorConfiguration())) {
            validateCustomExecutorsConfiguration(cfg.getExecutorConfiguration());
            customExecs = new HashMap<>();
            for (ExecutorConfiguration execCfg : cfg.getExecutorConfiguration()) {
                ThreadPoolExecutor exec = createExecutorService(
                    execCfg.getName(),
                    cfg.getIgniteInstanceName(),
                    execCfg.getSize(),
                    execCfg.getSize(),
                    DFLT_THREAD_KEEP_ALIVE_TIME,
                    new LinkedBlockingQueue<>(),
                    GridIoPolicy.UNDEFINED,
                    oomeHnd);
                customExecs.put(execCfg.getName(), exec);
            }
        }
    }
    /** {@inheritDoc} */
    @Override public void stop(boolean cancel) throws IgniteCheckedException {
        // Avoid external thread pools GC retention.
        Arrays.fill(extPools, null);
        // Shut down all processor-owned executors.
        stopExecutors(log);
    }
/**
* Get executor service for policy.
*
* @param plc Policy.
* @return Executor service.
* @throws IgniteCheckedException If failed.
*/
public Executor poolForPolicy(byte plc) throws IgniteCheckedException {
switch (plc) {
case GridIoPolicy.P2P_POOL:
return getPeerClassLoadingExecutorService();
case GridIoPolicy.SYSTEM_POOL:
return getSystemExecutorService();
case GridIoPolicy.PUBLIC_POOL:
return getExecutorService();
case GridIoPolicy.MANAGEMENT_POOL:
return getManagementExecutorService();
case GridIoPolicy.AFFINITY_POOL:
return getAffinityExecutorService();
case GridIoPolicy.IDX_POOL:
assert getIndexingExecutorService() != null : "Indexing pool is not configured.";
return getIndexingExecutorService();
case GridIoPolicy.UTILITY_CACHE_POOL:
assert utilityCachePool() != null : "Utility cache pool is not configured.";
return utilityCachePool();
case GridIoPolicy.SERVICE_POOL:
assert getServiceExecutorService() != null : "Service pool is not configured.";
return getServiceExecutorService();
case GridIoPolicy.DATA_STREAMER_POOL:
assert getDataStreamerExecutorService() != null : "Data streamer pool is not configured.";
return getDataStreamerExecutorService();
case GridIoPolicy.QUERY_POOL:
assert getQueryExecutorService() != null : "Query pool is not configured.";
return getQueryExecutorService();
case GridIoPolicy.SCHEMA_POOL:
assert getSchemaExecutorService() != null : "Query pool is not configured.";
return getSchemaExecutorService();
default: {
if (plc < 0)
throw new IgniteCheckedException("Policy cannot be negative: " + plc);
if (GridIoPolicy.isReservedGridIoPolicy(plc))
throw new IgniteCheckedException("Policy is reserved for internal usage (range 0-31): " + plc);
IoPool pool = extPools[plc];
if (pool == null)
throw new IgniteCheckedException("No pool is registered for policy: " + plc);
assert plc == pool.id();
Executor res = pool.executor();
if (res == null)
throw new IgniteCheckedException("Thread pool for policy is null: " + plc);
return res;
}
}
}
/**
* Gets executor service for custom policy by executor name.
*
* @param name Executor name.
* @return Executor service.
*/
@Nullable public Executor customExecutor(String name) {
assert name != null;
Executor exec = null;
if (customExecs != null)
exec = customExecs.get(name);
return exec;
}
    /**
     * Gets utility cache pool.
     *
     * @return Utility cache pool.
     */
    public ExecutorService utilityCachePool() {
        return utilityCacheExecSvc;
    }
    /**
     * Gets async (continuous query) callback pool.
     *
     * @return Async callback pool.
     */
    public IgniteStripedThreadPoolExecutor asyncCallbackPool() {
        return callbackExecSvc;
    }
    /**
     * @return Thread pool implementation to be used in grid to process job execution
     *      requests and user messages sent to the node.
     */
    public ExecutorService getExecutorService() {
        return execSvc;
    }
    /**
     * Executor service that is in charge of processing service proxy invocations.
     *
     * @return Thread pool implementation to be used in grid for service proxy invocations.
     */
    public ExecutorService getServiceExecutorService() {
        return svcExecSvc;
    }
    /**
     * Executor service that is in charge of processing internal system messages.
     *
     * @return Thread pool implementation to be used in grid for internal system messages.
     */
    public ExecutorService getSystemExecutorService() {
        return sysExecSvc;
    }
    /**
     * Executor service that is in charge of processing internal system messages
     * in stripes (dedicated threads).
     *
     * @return Striped thread pool implementation to be used in grid for internal system messages.
     */
    public StripedExecutor getStripedExecutorService() {
        return stripedExecSvc;
    }
    /**
     * Executor service that is in charge of processing internal and Visor
     * {@link org.apache.ignite.compute.ComputeJob GridJobs}.
     *
     * @return Thread pool implementation to be used in grid for internal and Visor
     *      jobs processing.
     */
    public ExecutorService getManagementExecutorService() {
        return mgmtExecSvc;
    }
    /**
     * @return Thread pool implementation to be used for peer class loading
     *      requests handling.
     */
    public ExecutorService getPeerClassLoadingExecutorService() {
        return p2pExecSvc;
    }
    /**
     * Executor service that is in charge of processing data stream messages.
     *
     * @return Striped thread pool implementation to be used for data stream messages.
     */
    public StripedExecutor getDataStreamerExecutorService() {
        return dataStreamerExecSvc;
    }
    /**
     * Should return an instance of fully configured thread pool to be used for
     * processing of client messages (REST requests).
     *
     * @return Thread pool implementation to be used for processing of client
     *      messages, or {@code null} when no connector is configured.
     */
    public ExecutorService getRestExecutorService() {
        return restExecSvc;
    }
    /**
     * Get affinity executor service.
     *
     * @return Affinity executor service.
     */
    public ExecutorService getAffinityExecutorService() {
        return affExecSvc;
    }
    /**
     * Get indexing executor service.
     *
     * @return Indexing executor service, or {@code null} when the indexing
     *      module is not on the classpath.
     */
    @Nullable public ExecutorService getIndexingExecutorService() {
        return idxExecSvc;
    }
    /**
     * Executor service that is in charge of processing query messages.
     *
     * @return Thread pool implementation to be used in grid for query messages.
     */
    public ExecutorService getQueryExecutorService() {
        return qryExecSvc;
    }
    /**
     * Executor services that are in charge of processing user compute tasks.
     *
     * @return Unmodifiable map of custom thread pool executors, or {@code null}
     *      when none are configured.
     */
    @Nullable public Map<String, ? extends ExecutorService> customExecutors() {
        return customExecs == null ? null : Collections.unmodifiableMap(customExecs);
    }
    /**
     * Executor service that is in charge of processing schema change messages.
     *
     * @return Executor service that is in charge of processing schema change messages.
     */
    public ExecutorService getSchemaExecutorService() {
        return schemaExecSvc;
    }
    /**
     * Executor service that is in charge of processing rebalance messages.
     *
     * @return Executor service that is in charge of processing rebalance messages.
     */
    public ExecutorService getRebalanceExecutorService() {
        return rebalanceExecSvc;
    }
    /**
     * Executor service that is in charge of processing unorderable rebalance messages.
     *
     * @return Executor service that is in charge of processing unorderable rebalance messages.
     */
    public IgniteStripedThreadPoolExecutor getStripedRebalanceExecutorService() {
        return rebalanceStripedExecSvc;
    }
    /**
     * Return thread pool for create/rebuild indexes.
     *
     * @return Thread pool for create/rebuild indexes, or {@code null} when the
     *      indexing module is not on the classpath.
     */
    public ExecutorService buildIndexExecutorService() {
        return buildIdxExecSvc;
    }
    /**
     * Stops executor services if they have been started. Preserves the
     * caller thread's interrupted status across the shutdown.
     *
     * @param log Grid logger.
     */
    private void stopExecutors(IgniteLogger log) {
        // Clear the interrupted flag so shutdown is not aborted mid-way,
        // then restore it for the caller.
        boolean interrupted = Thread.interrupted();
        try {
            stopExecutors0(log);
        }
        finally {
            if (interrupted)
                Thread.currentThread().interrupt();
        }
    }
    /**
     * Stops executor services if they have been started.
     *
     * @param log Grid logger.
     */
    private void stopExecutors0(IgniteLogger log) {
        assert log != null;
        // Each field is nulled out after shutdown to avoid retention and
        // accidental reuse after stop.
        U.shutdownNow(getClass(), execSvc, log);
        execSvc = null;
        U.shutdownNow(getClass(), svcExecSvc, log);
        svcExecSvc = null;
        U.shutdownNow(getClass(), sysExecSvc, log);
        sysExecSvc = null;
        U.shutdownNow(getClass(), qryExecSvc, log);
        qryExecSvc = null;
        U.shutdownNow(getClass(), schemaExecSvc, log);
        schemaExecSvc = null;
        U.shutdownNow(getClass(), rebalanceExecSvc, log);
        rebalanceExecSvc = null;
        U.shutdownNow(getClass(), rebalanceStripedExecSvc, log);
        rebalanceStripedExecSvc = null;
        U.shutdownNow(getClass(), stripedExecSvc, log);
        stripedExecSvc = null;
        U.shutdownNow(getClass(), mgmtExecSvc, log);
        mgmtExecSvc = null;
        U.shutdownNow(getClass(), p2pExecSvc, log);
        p2pExecSvc = null;
        U.shutdownNow(getClass(), dataStreamerExecSvc, log);
        dataStreamerExecSvc = null;
        // REST pool is optional (only created when a connector is configured).
        if (restExecSvc != null)
            U.shutdownNow(getClass(), restExecSvc, log);
        restExecSvc = null;
        U.shutdownNow(getClass(), utilityCacheExecSvc, log);
        utilityCacheExecSvc = null;
        U.shutdownNow(getClass(), affExecSvc, log);
        affExecSvc = null;
        U.shutdownNow(getClass(), idxExecSvc, log);
        idxExecSvc = null;
        U.shutdownNow(getClass(), buildIdxExecSvc, log);
        buildIdxExecSvc = null;
        U.shutdownNow(getClass(), callbackExecSvc, log);
        callbackExecSvc = null;
        if (!F.isEmpty(customExecs)) {
            for (ThreadPoolExecutor exec : customExecs.values())
                U.shutdownNow(getClass(), exec, log);
            customExecs = null;
        }
    }
/**
 * Ensures a configured thread pool size is positive.
 *
 * @param poolSize An actual value in the configuration.
 * @param poolName A name of the pool like 'management'.
 * @throws IgniteCheckedException If the poolSize is wrong.
 */
private static void validateThreadPoolSize(int poolSize, String poolName)
throws IgniteCheckedException {
// Guard clause: anything positive is acceptable.
if (poolSize > 0)
return;

throw new IgniteCheckedException("Invalid " + poolName + " thread pool size" +
" (must be greater than 0), actual value: " + poolSize);
}
/**
 * Validates the custom executor configurations: every entry must have a unique,
 * non-empty name and a positive size.
 *
 * @param cfgs Array of the executors configurations.
 * @throws IgniteCheckedException If configuration is wrong.
 */
private static void validateCustomExecutorsConfiguration(ExecutorConfiguration[] cfgs)
throws IgniteCheckedException {
if (cfgs == null)
return;

Set<String> seen = new HashSet<>(cfgs.length);

for (ExecutorConfiguration execCfg : cfgs) {
String name = execCfg.getName();

if (F.isEmpty(name))
throw new IgniteCheckedException("Custom executor name cannot be null or empty.");

if (!seen.add(name))
throw new IgniteCheckedException("Duplicate custom executor name: " + name);

if (execCfg.getSize() <= 0)
throw new IgniteCheckedException("Custom executor size must be positive [name=" + name +
", size=" + execCfg.getSize() + ']');
}
}
/** Creates instance {@link IgniteStripedThreadPoolExecutor} with a notion of whether {@link IgniteSecurity} is enabled. */
private IgniteStripedThreadPoolExecutor createStripedThreadPoolExecutor(
int concurrentLvl,
String igniteInstanceName,
String threadNamePrefix,
UncaughtExceptionHandler eHnd,
boolean allowCoreThreadTimeOut,
long keepAliveTime
) {
// Security-aware variant wraps tasks so they run under the submitter's security context.
if (ctx.security().enabled()) {
return new SecurityAwareStripedThreadPoolExecutor(
ctx.security(),
concurrentLvl,
igniteInstanceName,
threadNamePrefix,
eHnd,
allowCoreThreadTimeOut,
keepAliveTime);
}

return new IgniteStripedThreadPoolExecutor(
concurrentLvl,
igniteInstanceName,
threadNamePrefix,
eHnd,
allowCoreThreadTimeOut,
keepAliveTime);
}
/** Creates instance {@link StripedExecutor} with a notion of whether {@link IgniteSecurity} is enabled. */
private StripedExecutor createStripedExecutor(
int cnt,
String igniteInstanceName,
String poolName,
final IgniteLogger log,
IgniteInClosure<Throwable> errHnd,
boolean stealTasks,
GridWorkerListener gridWorkerLsnr,
long failureDetectionTimeout
) {
// Plain executor unless security is on; then use the security-aware wrapper.
if (!ctx.security().enabled())
return new StripedExecutor(cnt, igniteInstanceName, poolName, log, errHnd, stealTasks, gridWorkerLsnr, failureDetectionTimeout);

return new SecurityAwareStripedExecutor(
ctx.security(),
cnt,
igniteInstanceName,
poolName,
log,
errHnd,
stealTasks,
gridWorkerLsnr,
failureDetectionTimeout);
}
/** Creates instance {@link IgniteThreadPoolExecutor} with a notion of whether {@link IgniteSecurity} is enabled. */
private IgniteThreadPoolExecutor createExecutorService(
String threadNamePrefix,
String igniteInstanceName,
int corePoolSize,
int maxPoolSize,
long keepAliveTime,
BlockingQueue<Runnable> workQ,
byte plc,
UncaughtExceptionHandler eHnd
) {
// Security-aware variant preserves the submitter's security context in worker threads.
if (ctx.security().enabled()) {
return new SecurityAwareThreadPoolExecutor(
ctx.security(),
threadNamePrefix,
igniteInstanceName,
corePoolSize,
maxPoolSize,
keepAliveTime,
workQ,
plc,
eHnd);
}

return new IgniteThreadPoolExecutor(
threadNamePrefix,
igniteInstanceName,
corePoolSize,
maxPoolSize,
keepAliveTime,
workQ,
plc,
eHnd);
}
}
| |
/*
* Ferox, a graphics and game library in Java
*
* Copyright (c) 2012, Michael Ludwig
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.ferox.input.logic;
import com.ferox.input.*;
import java.util.*;
/**
* <p/>
* InputManager provides a higher level input handling API on top of the event-based system in
* com.ferox.input. It uses {@link Predicate predicates} to determine when to run specific {@link Action
* actions}. Instead of being executed whenever an event occurs, these are invoked in a controlled manner
* during the game loop by calling {@link #process()} each frame.
* <p/>
* Here is example code to configure and use an InputManager:
* <p/>
* <pre>
* MouseEventKeySource window; // however you get one of these (e.g.
* Framework.createSurface())
* InputManager manager = new InputManager();
* manager.on(Predicates.keyPressed(KeyCode.ESCAPE))
* .trigger(new Action() { ... });
* // add any other actions desired
* manager.attach(window);
*
* while(true) {
* manager.process();
* // update
* // render
* }
* </pre>
*
* @author Michael Ludwig
*/
public class InputManager {
// Most recent state built from raw events; written only while holding the 'listener' monitor.
private InputState lastState;
// State up to which triggers were evaluated by the last process() call.
private InputState lastProcessedState;
// States accumulated between process() calls; drained and cleared by process().
private final Queue<InputState> stateQueue;
private final InternalListener listener; // also acts as synchronization lock
private final List<PredicatedAction> triggers;
// Currently attached event source; writes are guarded by synchronized(this).
private MouseKeyEventSource source;
/**
 * Create a new InputManager that is not attached to any MouseKeyEventSource, and must be attached before
 * it can process any events. It's still permissible to register actions before attaching to an event
 * source.
 */
public InputManager() {
stateQueue = new ArrayDeque<InputState>();
triggers = new ArrayList<PredicatedAction>();
listener = new InternalListener();
lastState = new InputState();
lastProcessedState = lastState;
}
/**
 * <p/>
 * Attach the InputManager to the given MouseKeyEventSource. The manager can only be attached to a single
 * event source at a time and must be detached before listening on another source.
 * <p/>
 * After being attached, the manager will listen to all events from the source and accumulate them as a
 * list of {@link InputState state} changes. New states can be processed every frame to trigger actions by
 * calling {@link #process()}.
 *
 * @param source The source to attach to
 *
 * @throws NullPointerException if source is null
 * @throws IllegalStateException if the manager is currently attached to another component
 */
public void attach(MouseKeyEventSource source) {
if (source == null) {
throw new NullPointerException("Source cannot be null");
}
synchronized (this) {
if (this.source != null) {
throw new IllegalStateException("InputManager already attached to another event source");
}
// Register for both key and mouse events through the single internal listener.
source.addKeyListener(listener);
source.addMouseListener(listener);
this.source = source;
}
}
/**
 * Detach this InputManager from the event source it's currently attached to. If the adapter is not
 * attached to a component, nothing happens. After detaching, the manager will no longer receive events
 * and calling {@link #process()} will no longer work.
 */
public void detach() {
synchronized (this) {
if (source != null) {
source.removeKeyListener(listener);
source.removeMouseListener(listener);
source = null;
}
}
}
/**
 * @return The event source this manager is attached to or null
 */
public MouseKeyEventSource getEventSource() {
return source;
}
/**
 * <p/>
 * Begin registering a new action with this InputManager that will be triggered when <var>predicate</var>
 * evaluates to true. The action will not be registered until {@link ActionBuilder#trigger(Action)} is
 * called on the returned ActionBuilder.
 * <p/>
 * This allows code to read reasonably fluently: <code>manager.on(condition).trigger(action);</code>
 *
 * @param predicate The predicate that controls when the action is executed
 *
 * @return An ActionBuilder to complete the registering process
 *
 * @throws NullPointerException if predicate is null
 */
public ActionBuilder on(Predicate predicate) {
return new ActionBuilderImpl(predicate);
}
/**
 * Remove or unregister the given action from this manager. If the action was registered with multiple
 * predicates, all occurrences of it will be removed to guarantee that <var>trigger</var> can no longer be
 * invoked as a result of calling this manager's {@link #process()} method.
 *
 * @param trigger The trigger to remove
 *
 * @throws NullPointerException if trigger is null
 */
public void removeAction(Action trigger) {
if (trigger == null) {
throw new NullPointerException("Action cannot be null");
}
synchronized (listener) {
Iterator<PredicatedAction> it = triggers.iterator();
while (it.hasNext()) {
// remove all occurrences of the action
if (it.next().trigger == trigger) {
it.remove();
}
}
}
}
/**
 * Process all events that have been accumulated since the last call to {@link #process()} and run all
 * actions that are triggered based on their associated predicate. This will run the actions on the
 * calling thread.
 */
public void process() {
synchronized (listener) {
InputState prev = lastProcessedState;
// Replay every queued state transition in arrival order.
for (InputState next : stateQueue) {
processTriggers(prev, next);
prev = next;
}
// A snapshot of the latest state is processed as a final transition even if
// no events arrived this frame — NOTE(review): presumably so time-sensitive
// predicates (e.g. held keys) still fire; confirm against Predicate impls.
lastProcessedState = new InputState(lastState);
processTriggers(prev, lastProcessedState);
stateQueue.clear();
}
}
// Evaluates every registered trigger against the (prev -> next) transition.
// Caller must hold the 'listener' monitor so 'triggers' is stable.
private void processTriggers(InputState prev, InputState next) {
int ct = triggers.size();
for (int i = 0; i < ct; i++) {
triggers.get(i).apply(prev, next);
}
}
// caller must be synchronized on event listener
private void advanceState(InputState next) {
lastState = next;
stateQueue.add(next);
}
/*
 * Internal class used to listen for events to prevent InputManager being
 * used as a listener directly. It is also the monitor used by each manager.
 */
private class InternalListener implements KeyListener, MouseListener {
@Override
public void handleEvent(KeyEvent event) {
// 'this' is the same object process() locks on ('listener').
synchronized (this) {
advanceState(new InputState(lastState, event));
}
}
@Override
public void handleEvent(MouseEvent event) {
synchronized (this) {
advanceState(new InputState(lastState, event));
}
}
}
/*
 * Internal ActionBuilder implementation
 */
private class ActionBuilderImpl implements ActionBuilder {
Predicate condition;
public ActionBuilderImpl(Predicate base) {
if (base == null) {
throw new NullPointerException("Predicate cannot be null");
}
condition = base;
}
@Override
public void trigger(Action action) {
if (action == null) {
throw new NullPointerException("Action cannot be null");
}
synchronized (listener) {
triggers.add(new PredicatedAction(action, condition));
}
}
}
/*
 * Simple pair between an action and its triggering predicate
 */
private static class PredicatedAction {
final Action trigger;
final Predicate condition;
public PredicatedAction(Action trigger, Predicate condition) {
this.trigger = trigger;
this.condition = condition;
}
// Runs the action only when the predicate accepts the state transition.
public void apply(InputState prev, InputState next) {
if (condition.apply(prev, next)) {
trigger.perform(prev, next);
}
}
}
}
| |
package com.jeremysu1.catscalore;
import android.graphics.Color;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import com.squareup.picasso.Picasso;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Random;
/**
 * Color-matching game: 12 grid buttons get random ARGB colors; the player must
 * tap the button whose RGB components are displayed in the text views. A correct
 * tap hides the button and a new target color is chosen.
 */
public class GameActivity extends AppCompatActivity {
/** Resource ids of the 12 grid buttons, in index order (index == logical button id). */
private static final int[] BUTTON_IDS = {
R.id.btn0, R.id.btn1, R.id.btn2, R.id.btn3, R.id.btn4, R.id.btn5,
R.id.btn6, R.id.btn7, R.id.btn8, R.id.btn9, R.id.btn10, R.id.btn11
};
private ArrayList<Button> gridButtons = new ArrayList<>();
// Color assigned to each button, indexed by logical button id.
private ArrayList<Integer> gridColors = new ArrayList<>();
// Logical ids of buttons still visible (not yet found by the player).
private ArrayList<Integer> remainingButtons = new ArrayList<>();
// Logical id of the button the player currently has to find; -1 when none.
private int current_button = -1;
private Random rand = new Random();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_game2);
// Wire up all grid buttons, give each a random color and mark it as remaining.
for (int i = 0; i < BUTTON_IDS.length; i++) {
gridButtons.add((Button) findViewById(BUTTON_IDS[i]));
remainingButtons.add(i);
setRandomColors(i, rand);
}
getImage();
selectNewColor();
}
/** Loads a random cat picture into the header ImageView. */
private void getImage(){
ImageView catPic = (ImageView) findViewById(R.id.catImage);
Picasso.with(this).load("https://thecatapi.com/api/images/get?type=jpg").into(catPic);
}
/**
 * Assigns a random opaque ARGB color to the given button and records it.
 *
 * @param button_id logical button index (0..11)
 * @param rand source of randomness
 */
private void setRandomColors(int button_id, Random rand){
Button btn = gridButtons.get(button_id);
int r = rand.nextInt(256);
int g = rand.nextInt(256);
int b = rand.nextInt(256);
int color = Color.argb(255, r, g, b);
gridColors.add(button_id, color);
btn.setBackgroundColor(color);
}
/**
 * Picks a random still-visible button as the new target and shows its RGB
 * components in the text views. Does nothing once every button is found.
 */
private void selectNewColor(){
int size = remainingButtons.size();
if(size == 0)
return; // game over: all buttons found
int newButtonId = remainingButtons.get(rand.nextInt(size));
int color = gridColors.get(newButtonId); // plain int index, no boxing needed
current_button = newButtonId;
TextView r_view = (TextView) findViewById(R.id.redTextView);
TextView g_view = (TextView) findViewById(R.id.greenTextView);
TextView b_view = (TextView) findViewById(R.id.blueTextView);
r_view.setText(Integer.toString(Color.red(color)));
g_view.setText(Integer.toString(Color.green(color)));
b_view.setText(Integer.toString(Color.blue(color)));
}
/**
 * Click handler for every grid button (set via android:onClick). Hides the
 * button and advances the game only when the tapped button is the current target.
 *
 * @param view the tapped button
 */
public void gridButtonClick(View view){
int idx = buttonIndexForViewId(view.getId());
if (idx >= 0 && idx == current_button) {
gridButtons.get(idx).setVisibility(View.INVISIBLE);
// Remove by value (Integer.valueOf), not by position.
remainingButtons.remove(Integer.valueOf(idx));
selectNewColor();
}
}
/**
 * Maps a view resource id to its logical button index.
 *
 * @return index into {@link #BUTTON_IDS}, or -1 if the id is not a grid button
 */
private int buttonIndexForViewId(int viewId) {
for (int i = 0; i < BUTTON_IDS.length; i++) {
if (BUTTON_IDS[i] == viewId)
return i;
}
return -1;
}
}
| |
/*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.util;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
/**
* @author Peter Karich
*/
public class DouglasPeuckerTest {
// get some real life points from graphhopper API
// http://217.92.216.224:8080/?point=49.945642,11.571436&point=49.946001,11.580706
// Fixture 1: 32 [lon, lat] pairs in GeoJSON-style order.
private final String points1 = "[[11.571499218899739,49.945605917549265],[11.571664621792689,49.94570668665409],[11.571787742639804,49.94578156499077],[11.572065649302282,49.94590338198625],[11.572209445511016,49.94595944760649],[11.57229438213172,49.94598850487147],"
+ "[11.573315297960832,49.946237913062525],[11.57367665112786,49.946338495902836],[11.573895511937787,49.94641784458796],[11.574013417378367,49.94646347939514],[11.574228180368875,49.94654916107392],[11.574703899950622,49.94677509993557],"
+ "[11.575003599561832,49.946924670344394],[11.575434615658997,49.94711838544425],[11.575559971680342,49.94716010869652],[11.57563783024932,49.947186185729194],[11.57609697228887,49.94727875919518],[11.57656188852851,49.947290121330845],"
+ "[11.576840167720023,49.94727782787258],[11.576961425921949,49.94725827009808],[11.577226852861648,49.947215242994176],[11.577394863457863,49.94717668623872],[11.577511092517772,49.94715005041249],[11.577635517216523,49.947112238715114],"
+ "[11.577917149169382,49.94702655703634],[11.577969116970207,49.947010724552214],[11.578816061738493,49.94673523932849],[11.579533552666014,49.94648974269233],[11.580073719771365,49.946299007824784],[11.580253092503245,49.946237913062525],"
+ "[11.580604946179799,49.94608871518274],[11.580740546749693,49.94603041438826]]";
// Fixture 2: 13 [lon, lat] pairs, used for the point-order test.
private final String points2 = "[[9.961074440801317,50.203764443183644],[9.96106605889796,50.20365789987872],[9.960999562464645,50.20318963087774],[9.96094144793469,50.202952888673984],[9.96223002587773,50.20267889356641],[9.962200968612752,50.20262022024289],"
+ "[9.961859918278305,50.201853928011374],[9.961668810881722,50.20138565901039],[9.96216874485095,50.20128507617008],[9.961953795595925,50.20088553877664],[9.961899033827313,50.200686794534775],[9.961716680863127,50.20014066696481],[9.961588158344957,50.199798499043254]]";
// Sanity-check that parse2DJSON reads [lon, lat] pairs into (lat, lon) points.
@Test
public void testParse() {
PointList pointList = new PointList();
pointList.parse2DJSON("[[11.571499218899739,49.945605917549265],[11.571664621792689,49.94570668665409]]");
assertEquals(49.945605917549265, pointList.getLat(0), 1e-6);
assertEquals(11.571499218899739, pointList.getLon(0), 1e-6);
assertEquals(49.94570668665409, pointList.getLat(1), 1e-6);
assertEquals(11.571664621792689, pointList.getLon(1), 1e-6);
}
// Simplification with 0.5m tolerance should shrink fixture 1 from 32 to 20 points.
@Test
public void testPathSimplify() {
PointList pointList = new PointList();
pointList.parse2DJSON(points1);
assertEquals(32, pointList.size());
new DouglasPeucker().setMaxDistance(.5).simplify(pointList);
// Arrays.asList(2, 4, 6, 7, 8, 9, 12, 14, 15, 17, 18, 19, 20, 22, 24, 27, 28, 29, 31, 33),
assertEquals(20, pointList.size());
}
// Whole-list and sub-range simplify must agree, and no NaN placeholders may leak out.
@Test
public void testSimplifyCheckPointCount() {
PointList pointList = new PointList();
pointList.parse2DJSON(points1);
DouglasPeucker dp = new DouglasPeucker().setMaxDistance(.5);
assertEquals(32, pointList.size());
dp.simplify(pointList);
assertEquals(20, pointList.size());
assertFalse(pointList.toString().contains("NaN"), pointList.toString());
pointList.clear();
pointList.parse2DJSON(points1);
dp.simplify(pointList, 0, pointList.size() - 1);
assertEquals(20, pointList.size());
pointList.clear();
pointList.parse2DJSON(points1);
// Simplifying a copy of [10, 20) must remove as many points as simplifying
// the same sub-range in place.
int removed1 = dp.simplify(pointList.copy(10, 20));
pointList.clear();
pointList.parse2DJSON(points1);
int removed2 = dp.simplify(pointList, 10, 19);
assertEquals(removed1, removed2);
}
// Surviving points must keep their original relative order.
@Test
public void testSimplifyCheckPointOrder() {
PointList pointList = new PointList();
pointList.parse2DJSON(points2);
assertEquals(13, pointList.size());
new DouglasPeucker().setMaxDistance(.5).simplify(pointList);
assertEquals(11, pointList.size());
assertFalse(pointList.toString().contains("NaN"), pointList.toString());
assertEquals("(50.203764443183644,9.961074440801317), (50.20318963087774,9.960999562464645), (50.202952888673984,9.96094144793469), (50.20267889356641,9.96223002587773), (50.201853928011374,9.961859918278305), "
+ "(50.20138565901039,9.961668810881722), (50.20128507617008,9.96216874485095), (50.20088553877664,9.961953795595925), (50.200686794534775,9.961899033827313), (50.20014066696481,9.961716680863127), (50.199798499043254,9.961588158344957)",
pointList.toString());
}
// removeNaN must compact the list in place and be idempotent.
@Test
public void testRemoveNaN() {
PointList pl = new PointList(10, true);
pl.add(Double.NaN, Double.NaN, Double.NaN);
pl.add(1, 1, 1);
pl.add(Double.NaN, Double.NaN, Double.NaN);
pl.add(Double.NaN, Double.NaN, Double.NaN);
pl.add(Double.NaN, Double.NaN, Double.NaN);
pl.add(5, 5, 5);
pl.add(6, 6, 6);
pl.add(7, 7, 7);
pl.add(Double.NaN, Double.NaN, Double.NaN);
pl.add(8, 8, 8);
pl.add(Double.NaN, Double.NaN, Double.NaN);
pl.add(9, 9, 9);
pl.add(10, 10, 10);
pl.add(Double.NaN, Double.NaN, Double.NaN);
pl.add(Double.NaN, Double.NaN, Double.NaN);
pl.add(Double.NaN, Double.NaN, Double.NaN);
pl.add(14, 14, 14);
pl.add(Double.NaN, Double.NaN, Double.NaN);
DouglasPeucker.removeNaN(pl);
// doing it again should be no problem
DouglasPeucker.removeNaN(pl);
DouglasPeucker.removeNaN(pl);
assertEquals(8, pl.size());
List<Integer> expected = Arrays.asList(1, 5, 6, 7, 8, 9, 10, 14);
List<Integer> given = new ArrayList<>();
for (int i = 0; i < pl.size(); i++) {
// Each surviving point was added as (v, v, v), so lat == lon == ele.
assertEquals(pl.getLat(i), pl.getEle(i), 1.e-6);
assertEquals(pl.getLon(i), pl.getEle(i), 1.e-6);
given.add((int) pl.getLat(i));
}
assertEquals(expected, given);
}
// Elevation distance also decides whether a point survives in 3D mode.
@Test
public void test3dPathSimplify() {
PointList pointList = new PointList(5, true);
pointList.add(0, 0, 0);
pointList.add(0.01, 0, 10); // can be removed
pointList.add(0.02, 0, 20); // can be removed
pointList.add(0.03, 0, 30); // can't be removed
pointList.add(0.04, 0, 50);
new DouglasPeucker().setMaxDistance(1).setElevationMaxDistance(1).simplify(pointList);
assertEquals("(0.0,0.0,0.0), (0.03,0.0,30.0), (0.04,0.0,50.0)", pointList.toString());
}
// With elevation tolerance at MAX_VALUE, elevation is effectively ignored.
@Test
public void test3dPathSimplifyElevationDisabled() {
PointList pointList = new PointList(5, true);
pointList.add(0, 0, 0);
pointList.add(0.03, 0, 30); // would be kept, if we cared about elevation
pointList.add(0.04, 0, 50);
new DouglasPeucker().setMaxDistance(1).setElevationMaxDistance(Double.MAX_VALUE).simplify(pointList);
assertEquals("(0.0,0.0,0.0), (0.04,0.0,50.0)", pointList.toString());
}
@Test
public void test3dPathSimplifyElevationMaxDistFive() {
PointList pointList = new PointList(5, true);
pointList.add(0, 0, 0);
pointList.add(0.01, 0, 14); // <5m from straight line (10), remove
pointList.add(0.02, 0, 20); // on straight line, remove
pointList.add(0.03, 0, 30); // >5m from straight line, keep
pointList.add(0.04, 0, 50);
new DouglasPeucker().setMaxDistance(1).setElevationMaxDistance(5).simplify(pointList);
assertEquals("(0.0,0.0,0.0), (0.03,0.0,30.0), (0.04,0.0,50.0)", pointList.toString());
}
@Test
public void test3dPathSimplifyWithMissingElevation() {
PointList pointList = new PointList(5, true);
pointList.add(0, 0, 0);
pointList.add(0, 0.5, Double.NaN); // on straight line in 2d space, ignore elevation
pointList.add(0, 1, 14); // <5m from straight line (10), remove
pointList.add(1, 1, 20);
new DouglasPeucker().setMaxDistance(1).setElevationMaxDistance(1).simplify(pointList);
assertEquals("(0.0,0.0,0.0), (0.0,1.0,14.0), (1.0,1.0,20.0)", pointList.toString());
}
// Degenerate case: identical first and last point must not break simplification (3D).
@Test
public void test3dSimplifyStartEndSame() {
PointList pointList = new PointList(3, true);
pointList.add(0, 0, 0);
pointList.add(0.03, 0, 30);
pointList.add(0, 0, 0);
new DouglasPeucker().setMaxDistance(1).setElevationMaxDistance(1).simplify(pointList);
assertEquals("(0.0,0.0,0.0), (0.03,0.0,30.0), (0.0,0.0,0.0)", pointList.toString());
}
// Degenerate case: identical first and last point must not break simplification (2D).
@Test
public void test2dSimplifyStartEndSame() {
PointList pointList = new PointList(3, false);
pointList.add(0, 0);
pointList.add(0.03, 0);
pointList.add(0, 0);
new DouglasPeucker().setMaxDistance(1).setElevationMaxDistance(1).simplify(pointList);
assertEquals("(0.0,0.0), (0.03,0.0), (0.0,0.0)", pointList.toString());
}
}
| |
package pl.pronux.sokker.data.sql.dao;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import pl.pronux.sokker.data.sql.dto.CoachDto;
import pl.pronux.sokker.model.Coach;
import pl.pronux.sokker.model.Person;
import pl.pronux.sokker.model.Training;
public class TrainersDao {
private Connection connection;
/** Creates a DAO operating on the given, externally managed JDBC connection. */
public TrainersDao(Connection connection) {
this.connection = connection;
}
/**
 * Deletes all rows from the coach table.
 *
 * @throws SQLException If the delete fails.
 */
public void clearCoaches() throws SQLException {
PreparedStatement ps = connection.prepareStatement("DELETE FROM coach");
try {
ps.executeUpdate();
} finally {
// Close even when executeUpdate throws, to avoid a statement leak.
ps.close();
}
}
/**
 * Deletes all coach assignments recorded for the given training.
 *
 * @param training Training whose coach assignments are removed.
 * @throws SQLException If the delete fails.
 */
public void deleteCoachesAtTraining(Training training) throws SQLException {
PreparedStatement ps = connection.prepareStatement("DELETE FROM coaches_at_trainings WHERE id_training = ?");
try {
ps.setInt(1, training.getId());
ps.executeUpdate();
} finally {
// Close even on failure, to avoid a statement leak.
ps.close();
}
}
/**
 * Loads all coaches with the given status, ordered by surname.
 *
 * @param status
 *            0 - engaged, 1 - fired
 * @return List of matching coaches (empty if none).
 * @throws SQLException If the query fails.
 */
public List<Coach> getCoaches(int status) throws SQLException {
List<Coach> alCoach = new ArrayList<Coach>();
PreparedStatement ps = connection
.prepareStatement("SELECT id_coach, name, surname,job, signed, countryfrom, age, salary, generalskill, stamina, pace, technique, passing, keepers, defenders, playmakers, scorers, status, note FROM coach WHERE status = ? ORDER BY surname");
try {
ps.setInt(1, status);
ResultSet rs = ps.executeQuery();
try {
while (rs.next()) {
alCoach.add(new CoachDto(rs).getCoach());
}
} finally {
rs.close();
}
} finally {
// Close even on failure, to avoid a statement/result-set leak.
ps.close();
}
return alCoach;
}
/**
 * Populates the given training with its head coach, junior coach and assistants,
 * resolving coach ids through the supplied map.
 *
 * @param training Training to populate (its assistants list is reset).
 * @param coachMap Coaches keyed by id, used to resolve the stored ids.
 * @return The same training instance, populated.
 * @throws SQLException If the query fails.
 */
public Training getCoachesAtTraining(Training training, Map<Integer, Coach> coachMap) throws SQLException {
training.setAssistants(new ArrayList<Coach>());
PreparedStatement pstm = connection.prepareStatement("SELECT id_coach,id_job FROM coaches_at_trainings WHERE id_training = ?");
try {
pstm.setInt(1, training.getId());
ResultSet rs = pstm.executeQuery();
try {
while (rs.next()) {
int coachId = rs.getInt(1);
int job = rs.getInt(2);
switch (job) {
case Coach.JOB_HEAD:
training.setHeadCoach(coachMap.get(coachId));
break;
case Coach.JOB_JUNIORS:
training.setJuniorCoach(coachMap.get(coachId));
break;
case Coach.JOB_ASSISTANT:
training.getAssistants().add(coachMap.get(coachId));
break;
default:
// Unknown job codes are ignored.
break;
}
}
} finally {
rs.close();
}
} finally {
// Close even on failure, to avoid a statement/result-set leak.
pstm.close();
}
return training;
}
/**
 * Checks whether a coach with the given id exists.
 *
 * @param id Coach id.
 * @return {@code true} if exactly one matching row exists.
 * @throws SQLException If the query fails.
 */
public boolean existsCoach(int id) throws SQLException {
PreparedStatement ps = connection.prepareStatement("SELECT count(id_coach) FROM coach WHERE id_coach = ?");
try {
ps.setInt(1, id);
ResultSet rs = ps.executeQuery();
try {
// Original code leaked ps/rs when the result set was empty; the
// try/finally guarantees both are always closed.
return rs.next() && rs.getInt(1) == 1;
} finally {
rs.close();
}
} finally {
ps.close();
}
}
/**
 * Checks whether a coach with the given id exists with a non-zero (historical) status.
 *
 * @param id Coach id.
 * @return {@code true} if exactly one matching row exists.
 * @throws SQLException If the query fails.
 */
public boolean existsCoachHistory(int id) throws SQLException {
PreparedStatement ps = connection.prepareStatement("SELECT count(id_coach) FROM coach WHERE id_coach = ? AND status > 0");
try {
ps.setInt(1, id);
ResultSet rs = ps.executeQuery();
try {
// Original code leaked ps/rs when the result set was empty; the
// try/finally guarantees both are always closed.
return rs.next() && rs.getInt(1) == 1;
} finally {
rs.close();
}
} finally {
ps.close();
}
}
/**
 * Inserts a coach with status 0 (engaged). Negative skill values are stored as 0.
 *
 * @param coach Coach to insert.
 * @throws SQLException If the insert fails.
 */
public void addCoach(Coach coach) throws SQLException {
PreparedStatement pstm = connection
.prepareStatement("INSERT INTO coach(id_coach,signed,name,surname,job,countryfrom,generalskill,stamina,pace,technique,passing,keepers,defenders,playmakers,scorers,salary,age, status) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,0)");
try {
pstm.setInt(1, coach.getId());
pstm.setInt(2, coach.getSigned());
pstm.setString(3, coach.getName());
pstm.setString(4, coach.getSurname());
pstm.setInt(5, coach.getJob());
pstm.setInt(6, coach.getCountryfrom());
// Skills are clamped to 0 to avoid storing negative (unknown) values.
pstm.setInt(7, nonNegative(coach.getGeneralskill()));
pstm.setInt(8, nonNegative(coach.getStamina()));
pstm.setInt(9, nonNegative(coach.getPace()));
pstm.setInt(10, nonNegative(coach.getTechnique()));
pstm.setInt(11, nonNegative(coach.getPassing()));
pstm.setInt(12, nonNegative(coach.getKeepers()));
pstm.setInt(13, nonNegative(coach.getDefenders()));
pstm.setInt(14, nonNegative(coach.getPlaymakers()));
pstm.setInt(15, nonNegative(coach.getScorers()));
pstm.setInt(16, coach.getSalary().toInt());
pstm.setInt(17, coach.getAge());
pstm.executeUpdate();
} finally {
// Close even on failure, to avoid a statement leak.
pstm.close();
}
}
/** Returns the value itself when non-negative, otherwise 0. */
private static int nonNegative(int value) {
return value < 0 ? 0 : value;
}
/**
 * Records which coaches took part in a training session: one row per coach,
 * tagged with the job they performed (head coach, junior coach, assistant).
 *
 * @param training the training session whose coach assignments are stored
 * @throws SQLException on database access error
 */
public void addCoachesAtTraining(Training training) throws SQLException {
    // The original reused one PreparedStatement variable and closed only the
    // last instance, leaking every earlier statement; each insert now goes
    // through a helper that closes its own statement.
    if (training.getHeadCoach() != null) {
        insertCoachAtTraining(training.getId(), training.getHeadCoach().getId(), Coach.JOB_HEAD);
    }
    if (training.getJuniorCoach() != null) {
        insertCoachAtTraining(training.getId(), training.getJuniorCoach().getId(), Coach.JOB_JUNIORS);
    }
    for (Coach coach : training.getAssistants()) {
        insertCoachAtTraining(training.getId(), coach.getId(), Coach.JOB_ASSISTANT);
    }
}

/** Inserts a single (training, coach, job) row; the statement is closed even on failure. */
private void insertCoachAtTraining(int trainingId, int coachId, int job) throws SQLException {
    try (PreparedStatement pstm = connection.prepareStatement(
            "INSERT INTO coaches_at_trainings(id_training, id_coach, id_job) VALUES (?,?,?)")) {
        pstm.setInt(1, trainingId);
        pstm.setInt(2, coachId);
        pstm.setInt(3, job);
        pstm.executeUpdate();
    }
}
/**
 * Sets the status column of the given coach (e.g. Coach.STATUS_SACKED).
 *
 * @param id     coach identifier
 * @param status the new status value
 * @throws SQLException on database access error
 */
public void moveCoach(int id, int status) throws SQLException {
    // try-with-resources: the original leaked the statement on exception.
    try (PreparedStatement ps = connection.prepareStatement(
            "UPDATE coach SET status = ? WHERE id_coach = ?")) {
        ps.setInt(1, status);
        ps.setInt(2, id);
        ps.executeUpdate();
    }
}
/**
 * Marks every coach with status 0 (newly added / unassigned) as sacked and
 * returns the ids of the affected coaches, one per line.
 *
 * @return newline-separated list of sacked coach ids (empty string when none)
 * @throws SQLException on database access error
 */
public String removeCoaches() throws SQLException {
    // StringBuilder replaces the original O(n^2) string concatenation loop;
    // try-with-resources closes ps/rs even when moveCoach() throws.
    StringBuilder deletedCoaches = new StringBuilder();
    try (PreparedStatement ps = connection.prepareStatement(
                "SELECT id_coach FROM coach WHERE status = 0");
            ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
            int coachId = rs.getInt(1);
            deletedCoaches.append(coachId).append('\n');
            moveCoach(coachId, Coach.STATUS_SACKED);
        }
    }
    return deletedCoaches.toString();
}
/**
 * Marks every status-0 coach whose id is NOT in the given id list as sacked.
 *
 * @param sTemp a parenthesized, comma-separated id list (e.g. "(1,2,3)")
 *     spliced verbatim into the SQL IN clause
 * @return newline-separated list of sacked coach ids (empty string when none)
 * @throws SQLException on database access error
 */
public String removeCoaches(String sTemp) throws SQLException {
    // SECURITY NOTE: sTemp is concatenated directly into the SQL text — a
    // variable-length IN list cannot be bound as a single '?' parameter.
    // Callers must ensure sTemp is a trusted, well-formed id list.
    StringBuilder deletedCoaches = new StringBuilder();
    try (PreparedStatement ps = connection.prepareStatement(
                "SELECT id_coach FROM coach WHERE status = 0 AND id_coach NOT IN " + sTemp);
            ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
            int coachId = rs.getInt(1);
            deletedCoaches.append(coachId).append('\n');
            moveCoach(coachId, Coach.STATUS_SACKED);
        }
    }
    return deletedCoaches.toString();
}
/**
 * Updates the mutable attributes (job, age, signed) of an existing coach row.
 *
 * @param coach carries the coach id and the new attribute values
 * @throws SQLException on database access error
 */
public void updateCoach(Coach coach) throws SQLException {
    // try-with-resources: the original leaked the statement on exception.
    try (PreparedStatement ps = connection.prepareStatement(
            "UPDATE coach SET job = ?, age = ?, signed = ? WHERE id_coach = ?")) {
        ps.setInt(1, coach.getJob());
        ps.setInt(2, coach.getAge());
        ps.setInt(3, coach.getSigned());
        ps.setInt(4, coach.getId());
        ps.executeUpdate();
    }
}
/**
 * Stores the free-text note attached to a coach.
 *
 * @param coach the person whose note and id are written
 * @throws SQLException on database access error
 */
public void updateCoachNote(Person coach) throws SQLException {
    // try-with-resources: the original leaked the statement on exception.
    try (PreparedStatement ps = connection.prepareStatement(
            "UPDATE coach SET note = ? WHERE id_coach = ?")) {
        ps.setString(1, coach.getNote());
        // setLong: Person.getId() is a long here, unlike the int Coach ids above.
        ps.setLong(2, coach.getId());
        ps.executeUpdate();
    }
}
/**
 * Repairs a coach row by rewriting the keepers skill, clamping negative
 * values to 0.
 *
 * @param coach carries the coach id and the keepers value to store
 * @throws SQLException on database access error
 */
public void repairCoach(Coach coach) throws SQLException {
    // try-with-resources: the original leaked the statement on exception.
    try (PreparedStatement ps = connection.prepareStatement(
            "UPDATE coach SET keepers = ? WHERE id_coach = ?")) {
        // Math.max replaces the original if/else clamp.
        ps.setInt(1, Math.max(0, coach.getKeepers()));
        ps.setInt(2, coach.getId());
        ps.executeUpdate();
    }
}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.api.serviceusage.v1.stub;
import static com.google.api.serviceusage.v1.ServiceUsageClient.ListServicesPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.api.serviceusage.v1.BatchEnableServicesRequest;
import com.google.api.serviceusage.v1.BatchEnableServicesResponse;
import com.google.api.serviceusage.v1.BatchGetServicesRequest;
import com.google.api.serviceusage.v1.BatchGetServicesResponse;
import com.google.api.serviceusage.v1.DisableServiceRequest;
import com.google.api.serviceusage.v1.DisableServiceResponse;
import com.google.api.serviceusage.v1.EnableServiceRequest;
import com.google.api.serviceusage.v1.EnableServiceResponse;
import com.google.api.serviceusage.v1.GetServiceRequest;
import com.google.api.serviceusage.v1.ListServicesRequest;
import com.google.api.serviceusage.v1.ListServicesResponse;
import com.google.api.serviceusage.v1.OperationMetadata;
import com.google.api.serviceusage.v1.Service;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link ServiceUsageStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (serviceusage.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the total timeout of getService to 30 seconds:
*
* <pre>{@code
* ServiceUsageStubSettings.Builder serviceUsageSettingsBuilder =
* ServiceUsageStubSettings.newBuilder();
* serviceUsageSettingsBuilder
* .getServiceSettings()
* .setRetrySettings(
* serviceUsageSettingsBuilder
* .getServiceSettings()
* .getRetrySettings()
* .toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30))
* .build());
* ServiceUsageStubSettings serviceUsageSettings = serviceUsageSettingsBuilder.build();
* }</pre>
*/
// NOTE(review): this class is produced by gapic-generator-java (see the
// AUTO-GENERATED header above); prefer regenerating over hand-editing.
@Generated("by gapic-generator-java")
public class ServiceUsageStubSettings extends StubSettings<ServiceUsageStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/cloud-platform.read-only")
          .add("https://www.googleapis.com/auth/service.management")
          .build();

  // Immutable per-RPC call settings, built from the Builder in the constructor.
  private final UnaryCallSettings<EnableServiceRequest, Operation> enableServiceSettings;
  private final OperationCallSettings<
          EnableServiceRequest, EnableServiceResponse, OperationMetadata>
      enableServiceOperationSettings;
  private final UnaryCallSettings<DisableServiceRequest, Operation> disableServiceSettings;
  private final OperationCallSettings<
          DisableServiceRequest, DisableServiceResponse, OperationMetadata>
      disableServiceOperationSettings;
  private final UnaryCallSettings<GetServiceRequest, Service> getServiceSettings;
  private final PagedCallSettings<
          ListServicesRequest, ListServicesResponse, ListServicesPagedResponse>
      listServicesSettings;
  private final UnaryCallSettings<BatchEnableServicesRequest, Operation>
      batchEnableServicesSettings;
  private final OperationCallSettings<
          BatchEnableServicesRequest, BatchEnableServicesResponse, OperationMetadata>
      batchEnableServicesOperationSettings;
  private final UnaryCallSettings<BatchGetServicesRequest, BatchGetServicesResponse>
      batchGetServicesSettings;

  // Page-token / page-size plumbing for ListServices pagination.
  private static final PagedListDescriptor<ListServicesRequest, ListServicesResponse, Service>
      LIST_SERVICES_PAGE_STR_DESC =
          new PagedListDescriptor<ListServicesRequest, ListServicesResponse, Service>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListServicesRequest injectToken(ListServicesRequest payload, String token) {
              return ListServicesRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListServicesRequest injectPageSize(ListServicesRequest payload, int pageSize) {
              return ListServicesRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListServicesRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListServicesResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Service> extractResources(ListServicesResponse payload) {
              // Defensive: a null services list is surfaced as an empty iterable.
              return payload.getServicesList() == null
                  ? ImmutableList.<Service>of()
                  : payload.getServicesList();
            }
          };

  private static final PagedListResponseFactory<
          ListServicesRequest, ListServicesResponse, ListServicesPagedResponse>
      LIST_SERVICES_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListServicesRequest, ListServicesResponse, ListServicesPagedResponse>() {
            @Override
            public ApiFuture<ListServicesPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListServicesRequest, ListServicesResponse> callable,
                ListServicesRequest request,
                ApiCallContext context,
                ApiFuture<ListServicesResponse> futureResponse) {
              PageContext<ListServicesRequest, ListServicesResponse, Service> pageContext =
                  PageContext.create(callable, LIST_SERVICES_PAGE_STR_DESC, request, context);
              return ListServicesPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to enableService. */
  public UnaryCallSettings<EnableServiceRequest, Operation> enableServiceSettings() {
    return enableServiceSettings;
  }

  /** Returns the object with the settings used for calls to enableService. */
  public OperationCallSettings<EnableServiceRequest, EnableServiceResponse, OperationMetadata>
      enableServiceOperationSettings() {
    return enableServiceOperationSettings;
  }

  /** Returns the object with the settings used for calls to disableService. */
  public UnaryCallSettings<DisableServiceRequest, Operation> disableServiceSettings() {
    return disableServiceSettings;
  }

  /** Returns the object with the settings used for calls to disableService. */
  public OperationCallSettings<DisableServiceRequest, DisableServiceResponse, OperationMetadata>
      disableServiceOperationSettings() {
    return disableServiceOperationSettings;
  }

  /** Returns the object with the settings used for calls to getService. */
  public UnaryCallSettings<GetServiceRequest, Service> getServiceSettings() {
    return getServiceSettings;
  }

  /** Returns the object with the settings used for calls to listServices. */
  public PagedCallSettings<ListServicesRequest, ListServicesResponse, ListServicesPagedResponse>
      listServicesSettings() {
    return listServicesSettings;
  }

  /** Returns the object with the settings used for calls to batchEnableServices. */
  public UnaryCallSettings<BatchEnableServicesRequest, Operation> batchEnableServicesSettings() {
    return batchEnableServicesSettings;
  }

  /** Returns the object with the settings used for calls to batchEnableServices. */
  public OperationCallSettings<
          BatchEnableServicesRequest, BatchEnableServicesResponse, OperationMetadata>
      batchEnableServicesOperationSettings() {
    return batchEnableServicesOperationSettings;
  }

  /** Returns the object with the settings used for calls to batchGetServices. */
  public UnaryCallSettings<BatchGetServicesRequest, BatchGetServicesResponse>
      batchGetServicesSettings() {
    return batchGetServicesSettings;
  }

  // Creates the transport-specific stub; only gRPC transport is supported here.
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public ServiceUsageStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcServiceUsageStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "serviceusage.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "serviceusage.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(ServiceUsageStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  // Snapshots every builder into an immutable settings object.
  protected ServiceUsageStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    enableServiceSettings = settingsBuilder.enableServiceSettings().build();
    enableServiceOperationSettings = settingsBuilder.enableServiceOperationSettings().build();
    disableServiceSettings = settingsBuilder.disableServiceSettings().build();
    disableServiceOperationSettings = settingsBuilder.disableServiceOperationSettings().build();
    getServiceSettings = settingsBuilder.getServiceSettings().build();
    listServicesSettings = settingsBuilder.listServicesSettings().build();
    batchEnableServicesSettings = settingsBuilder.batchEnableServicesSettings().build();
    batchEnableServicesOperationSettings =
        settingsBuilder.batchEnableServicesOperationSettings().build();
    batchGetServicesSettings = settingsBuilder.batchGetServicesSettings().build();
  }

  /** Builder for ServiceUsageStubSettings. */
  public static class Builder extends StubSettings.Builder<ServiceUsageStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<EnableServiceRequest, Operation> enableServiceSettings;
    private final OperationCallSettings.Builder<
            EnableServiceRequest, EnableServiceResponse, OperationMetadata>
        enableServiceOperationSettings;
    private final UnaryCallSettings.Builder<DisableServiceRequest, Operation>
        disableServiceSettings;
    private final OperationCallSettings.Builder<
            DisableServiceRequest, DisableServiceResponse, OperationMetadata>
        disableServiceOperationSettings;
    private final UnaryCallSettings.Builder<GetServiceRequest, Service> getServiceSettings;
    private final PagedCallSettings.Builder<
            ListServicesRequest, ListServicesResponse, ListServicesPagedResponse>
        listServicesSettings;
    private final UnaryCallSettings.Builder<BatchEnableServicesRequest, Operation>
        batchEnableServicesSettings;
    private final OperationCallSettings.Builder<
            BatchEnableServicesRequest, BatchEnableServicesResponse, OperationMetadata>
        batchEnableServicesOperationSettings;
    private final UnaryCallSettings.Builder<BatchGetServicesRequest, BatchGetServicesResponse>
        batchGetServicesSettings;

    // Named retryable-code sets shared by the per-method defaults below.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      // "no_retry_0_codes": empty set — these RPCs are not retried.
      definitions.put(
          "no_retry_0_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named retry-parameter presets shared by the per-method defaults below.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      // 60s flat timeout, no retry.
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeout(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(60000L))
              .setTotalTimeout(Duration.ofMillis(60000L))
              .build();
      definitions.put("no_retry_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      enableServiceSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      enableServiceOperationSettings = OperationCallSettings.newBuilder();
      disableServiceSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      disableServiceOperationSettings = OperationCallSettings.newBuilder();
      getServiceSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listServicesSettings = PagedCallSettings.newBuilder(LIST_SERVICES_PAGE_STR_FACT);
      batchEnableServicesSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      batchEnableServicesOperationSettings = OperationCallSettings.newBuilder();
      batchGetServicesSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              enableServiceSettings,
              disableServiceSettings,
              getServiceSettings,
              listServicesSettings,
              batchEnableServicesSettings,
              batchGetServicesSettings);
      initDefaults(this);
    }

    protected Builder(ServiceUsageStubSettings settings) {
      super(settings);

      enableServiceSettings = settings.enableServiceSettings.toBuilder();
      enableServiceOperationSettings = settings.enableServiceOperationSettings.toBuilder();
      disableServiceSettings = settings.disableServiceSettings.toBuilder();
      disableServiceOperationSettings = settings.disableServiceOperationSettings.toBuilder();
      getServiceSettings = settings.getServiceSettings.toBuilder();
      listServicesSettings = settings.listServicesSettings.toBuilder();
      batchEnableServicesSettings = settings.batchEnableServicesSettings.toBuilder();
      batchEnableServicesOperationSettings =
          settings.batchEnableServicesOperationSettings.toBuilder();
      batchGetServicesSettings = settings.batchGetServicesSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              enableServiceSettings,
              disableServiceSettings,
              getServiceSettings,
              listServicesSettings,
              batchEnableServicesSettings,
              batchGetServicesSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the generated per-method retry codes, retry params, and
    // long-running-operation polling defaults to the builder.
    private static Builder initDefaults(Builder builder) {
      builder
          .enableServiceSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

      builder
          .disableServiceSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

      builder
          .getServiceSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

      builder
          .listServicesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

      builder
          .batchEnableServicesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

      builder
          .batchGetServicesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

      builder
          .enableServiceOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<EnableServiceRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(EnableServiceResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(45000L))
                      .setInitialRpcTimeout(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeout(Duration.ZERO)
                      .setTotalTimeout(Duration.ofMillis(300000L))
                      .build()));

      builder
          .disableServiceOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<DisableServiceRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(DisableServiceResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(45000L))
                      .setInitialRpcTimeout(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeout(Duration.ZERO)
                      .setTotalTimeout(Duration.ofMillis(300000L))
                      .build()));

      builder
          .batchEnableServicesOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<BatchEnableServicesRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(
                  BatchEnableServicesResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(45000L))
                      .setInitialRpcTimeout(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeout(Duration.ZERO)
                      .setTotalTimeout(Duration.ofMillis(300000L))
                      .build()));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to enableService. */
    public UnaryCallSettings.Builder<EnableServiceRequest, Operation> enableServiceSettings() {
      return enableServiceSettings;
    }

    /** Returns the builder for the settings used for calls to enableService. */
    @BetaApi(
        "The surface for use by generated code is not stable yet and may change in the future.")
    public OperationCallSettings.Builder<
            EnableServiceRequest, EnableServiceResponse, OperationMetadata>
        enableServiceOperationSettings() {
      return enableServiceOperationSettings;
    }

    /** Returns the builder for the settings used for calls to disableService. */
    public UnaryCallSettings.Builder<DisableServiceRequest, Operation> disableServiceSettings() {
      return disableServiceSettings;
    }

    /** Returns the builder for the settings used for calls to disableService. */
    @BetaApi(
        "The surface for use by generated code is not stable yet and may change in the future.")
    public OperationCallSettings.Builder<
            DisableServiceRequest, DisableServiceResponse, OperationMetadata>
        disableServiceOperationSettings() {
      return disableServiceOperationSettings;
    }

    /** Returns the builder for the settings used for calls to getService. */
    public UnaryCallSettings.Builder<GetServiceRequest, Service> getServiceSettings() {
      return getServiceSettings;
    }

    /** Returns the builder for the settings used for calls to listServices. */
    public PagedCallSettings.Builder<
            ListServicesRequest, ListServicesResponse, ListServicesPagedResponse>
        listServicesSettings() {
      return listServicesSettings;
    }

    /** Returns the builder for the settings used for calls to batchEnableServices. */
    public UnaryCallSettings.Builder<BatchEnableServicesRequest, Operation>
        batchEnableServicesSettings() {
      return batchEnableServicesSettings;
    }

    /** Returns the builder for the settings used for calls to batchEnableServices. */
    @BetaApi(
        "The surface for use by generated code is not stable yet and may change in the future.")
    public OperationCallSettings.Builder<
            BatchEnableServicesRequest, BatchEnableServicesResponse, OperationMetadata>
        batchEnableServicesOperationSettings() {
      return batchEnableServicesOperationSettings;
    }

    /** Returns the builder for the settings used for calls to batchGetServices. */
    public UnaryCallSettings.Builder<BatchGetServicesRequest, BatchGetServicesResponse>
        batchGetServicesSettings() {
      return batchGetServicesSettings;
    }

    @Override
    public ServiceUsageStubSettings build() throws IOException {
      return new ServiceUsageStubSettings(this);
    }
  }
}
| |
package edu.harvard.iq.dataverse;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OrderBy;
import javax.persistence.Table;
import javax.persistence.Version;
import org.hibernate.validator.constraints.NotBlank;
import javax.validation.constraints.Pattern;
/**
*
* @author skraffmiller
*/
@Table(indexes = {@Index(columnList="datafile_id"), @Index(columnList="datasetversion_id")} )
@Entity
public class FileMetadata implements Serializable {
// Serialization id for the Serializable contract.
private static final long serialVersionUID = 1L;

private static final Logger logger = Logger.getLogger(FileMetadata.class.getCanonicalName());

// Display name of the file in this dataset version; restricted to characters
// safe in file names and required to be non-blank.
@Pattern(regexp="^[^:<>;#/\"\\*\\|\\?\\\\]*$", message = "File Name cannot contain any of the following characters: \\ / : * ? \" < > | ; # .")
@NotBlank(message = "Please specify a file name.")
@Column( nullable=false )
private String label = "";

// Free-text description of the file; TEXT column, so effectively unbounded.
@Column(columnDefinition = "TEXT")
private String description = "";

// Whether access to the file is restricted in this dataset version.
private boolean restricted;

// Owning dataset version (one FileMetadata row per file per version).
@ManyToOne
@JoinColumn(nullable=false)
private DatasetVersion datasetVersion;

// The physical data file this metadata describes.
@ManyToOne
@JoinColumn(nullable=false)
private DataFile dataFile;
/**
 * Produces a detached duplicate of this file metadata: the business fields
 * (label, description, restriction flag, category list, file and dataset
 * version references) are carried over, while DB bookkeeping such as the id
 * and optimistic-lock version is left unset.
 *
 * @return a new {@code FileMetadata} mirroring this one's business state
 */
public FileMetadata createCopy() {
    FileMetadata copy = new FileMetadata();
    copy.setDataFile(getDataFile());
    copy.setDatasetVersion(getDatasetVersion());
    copy.setLabel(getLabel());
    copy.setDescription(getDescription());
    copy.setRestricted(isRestricted());
    // Snapshot the category list so the copy does not share the original's list instance.
    copy.setCategories(new LinkedList<>(getCategories()));
    return copy;
}
/** @return the file's display name within this dataset version */
public String getLabel() {
return label;
}

public void setLabel(String label) {
this.label = label;
}

/** @return the free-text description of the file (may be empty) */
public String getDescription() {
return description;
}

public void setDescription(String description) {
this.description = description;
}

/** @return true when access to the file is restricted in this version */
public boolean isRestricted() {
return restricted;
}

public void setRestricted(boolean restricted) {
this.restricted = restricted;
}
/*
 * File Categories to which this version of the DataFile belongs:
 */
@ManyToMany
@JoinTable(indexes = {@Index(columnList="filecategories_id"),@Index(columnList="filemetadatas_id")})
@OrderBy("name")
private List<DataFileCategory> fileCategories;

// NOTE(review): fileCategories may be null until addCategory()/setCategories()
// is called — callers such as getCategoriesByName() guard against that.
public List<DataFileCategory> getCategories() {
return fileCategories;
}

public void setCategories(List<DataFileCategory> fileCategories) {
this.fileCategories = fileCategories;
}
/**
 * Associates a category with this file metadata, lazily creating the backing
 * list on first use.
 *
 * @param category the category to add
 */
public void addCategory(DataFileCategory category) {
    List<DataFileCategory> target = fileCategories;
    if (target == null) {
        target = new ArrayList<>();
        fileCategories = target;
    }
    target.add(category);
}
/**
 * Collects the names of all categories attached to this file metadata.
 *
 * @return a freshly allocated list of category names; empty when none are set
 */
public List<String> getCategoriesByName() {
    List<String> names = new ArrayList<>();
    if (fileCategories != null) {
        for (DataFileCategory category : fileCategories) {
            names.add(category.getName());
        }
    }
    return names;
}
// alternative, experimental method:
/**
 * Replaces this file's categories with the ones named in the given list.
 * The current category list is cleared first, then each named category is
 * looked up (and created if missing) on the parent dataset and linked both
 * ways.
 *
 * @param newCategoryNames category names to attach; null leaves the cleared
 *     list in place
 */
public void setCategoriesByName(List<String> newCategoryNames) {
    setCategories(null); // ?? TODO: investigate!

    if (newCategoryNames == null) {
        return;
    }

    for (String categoryName : newCategoryNames) {
        // Dataset.getCategoryByName() will check if such a category
        // already exists for the parent dataset; it will be created
        // if not. The method will return null if the supplied
        // category name is null or empty. -- L.A. 4.0 beta 10
        DataFileCategory fileCategory;
        try {
            // try/catch absorbs any null pointer exceptions along the
            // getDatasetVersion().getDataset() chain, just in case.
            fileCategory = this.getDatasetVersion().getDataset().getCategoryByName(categoryName);
        } catch (Exception ex) {
            fileCategory = null;
        }

        if (fileCategory != null) {
            this.addCategory(fileCategory);
            fileCategory.addFileMetadata(this);
        }
    }
}
/*
note that this version only *adds* new categories, but does not
remove the ones that have been unchecked!
public void setCategoriesByName(List<String> newCategoryNames) {
if (newCategoryNames != null) {
Collection<String> oldCategoryNames = getCategoriesByName();
for (int i = 0; i < newCategoryNames.size(); i++) {
if (!oldCategoryNames.contains(newCategoryNames.get(i))) {
// Dataset.getCategoryByName() will check if such a category
// already exists for the parent dataset; it will be created
// if not. The method will return null if the supplied
// category name is null or empty. -- L.A. 4.0 beta 10
DataFileCategory fileCategory = null;
try {
// Using "try {}" to catch any null pointer exceptions,
// just in case:
fileCategory = this.getDatasetVersion().getDataset().getCategoryByName(newCategoryNames.get(i));
} catch (Exception ex) {
fileCategory = null;
}
if (fileCategory != null) {
this.addCategory(fileCategory);
fileCategory.addFileMetadata(this);
}
}
}
}
}
*/
/**
 * Attaches the named category to this file metadata unless it is already
 * present. Dataset.getCategoryByName() checks whether such a category
 * already exists for the parent dataset and creates it if not; it returns
 * null when the supplied name is null or empty. -- L.A. 4.0 beta 10
 * @param newCategoryName the category name; null or empty is a no-op.
 */
public void addCategoryByName(String newCategoryName) {
    // isEmpty() instead of equals(""): same semantics, clearer intent.
    if (newCategoryName != null && !newCategoryName.isEmpty()) {
        Collection<String> oldCategoryNames = getCategoriesByName();
        if (!oldCategoryNames.contains(newCategoryName)) {
            DataFileCategory fileCategory = null;
            try {
                // Using "try {}" to guard against null pointer exceptions
                // anywhere along the getDatasetVersion().getDataset() chain:
                fileCategory = this.getDatasetVersion().getDataset().getCategoryByName(newCategoryName);
            } catch (Exception ex) {
                fileCategory = null;
            }
            if (fileCategory != null) {
                logger.log(Level.FINE, "Found file category for {0}", newCategoryName);
                this.addCategory(fileCategory);
                fileCategory.addFileMetadata(this);
            } else {
                logger.log(Level.INFO, "Could not find file category for {0}", newCategoryName);
            }
        }
        // else: this file metadata already belongs to the category; nothing to do.
    }
}
/**
 * Returns the dataset version this file metadata belongs to.
 * @return the owning DatasetVersion.
 */
public DatasetVersion getDatasetVersion() {
    return datasetVersion;
}

/**
 * Sets the dataset version this file metadata belongs to.
 * @param datasetVersion the owning DatasetVersion.
 */
public void setDatasetVersion(DatasetVersion datasetVersion) {
    this.datasetVersion = datasetVersion;
}

/**
 * Returns the DataFile this metadata describes.
 * @return the described DataFile.
 */
public DataFile getDataFile() {
    return dataFile;
}

/**
 * Sets the DataFile this metadata describes.
 * @param dataFile the described DataFile.
 */
public void setDataFile(DataFile dataFile) {
    this.dataFile = dataFile;
}
// Surrogate primary key, generated by the database on insert (IDENTITY column).
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;

/**
 * Getter for property id.
 * @return Value of property id; null until the entity is persisted.
 */
public Long getId() {
    return this.id;
}

/**
 * Setter for property id.
 * @param id New value of property id.
 */
public void setId(Long id) {
    this.id = id;
}
// Optimistic-locking version column, incremented by the persistence provider
// on every update.
@Version
private Long version;

/**
 * Getter for property version.
 * @return Value of property version.
 */
public Long getVersion() {
    return this.version;
}

/**
 * Setter for property version.
 * @param version New value of property version.
 */
public void setVersion(Long version) {
    this.version = version;
}
/**
 * Hash code derived solely from the database id, consistent with equals().
 * @return the id's hash code, or 0 when the entity has no id yet.
 */
@Override
public int hashCode() {
    return (id == null) ? 0 : id.hashCode();
}
/**
 * Entity-identity equality: two FileMetadata objects are equal when both
 * are FileMetadata instances and carry the same id (two null ids match).
 * @param object candidate object, possibly null.
 * @return true when the ids are equal.
 */
@Override
public boolean equals(Object object) {
    if (!(object instanceof FileMetadata)) {
        return false;
    }
    FileMetadata other = (FileMetadata) object;
    if (this.id == null) {
        return other.id == null;
    }
    return this.id.equals(other.id);
}
/**
 * An experimental method for comparing 2 file metadatas *by content*; i.e.,
 * for checking 2 metadatas from 2 different versions, to determine whether
 * any of the actual metadata fields have changed between versions.
 * Only label and description are compared. The sets of file categories are
 * deliberately not compared: since this functionality is for deciding
 * whether to index an extra filemetadata, and categories are not indexed
 * and not displayed on the search cards, they are skipped, as of now.
 * -- L.A. 4.0 beta12
 * @param other the metadata to compare against; null is never content-equal.
 * @return true when label and description both match.
 */
public boolean contentEquals(FileMetadata other) {
    if (other == null) {
        return false;
    }
    String thisLabel = this.getLabel();
    String otherLabel = other.getLabel();
    if (thisLabel == null ? otherLabel != null : !thisLabel.equals(otherLabel)) {
        return false;
    }
    String thisDescription = this.getDescription();
    String otherDescription = other.getDescription();
    if (thisDescription == null ? otherDescription != null : !thisDescription.equals(otherDescription)) {
        return false;
    }
    return true;
}
/**
 * Short debug representation carrying the entity id.
 * @return a string of the form "edu.harvard.iq.dvn.core.study.FileMetadata[id=...]".
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("edu.harvard.iq.dvn.core.study.FileMetadata[id=");
    sb.append(id).append("]");
    return sb.toString();
}
// Orders file metadatas case-insensitively by label (upper-cased with the
// default locale).
// NOTE(review): throws NullPointerException when either label is null —
// callers appear to rely on labels always being set; confirm before reuse.
public static final Comparator<FileMetadata> compareByLabel = new Comparator<FileMetadata>() {
    @Override
    public int compare(FileMetadata o1, FileMetadata o2) {
        return o1.getLabel().toUpperCase().compareTo(o2.getLabel().toUpperCase());
    }
};
}
| |
package org.openntf.domino.tests.ntf;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.security.AccessController;
import java.security.PrivilegedExceptionAction;
import java.util.Date;
import java.util.Map;
import java.util.UUID;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
import lotus.domino.Database;
import lotus.domino.DateRange;
import lotus.domino.DateTime;
import lotus.domino.Document;
import lotus.domino.DocumentCollection;
import lotus.domino.DxlExporter;
import lotus.domino.Item;
import lotus.domino.Name;
import lotus.domino.NoteCollection;
import lotus.domino.NotesException;
import lotus.domino.NotesFactory;
import lotus.domino.NotesThread;
import lotus.domino.Session;
/*
 * @author NTF
 * This runnable class is designed to test the behavior of the lotus.domino API with regard to recycling objects.
 * If we fail to recycle the date objects and run multiple threads, we get crashes.
 * If we fail to recycle non-date objects and run multiple threads, we get Backend Out of Memory Handles messages
 *
 */
public class NotesRunner implements Runnable {
    /** Reflection handle to NotesBase.GetCppObj; resolved once in the static initializer. */
    private static Method getCppMethod;
    /** JVM bitness (32/64) parsed from com.ibm.vm.bitmode; recorded for diagnostics only. */
    @SuppressWarnings("unused")
    private static int bitMode;
    /** Reflection handle to the weak-reference object's cpp_object field (the backend C++ handle id). */
    private static Field cpp_field;
    /** Reflection handle to NotesBase.weakObject. */
    private static Field wr_field;

    /** Per-thread C++ handle id of this thread's Session; 0 until a session is registered. */
    private static ThreadLocal<Long> sessionid = new ThreadLocal<Long>() {
        @Override
        protected Long initialValue() {
            return 0L;
        }

        @Override
        public void set(final Long value) {
            super.set(value);
            System.out.println("Session id: " + value);
        }
    };

    /** Per-thread minimum handle id observed; logs each time a new minimum appears. */
    private static ThreadLocal<Long> minid = new ThreadLocal<Long>() {
        @Override
        protected Long initialValue() {
            return Long.MAX_VALUE;
        }

        @Override
        public void set(final Long value) {
            if (value < super.get()) {
                super.set(value);
                if (sessionid.get() > 0) {
                    // Handles appear 8 bytes apart; delta is the (negative) distance
                    // from the session handle, i.e. difference divided by 8.
                    long delta = (value - sessionid.get()) >> 3;
                    System.out.println(Thread.currentThread().getName() + " is New min: " + value + " session diff: " + delta
                            + " session: " + sessionid.get());
                } else {
                    System.out.println("Setting up session id as min");
                }
            }
        }
    };

    /** Per-thread maximum handle id observed; logs when the spread from the session handle exceeds 8192 slots. */
    private static ThreadLocal<Long> maxid = new ThreadLocal<Long>() {
        @Override
        protected Long initialValue() {
            return 0L;
        }

        @Override
        public void set(final Long value) {
            if (value > super.get()) {
                super.set(value);
                if (sessionid.get() > 0) {
                    long delta = (value - sessionid.get()) >> 3; // difference divided by 8
                    if (delta > 8192L) {
                        System.out.println(Thread.currentThread().getName() + " is New max: " + value + " session diff: " + delta
                                + " session: " + sessionid.get());
                    }
                } else {
                    System.out.println("Setting up session id as max");
                }
            }
        }
    };

    static {
        try {
            AccessController.doPrivileged(new PrivilegedExceptionAction<Object>() {
                @Override
                public Object run() throws Exception {
                    String bitModeRaw = System.getProperty("com.ibm.vm.bitmode");
                    try {
                        // FIX: the parsed value was previously stored in a discarded
                        // local, leaving the bitMode field permanently unassigned.
                        bitMode = Integer.valueOf(bitModeRaw);
                        System.out.println("Set bitmode to " + bitMode);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    getCppMethod = lotus.domino.local.NotesBase.class.getDeclaredMethod("GetCppObj", (Class<?>[]) null);
                    getCppMethod.setAccessible(true);
                    wr_field = lotus.domino.local.NotesBase.class.getDeclaredField("weakObject");
                    wr_field.setAccessible(true);
                    Class<?> clazz = wr_field.getType();
                    cpp_field = clazz.getDeclaredField("cpp_object");
                    cpp_field.setAccessible(true);
                    return null;
                }
            });
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns the backend C++ handle id for any Notes object, updating the
     * per-thread min/max trackers as a side effect.
     * @param base any lotus.domino object.
     * @return the handle id, or 0 when reflection fails.
     */
    public static long getLotusId(final lotus.domino.Base base) {
        try {
            Object o = wr_field.get(base);
            long result = (Long) cpp_field.get(o);
            maxid.set(result);
            minid.set(result);
            return result;
        } catch (Exception e) {
            return 0L;
        }
    }

    /** Recycles a Notes object, swallowing NotesException: cleanup is best-effort by design. */
    public static void incinerate(final lotus.domino.Base base) {
        try {
            base.recycle();
        } catch (NotesException e) {
            // deliberately ignored: incinerate is best-effort cleanup
        }
    }

    @SuppressWarnings("unused")
    private static Map<Long, Byte> idMap = new ConcurrentHashMap<Long, Byte>();

    /**
     * Entry point: initializes the Notes thread environment, starts the worker
     * thread(s) (loop bound is currently 1) and terminates the environment.
     */
    public static void main(final String[] args) throws InterruptedException {
        try {
            NotesThread.sinitThread();
            for (int i = 0; i < 1; i++) {
                NotesRunner run = new NotesRunner();
                NotesThread nt = new NotesThread(run, "Thread " + i);
                nt.start();
                Thread.sleep(500);
            }
        } finally {
            NotesThread.stermThread();
        }
    }

    public NotesRunner() {
    }

    /**
     * Handle-churn test: creates and recycles ~100k batches of Names, DateTimes,
     * DateRanges, Documents, Items and ColorObjects against names.nsf, logging
     * handle-id growth via getLotusId(). Objects are recycled every iteration.
     * @param session the per-thread Notes session.
     */
    public void run1(final Session session) throws NotesException {
        Long sessId = getLotusId(session);
        sessionid.set(sessId);
        Database db = session.getDatabase("", "names.nsf");
        System.out.println("Db id:" + getLotusId(db));
        Name name = null;
        int i = 0;
        try {
            for (i = 0; i <= 100000; i++) {
                name = session.createName(UUID.randomUUID().toString());
                getLotusId(name);
                DateTime dt = session.createDateTime(new Date());
                getLotusId(dt);
                DateTime end = session.createDateTime(new Date());
                getLotusId(end);
                DateRange dr = session.createDateRange(dt, end);
                getLotusId(dr);
                Document doc = db.createDocument();
                getLotusId(doc);
                Item i1 = doc.replaceItemValue("Foo", dr);
                getLotusId(i1);
                Item i2 = doc.replaceItemValue("Bar", dr.getText());
                getLotusId(i2);
                Item i3 = doc.replaceItemValue("Blah", dr.getStartDateTime().getLocalTime());
                getLotusId(i3);
                lotus.domino.ColorObject color = session.createColorObject();
                getLotusId(color);
                color.setRGB(128, 128, 128);
                Item i4 = doc.replaceItemValue("color", color.getNotesColor());
                getLotusId(i4);
                i1.recycle();
                i2.recycle();
                i3.recycle();
                i4.recycle();
                DateTime create = doc.getCreated();
                getLotusId(create);
                @SuppressWarnings("unused")
                String lc = create.getLocalTime();
                dr.recycle();
                doc.recycle();
                dt.recycle();
                end.recycle();
                create.recycle();
                color.recycle();
                name.recycle();
            }
        } catch (Throwable t) {
            t.printStackTrace();
            System.out.println("Exception at loop point " + i);
        }
    }

    /**
     * Item-type test against log.nsf: saves a Names/Authors item, then replaces
     * it with a numeric list and prints the resulting item type and flags.
     * @param session the per-thread Notes session.
     */
    public void run2(final Session session) throws NotesException {
        Database db = session.getDatabase("", "log.nsf");
        Document doc = db.createDocument();
        Item names = doc.replaceItemValue("Names", "CN=Nathan T Freeman/O=REDPILL");
        names.setAuthors(true);
        doc.replaceItemValue("form", "test");
        doc.save(true);
        String nid = doc.getNoteID();
        doc.recycle();
        doc = db.getDocumentByID(nid);
        Vector<Double> numbers = new Vector<Double>();
        numbers.add(new Double(1));
        numbers.add(new Double(2));
        doc.replaceItemValue("Names", numbers);
        doc.save(true);
        doc.recycle();
        doc = db.getDocumentByID(nid);
        names = doc.getFirstItem("Names");
        System.out.println("Names is " + names.getType() + " with " + names.isNames() + " and " + names.isAuthors() + " and value "
                + names.getText());
        doc.recycle();
        db.recycle();
    }

    /**
     * Benchmarks four strategies for collecting the UNIDs of every document in
     * events4.nsf: DocumentCollection first/next, NoteCollection via note-id
     * ints, NoteCollection first/next, and per-note getDocumentByID. A warm-up
     * pass primes the caches before timing.
     * @param session the per-thread Notes session.
     */
    public void run4(final Session session) throws NotesException {
        // Warm-up pass so the later timings are not dominated by cold caches.
        Database db = session.getDatabase("", "events4.nsf");
        NoteCollection cacheNC = db.createNoteCollection(false);
        cacheNC.setSelectDocuments(true);
        cacheNC.buildCollection();
        cacheNC.recycle();
        DocumentCollection cacheDc = db.getAllDocuments();
        Document cacheDoc = cacheDc.getFirstDocument();
        cacheDoc.recycle();
        cacheDc.recycle();
        db.recycle();

        // Strategy 1: DocumentCollection walk with getNextDocument.
        db = session.getDatabase("", "events4.nsf");
        DocumentCollection dc = db.getAllDocuments();
        Document doc = dc.getFirstDocument();
        Document nextDoc = null;
        int dcCount = dc.getCount();
        int j = 0;
        String[] dcUnids = new String[dcCount];
        long dcStart = System.nanoTime();
        while (doc != null) {
            nextDoc = dc.getNextDocument(doc);
            dcUnids[j++] = doc.getUniversalID();
            doc.recycle();
            doc = nextDoc;
        }
        System.out.println("DocumentCollection strategy got UNIDs for " + dcCount + " docs in " + (System.nanoTime() - dcStart) / 1000
                + "us");
        dc.recycle();
        db.recycle();

        // Strategy 2: NoteCollection with the int note-id array.
        db = session.getDatabase("", "events4.nsf");
        NoteCollection nc3 = db.createNoteCollection(false);
        nc3.setSelectDocuments(true);
        nc3.buildCollection();
        int nc3Count = nc3.getCount();
        String[] nc3Unids = new String[nc3Count];
        int[] nids = nc3.getNoteIDs();
        int k = 0;
        long nc3Start = System.nanoTime();
        for (int id : nids) {
            nc3Unids[k++] = nc3.getUNID(Integer.toHexString(id));
        }
        System.out.println("NoteCollection strategy ints got UNIDs for " + nc3Count + " notes in " + (System.nanoTime() - nc3Start) / 1000
                + "us");
        nc3.recycle();
        db.recycle();

        // Strategy 3: NoteCollection first/next note-id walk.
        db = session.getDatabase("", "events4.nsf");
        NoteCollection nc = db.createNoteCollection(false);
        nc.setSelectDocuments(true);
        nc.buildCollection();
        int ncCount = nc.getCount();
        String[] ncUnids = new String[ncCount];
        String nid = nc.getFirstNoteID();
        long ncStart = System.nanoTime();
        for (int i = 0; i < ncCount; i++) {
            ncUnids[i] = nc.getUNID(nid);
            nid = nc.getNextNoteID(nid);
        }
        System.out.println("NoteCollection strategy first/next got UNIDs for " + ncCount + " notes in " + (System.nanoTime() - ncStart)
                / 1000 + "us");
        nc.recycle();
        db.recycle();

        // Strategy 4: open each document by note id and read its UNID.
        db = session.getDatabase("", "events4.nsf");
        NoteCollection nc2 = db.createNoteCollection(false);
        nc2.setSelectDocuments(true);
        nc2.buildCollection();
        int nc2Count = nc2.getCount();
        String[] nc2Unids = new String[nc2Count];
        nid = nc2.getFirstNoteID();
        long nc2Start = System.nanoTime();
        for (int i = 0; i < nc2Count; i++) {
            Document nc2doc = db.getDocumentByID(nid);
            nc2Unids[i] = nc2doc.getUniversalID();
            nc2doc.recycle();
            nid = nc2.getNextNoteID(nid);
        }
        System.out.println("NoteCollection strategy doc got UNIDs for " + nc2Count + " notes in " + (System.nanoTime() - nc2Start) / 1000
                + "us");
        nc2.recycle();
        db.recycle();
    }

    /**
     * Exports the entire design of index.ntf (icon, ACL, design elements) as
     * raw-format DXL and writes it to c:\data\index.dxl.
     * @param session the per-thread Notes session.
     */
    public void run3(final Session session) throws NotesException {
        Database db = session.getDatabase("", "index.ntf");
        NoteCollection nc = db.createNoteCollection(false);
        nc.setSelectIcon(true);
        nc.setSelectAcl(true);
        nc.selectAllDesignElements(true);
        nc.buildCollection();
        DxlExporter export = session.createDxlExporter();
        export.setForceNoteFormat(true);
        export.setRichTextOption(DxlExporter.DXLRICHTEXTOPTION_RAW);
        String dxl = export.exportDxl(nc);
        nc.recycle();
        export.recycle();
        db.recycle();
        try {
            PrintWriter out = new PrintWriter("c:\\data\\index.dxl");
            out.println(dxl);
            out.close();
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }

    /** Thread body: creates a session, runs the currently selected scenario (run4), recycles the session. */
    @Override
    public void run() {
        try {
            System.out.println("Starting NotesRunner");
            Session session = NotesFactory.createSession();
            run4(session);
            session.recycle();
        } catch (Throwable t) {
            t.printStackTrace();
        }
        System.out.println("FINI!");
    }
}
| |
package com.pennapps.labs.pennmobile.adapters;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Build;
import android.preference.PreferenceManager;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
import androidx.cardview.widget.CardView;
import androidx.constraintlayout.widget.ConstraintLayout;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.github.mikephil.charting.charts.LineChart;
import com.github.mikephil.charting.components.Legend;
import com.github.mikephil.charting.components.LimitLine;
import com.github.mikephil.charting.components.XAxis;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.data.LineData;
import com.github.mikephil.charting.data.LineDataSet;
import com.pennapps.labs.pennmobile.R;
import com.pennapps.labs.pennmobile.classes.LaundryRoom;
import com.pennapps.labs.pennmobile.classes.LaundryUsage;
import com.pennapps.labs.pennmobile.classes.MachineDetail;
import com.pennapps.labs.pennmobile.classes.MachineList;
import com.pennapps.labs.pennmobile.classes.Machines;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.GregorianCalendar;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
 * RecyclerView adapter rendering one card per laundry room: room name and
 * location, horizontal washer/dryer machine lists, open-machine counts, and
 * (when usage data is available) a daily traffic chart. In "home" mode the
 * card chrome, titles and chart are stripped so the card embeds flat.
 *
 * Created by Jackie on 2017-10-21.
 */
public class LaundryRoomAdapter extends RecyclerView.Adapter<LaundryRoomAdapter.CustomViewHolder> {
    Context mContext;
    ArrayList<LaundryRoom> mRooms;
    List<LaundryUsage> mRoomsData;   // per-room usage series, parallel to mRooms; may be null
    SharedPreferences sp;
    boolean isHome;                  // true when embedded on the home screen (compact rendering)

    public LaundryRoomAdapter(Context context, ArrayList<LaundryRoom> rooms, List<LaundryUsage> roomsData, boolean isHome) {
        mContext = context;
        mRooms = rooms;
        mRoomsData = roomsData;
        this.isHome = isHome;
        sp = PreferenceManager.getDefaultSharedPreferences(mContext);
    }

    @Override
    public LaundryRoomAdapter.CustomViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.laundry_room_item, parent, false);
        return new CustomViewHolder(view, mContext, mRooms, mRoomsData);
    }

    @Override
    public void onBindViewHolder(LaundryRoomAdapter.CustomViewHolder holder, int position) {
        LaundryRoom room = mRooms.get(position);
        if (isHome) {
            // Compact mode: hide titles and chart, strip card background and margins.
            holder.name.setVisibility(View.GONE);
            holder.title.setVisibility(View.GONE);
            holder.lineChart.setVisibility(View.GONE);
            holder.layout.setBackgroundResource(0);
            LinearLayout.LayoutParams layoutParams = (LinearLayout.LayoutParams) holder.layout.getLayoutParams();
            layoutParams.setMargins(0, 0, 0, 0);
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
                layoutParams.setMarginStart(0);
            }
            holder.layout.setLayoutParams(layoutParams);
            CardView container = (CardView) holder.dryerRecyclerView.getParent();
            ConstraintLayout.LayoutParams params = (ConstraintLayout.LayoutParams) container.getLayoutParams();
            params.setMargins(0, 0, 0, 0);
            container.setLayoutParams(params);
            container = (CardView) holder.washerRecyclerView.getParent();
            params = (ConstraintLayout.LayoutParams) container.getLayoutParams();
            params.setMargins(0, 0, 0, 0);
            container.setLayoutParams(params);
        }
        // update name of laundry room and type of machine
        int hall_no = room.getId();
        String location = sp.getString(hall_no + mContext.getString(R.string.location), "");
        String roomName = room.getName();
        holder.title.setText(roomName);
        holder.name.setText(location.toUpperCase());
        Machines machines = room.getMachines();
        List<MachineDetail> machineDetails = machines.getMachineDetailList();
        List<MachineDetail> washers = new ArrayList<>();
        List<MachineDetail> dryers = new ArrayList<>();
        for (MachineDetail machineDetail : machineDetails) {
            // FIX: constant-first comparison is null-safe; a machine with a
            // missing type no longer crashes the bind (it falls into dryers,
            // matching the old else-branch behavior for any non-"washer" type).
            if ("washer".equals(machineDetail.getType())) {
                washers.add(machineDetail);
            } else {
                dryers.add(machineDetail);
            }
        }
        // add washer info: recycler view for the time remaining
        LaundryMachineAdapter washerAdapter = new LaundryMachineAdapter(mContext, washers, mContext.getString(R.string.washer), roomName);
        holder.washerRecyclerView.setLayoutManager(new LinearLayoutManager(mContext, LinearLayoutManager.HORIZONTAL, false));
        holder.washerRecyclerView.setAdapter(washerAdapter);
        // add dryer info: recycler view for the time remaining
        LaundryMachineAdapter adapter = new LaundryMachineAdapter(mContext, dryers, mContext.getString(R.string.dryer), roomName);
        holder.dryerRecyclerView.setLayoutManager(new LinearLayoutManager(mContext, LinearLayoutManager.HORIZONTAL, false));
        holder.dryerRecyclerView.setAdapter(adapter);
        // overview of how many machines are available
        MachineList washerList = room.getMachines().getWashers();
        int openWashers = washerList.getOpen();
        int runningWashers = washerList.getRunning();
        int offlineWashers = washerList.getOffline();
        int outOfOrderWashers = washerList.getOutOfOrder();
        int totalWashers = openWashers + runningWashers + offlineWashers + outOfOrderWashers;
        holder.washerAvailability.setText(openWashers + " of " + totalWashers + " Open");
        MachineList dryerList = room.getMachines().getDryers();
        int openDryers = dryerList.getOpen();
        int runningDryers = dryerList.getRunning();
        int offlineDryers = dryerList.getOffline();
        int outOfOrderDryers = dryerList.getOutOfOrder();
        int totalDryers = openDryers + runningDryers + offlineDryers + outOfOrderDryers;
        holder.dryerAvailability.setText(openDryers + " of " + totalDryers + " Open");
        if (mRoomsData != null) {
            if (mRoomsData.size() > position) createLaundryChart(holder, position);
        }
    }

    /**
     * Fills the availability line chart with the room's average of washer and
     * dryer usage per hour, highlights the current hour with a limit line,
     * and styles the chart (no touch, no grid, no legend).
     */
    private void createLaundryChart(LaundryRoomAdapter.CustomViewHolder holder, int position) {
        LaundryUsage roomUsage = mRoomsData.get(position);
        List<Double> washerData = roomUsage.getWasherData().getAdjustedData();
        List<Double> dryerData = roomUsage.getDryerData().getAdjustedData();
        List<Double> roomData = new ArrayList<>();
        // FIX: iterate only over the overlap of the two series; previously a
        // shorter dryer series caused an IndexOutOfBoundsException.
        int points = Math.min(washerData.size(), dryerData.size());
        for (int i = 0; i < points; i++) {
            double average = (washerData.get(i).doubleValue() + dryerData.get(i).doubleValue()) / 2;
            roomData.add(average);
        }
        if (roomData.isEmpty()) {
            // FIX: Collections.max below throws NoSuchElementException on an
            // empty list; with no data there is nothing to chart.
            return;
        }
        List<Entry> graphEntries = new ArrayList<>();
        for (int i = 0; i < roomData.size(); i++) {
            graphEntries.add(new Entry(i, roomData.get(i).floatValue()));
        }
        // add entries to dataset
        LineDataSet dataSet = new LineDataSet(graphEntries, "Traffic");
        dataSet.setDrawValues(false);
        dataSet.setDrawCircles(false);
        dataSet.setDrawFilled(true);
        // curvy line
        dataSet.setMode(LineDataSet.Mode.CUBIC_BEZIER);
        LineData lineData = new LineData(dataSet);
        // set data to chart
        LineChart laundryChart = holder.lineChart;
        laundryChart.setData(lineData);
        // styling of chart
        laundryChart.setTouchEnabled(false);
        laundryChart.getXAxis().setDrawGridLines(false);
        laundryChart.getXAxis().setPosition(XAxis.XAxisPosition.BOTTOM);
        laundryChart.getXAxis().setValueFormatter(new TimeXAxisValueFormatter());
        laundryChart.getAxisRight().setEnabled(false);
        laundryChart.getAxisLeft().setEnabled(false);
        laundryChart.setDrawBorders(false);
        laundryChart.setDescription(null);
        int hourOfDay = new GregorianCalendar().get(Calendar.HOUR_OF_DAY);
        // highlight time of day
        LimitLine ll = new LimitLine(hourOfDay);
        laundryChart.getXAxis().addLimitLine(ll);
        laundryChart.getAxisLeft().setAxisMinimum(0);
        // top of the y-axis sits 10% above the busiest hour
        float max = Collections.max(roomData).floatValue();
        laundryChart.getAxisLeft().setAxisMaximum(max + max / 10);
        laundryChart.fitScreen();
        Legend legend = laundryChart.getLegend();
        legend.setEnabled(false);
        laundryChart.invalidate();
    }

    @Override
    public int getItemCount() {
        return mRooms.size();
    }

    /** Holds the ButterKnife-bound views of one laundry-room card. */
    public class CustomViewHolder extends RecyclerView.ViewHolder {
        Context mContext;
        ArrayList<LaundryRoom> mRooms;
        @BindView(R.id.laundry_room_title)
        TextView name;
        @BindView(R.id.fav_laundry_room_name)
        TextView title;
        @BindView(R.id.washer_availability)
        TextView washerAvailability;
        @BindView(R.id.dryer_availability)
        TextView dryerAvailability;
        @BindView(R.id.laundry_washer_machine_list)
        RecyclerView washerRecyclerView;
        @BindView(R.id.laundry_dryer_machine_list)
        RecyclerView dryerRecyclerView;
        @BindView(R.id.laundry_availability_chart)
        LineChart lineChart;
        @BindView(R.id.laundry_card)
        View layout;

        // roomsData is currently unused but kept for call-site compatibility.
        public CustomViewHolder(View view, Context context, ArrayList<LaundryRoom> rooms, List<LaundryUsage> roomsData) {
            super(view);
            mContext = context;
            mRooms = rooms;
            ButterKnife.bind(this, view);
        }
    }
}
| |
package com.saffrontech.vertx;
import io.vertx.core.Vertx;
import io.vertx.core.eventbus.Message;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.web.Router;
import io.vertx.ext.web.handler.sockjs.BridgeOptions;
import io.vertx.ext.web.handler.sockjs.PermittedOptions;
import io.vertx.ext.web.handler.sockjs.SockJSHandler;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.net.URI;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.LongAdder;
import static org.junit.Assert.*;
/**
 * Integration tests for EventBusBridge against a local vert.x SockJS
 * event-bus bridge listening on port 8765.
 *
 * NOTE(review): assertions executed inside vert.x handler callbacks run on
 * event-loop threads and may not fail the JUnit test directly; the latch
 * timeouts are the effective failure signal. Confirm before relying on the
 * in-handler asserts.
 *
 * Created by beders on 6/25/15.
 */
public class EventBusBridgeTest {
    static Vertx vertx;
    LongAdder counter = new LongAdder();
    EventBusBridge bridge;   // closed after each test in cleanUp()

    /** Boots a vert.x HTTP server with a SockJS bridge and a "reply" consumer used by the reply tests. */
    @BeforeClass
    public static void createServer() throws InterruptedException {
        vertx = Vertx.vertx();
        CountDownLatch latch = new CountDownLatch(1);
        Router router = Router.router(vertx);
        // events specific to THOPs are made available over the bridge
        SockJSHandler sockJSHandler = SockJSHandler.create(vertx);
        BridgeOptions options = new BridgeOptions();
        options.addOutboundPermitted(new PermittedOptions().setAddress("test")).
                addInboundPermitted(new PermittedOptions().setAddress("test")).
                addOutboundPermitted(new PermittedOptions().setAddress("end")).
                addInboundPermitted(new PermittedOptions().setAddress("end")).
                addInboundPermitted(new PermittedOptions().setAddress("reply")).
                addOutboundPermitted(new PermittedOptions().setAddress("replyTest"));
        sockJSHandler.bridge(options);
        router.route("/bridge/*").handler(sockJSHandler);
        // for reply test
        vertx.eventBus().consumer("reply", msg -> {
            vertx.eventBus().send("replyTest", "replyToMe", reply -> {
                assertEquals("bubu", reply.result().body().toString());
                reply.result().reply("ok", replyOfreply -> {
                    assertEquals("roger", replyOfreply.result().body().toString());
                });
                msg.reply("ok");
                vertx.eventBus().send("test", "ok");
            });
        });
        vertx.createHttpServer().requestHandler(router::accept).listen(8765, (res) -> {
            latch.countDown();
        });
        latch.await();
        System.out.println("Server listening on port 8765");
    }

    @AfterClass
    public static void stopServer() {
        vertx.close();
    }

    /** Closes the bridge opened by the previous test, if any. */
    @After
    public void cleanUp() {
        if (bridge != null)
            bridge.close();
    }

    /** Connecting and immediately closing the bridge completes. */
    @Test
    public void testConnect() throws Exception {
        CountDownLatch latch = new CountDownLatch(1);
        bridge = EventBusBridge.connect(URI.create("http://localhost:8765/bridge"), eb -> {
            eb.close();
            System.out.println("Closing");
            latch.countDown();
        });
        latch.await();
    }

    /** A message sent to "test" reaches a handler registered on "test". */
    @Test
    public void testSend() throws Exception {
        CountDownLatch latch = new CountDownLatch(1);
        bridge = EventBusBridge.connect(URI.create("http://localhost:8765/bridge"), eb -> {
            eb.registerHandler("test", msg -> {
                assertNotNull(msg);
                latch.countDown();
            });
            eb.send("test", "hello");
        });
        assertTrue(latch.await(5, TimeUnit.SECONDS));
    }

    /** A published message is delivered to both handlers registered on the same address. */
    @Test
    public void testPublish() throws Exception {
        CountDownLatch latch = new CountDownLatch(2);
        LongAdder adder = new LongAdder();
        bridge = EventBusBridge.connect(URI.create("http://localhost:8765/bridge"), eb -> {
            eb.registerHandler("test", msg -> {
                assertNotNull(msg);
                adder.increment();
                latch.countDown();
            }).registerHandler("test", msg -> {
                assertNotNull(msg);
                adder.increment();
                latch.countDown();
            });
            eb.publish("test", "hello");
        });
        assertTrue(latch.await(5, TimeUnit.SECONDS));
        // FIX: JUnit's assertEquals takes (expected, actual) — arguments were reversed.
        assertEquals(2, adder.longValue());
    }

    /** Round trip: send to "reply", server replies "ok" after exercising the replyTest handler. */
    @Test
    public void testSendWithReply() throws Exception {
        CountDownLatch latch = new CountDownLatch(1);
        LongAdder adder = new LongAdder();
        bridge = EventBusBridge.connect(URI.create("http://localhost:8765/bridge"), eb -> {
            eb.registerHandler("replyTest", msg -> {
                msg.reply("bubu");
                adder.increment();
            });
            eb.registerHandler("test", msg -> {
                assertEquals("ok", msg.body().toString());
                latch.countDown();
            });
            eb.send("reply", "to me", reply -> {
                assertEquals("ok", reply.body().toString());
                adder.increment();
            });
        });
        assertTrue(latch.await(5, TimeUnit.SECONDS));
        assertEquals(2, adder.longValue());
    }

    /** Round trip with a reply-to-the-reply ("roger") on top of the basic reply flow. */
    @Test
    public void testSendWithReply2() throws Exception {
        CountDownLatch latch = new CountDownLatch(1);
        LongAdder adder = new LongAdder();
        bridge = EventBusBridge.connect(URI.create("http://localhost:8765/bridge"), eb -> {
            eb.registerHandler("replyTest", msg -> {
                msg.reply("bubu", reply -> {
                    reply.result().reply("roger");
                    adder.increment();
                });
                adder.increment();
            });
            eb.registerHandler("test", msg -> {
                assertEquals("ok", msg.body().toString());
                latch.countDown();
            });
            eb.send("reply", "to me", reply -> {
                assertEquals("ok", reply.body().toString());
                adder.increment();
            });
        });
        assertTrue(latch.await(5, TimeUnit.SECONDS));
        assertEquals(3, adder.longValue());
    }

    /** JSON payloads are delivered both via asJson() and via a typed handler. */
    @Test
    public void testPublishJson() throws Exception {
        CountDownLatch latch = new CountDownLatch(2);
        LongAdder adder = new LongAdder();
        bridge = EventBusBridge.connect(URI.create("http://localhost:8765/bridge"), eb -> {
            eb.registerHandler("test", msg -> {
                assertNotNull(msg);
                assertEquals("world", msg.asJson().body().getString("hello"));
                adder.increment();
                latch.countDown();
            }).registerHandler("test", (EventBusBridge.EventBusMessage<JsonObject> msg) -> {
                assertNotNull(msg);
                assertEquals("world", msg.body().getString("hello"));
                adder.increment();
                latch.countDown();
            });
            eb.publish("test", new JsonObject().put("hello", "world"));
        });
        assertTrue(latch.await(5, TimeUnit.SECONDS));
        // FIX: JUnit's assertEquals takes (expected, actual) — arguments were reversed.
        assertEquals(2, adder.longValue());
    }

    /** Registering the same handler three times yields three deliveries of one publish. */
    @Test
    public void testRegisterHandler() throws Exception {
        CountDownLatch latch = new CountDownLatch(3);
        LongAdder adder = new LongAdder();
        MessageHandler mh = msg -> {
            System.out.println("Msg:" + adder.longValue());
            adder.increment();
            latch.countDown();
        };
        bridge = EventBusBridge.connect(URI.create("http://localhost:8765/bridge"), eb -> {
            eb.registerHandler("test", mh);
            eb.registerHandler("test", mh);
            eb.registerHandler("test", mh);
            eb.publish("test", new JsonObject().put("hello", "world"));
        });
        assertTrue(latch.await(5, TimeUnit.SECONDS));
        // FIX: JUnit's assertEquals takes (expected, actual) — arguments were reversed.
        assertEquals(3, adder.longValue());
    }

    /** A handler unregistered via its EventHandler reference stops receiving messages. */
    @Test
    public void testUnregisterHandler() throws Exception {
        CountDownLatch latch = new CountDownLatch(1);
        bridge = EventBusBridge.connect(URI.create("http://localhost:8765/bridge"), eb -> {
            System.out.println("EventBusBridgeTest.testUnregisterHandler");
            EventHandler<?> handleHello = this::handleHello;
            eb.registerHandler("test", handleHello);
            eb.registerHandler("end", msg -> {handleHello.unregister("test",eb); latch.countDown();});
            eb.send("test", "hello");
        });
        assertTrue(latch.await(5, TimeUnit.SECONDS));
        assertEquals(1, counter.longValue());
    }

    /** Helper for testUnregisterHandler: counts deliveries and triggers its own unregistration. */
    public void handleHello(Message<?> message, EventBusBridge eb) {
        System.out.println("EventBusBridgeTest.handleHello");
        assertNotNull(message);
        if (counter.longValue() == 0) {
            eb.send("end", "test");
        }
        eb.send("test", "hello again"); // this should not be seen by handleHello
        counter.increment();
    }

    /** A handler can unregister itself from within its own callback. */
    @Test
    public void testUnregisterSelf() throws Exception {
        CountDownLatch latch = new CountDownLatch(1);
        bridge = EventBusBridge.connect(URI.create("http://localhost:8765/bridge"), eb -> {
            eb.registerHandler("test", msg -> {
                System.out.println("EventBusBridgeTest.testUnregisterSelf");
                assertEquals("hello", msg.body());
                msg.unregister();
                eb.registerHandler("test", msg2 -> {
                    latch.countDown();
                });
            });
            eb.send("test", "hello");
            eb.send("test", "second hello"); // should not be called
        });
        assertTrue(latch.await(5, TimeUnit.SECONDS));
    }

    /** Both the one-arg and the (message, bus) handler shapes receive a publish. */
    @Test
    public void testHandlers() throws Exception {
        CountDownLatch latch = new CountDownLatch(2);
        bridge = EventBusBridge.connect(URI.create("http://localhost:8765/bridge"), eb -> {
            eb.registerHandler("test", msg -> {
                System.out.println(msg);
                assertNotNull(msg);
                latch.countDown();
            });
            eb.registerHandler("test", (msg, bus) -> { assertEquals(eb, bus); latch.countDown(); });
            eb.publish("test", "hello");
        });
        assertTrue(latch.await(5, TimeUnit.SECONDS));
    }

    /** close() flips isOpen() to false. */
    @Test
    public void testClose() throws Exception {
        bridge = EventBusBridge.connect(URI.create("http://localhost:8765/bridge"), eb -> {
            eb.close();
            assertFalse(eb.isOpen());
        });
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.service;
import java.io.*;
import java.net.InetAddress;
import java.util.*;
import java.util.concurrent.*;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.CompactionManager;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.Table;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.io.CompactionIterator.CompactedRow;
import org.apache.cassandra.io.ICompactSerializer;
import org.apache.cassandra.io.IndexSummary;
import org.apache.cassandra.io.SSTableReader;
import org.apache.cassandra.streaming.StreamOut;
import org.apache.cassandra.net.IVerbHandler;
import org.apache.cassandra.net.Message;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.streaming.StreamOutManager;
import org.apache.cassandra.utils.*;
import org.apache.log4j.Logger;
/**
* AntiEntropyService encapsulates "validating" (hashing) individual column families,
* exchanging MerkleTrees with remote nodes via a TreeRequest/Response conversation,
* and then triggering repairs for disagreeing ranges.
*
* Every Tree conversation has an 'initiator', where valid trees are sent after generation
* and where the local and remote tree will rendezvous in rendezvous(cf, endpoint, tree).
* Once the trees rendezvous, a Differencer is executed and the service can trigger repairs
* for disagreeing ranges.
*
* Tree comparison and repair triggering occur in the single threaded AE_SERVICE_STAGE.
*
* The steps taken to enact a repair are as follows:
* 1. A repair is triggered via nodeprobe:
* * Nodeprobe sends TreeRequest messages to all neighbors of the target node: when a node
* receives a TreeRequest, it will perform a readonly compaction to immediately validate
* the column family.
* 2. The compaction process validates the column family by:
* * Calling Validator.prepare(), which samples the column family to determine key distribution,
* * Calling Validator.add() in order for every row in the column family,
* * Calling Validator.complete() to indicate that all rows have been added.
* * Calling complete() indicates that a valid MerkleTree has been created for the column family.
* * The valid tree is returned to the requesting node via a TreeResponse.
* 3. When a node receives a TreeResponse, it passes the tree to rendezvous(), which checks for trees to
* rendezvous with / compare to:
* * If the tree is local, it is cached, and compared to any trees that were received from neighbors.
* * If the tree is remote, it is immediately compared to a local tree if one is cached. Otherwise,
* the remote tree is stored until a local tree can be generated.
* * A Differencer object is enqueued for each comparison.
* 4. Differencers are executed in AE_SERVICE_STAGE, to compare the two trees, and perform repair via the
* streaming api.
*/
public class AntiEntropyService
{
private static final Logger logger = Logger.getLogger(AntiEntropyService.class);
// timeout for outstanding requests (48 hours)
public final static long REQUEST_TIMEOUT = 48*60*60*1000;
// singleton enforcement
public static final AntiEntropyService instance = new AntiEntropyService();
/**
* Map of column families to remote endpoints that need to rendezvous. The
* first endpoint to arrive at the rendezvous will store its tree in the
* appropriate slot of the TreePair object, and the second to arrive will
* remove the stored tree, and compare it.
*
* This map is only accessed from AE_SERVICE_STAGE, so it is not synchronized.
*/
private final Map<CFPair, ExpiringMap<InetAddress, TreePair>> trees;
/**
 * Protected constructor. Use AntiEntropyService.instance.
 */
protected AntiEntropyService()
{
    // one rendezvous store per column family, created lazily in rendezvousPairs()
    trees = new HashMap<CFPair, ExpiringMap<InetAddress, TreePair>>();
}
/**
 * Fetches (lazily creating) the rendezvous store for the given column family.
 * Should only be called within AE_SERVICE_STAGE.
 *
 * @param cf Column family to fetch trees for.
 * @return The store of trees for the given cf.
 */
private ExpiringMap<InetAddress, TreePair> rendezvousPairs(CFPair cf)
{
    ExpiringMap<InetAddress, TreePair> store = trees.get(cf);
    if (store != null)
        return store;
    // first request for this cf: create a store whose entries expire after REQUEST_TIMEOUT
    store = new ExpiringMap<InetAddress, TreePair>(REQUEST_TIMEOUT);
    trees.put(cf, store);
    return store;
}
/**
 * Return all of the neighbors with whom we share data.
 *
 * @param table The table (keyspace) to inspect.
 * @return Every endpoint that replicates at least one locally-held range,
 *         excluding the local node itself.
 */
public static Set<InetAddress> getNeighbors(String table)
{
    StorageService ss = StorageService.instance;
    Set<InetAddress> neighbors = new HashSet<InetAddress>();
    Map<Range, List<InetAddress>> replicaSets = ss.getRangeToAddressMap(table);
    for (Range range : ss.getLocalRanges(table))
    {
        // for every range stored locally (replica or original) collect neighbors storing copies
        neighbors.addAll(replicaSets.get(range));
    }
    // the local node trivially "shares" its own ranges; it is not a neighbor
    neighbors.remove(FBUtilities.getLocalAddress());
    return neighbors;
}
/**
 * Register a tree from the given endpoint to be compared to the appropriate trees
 * in AE_SERVICE_STAGE when they become available.
 *
 * Must run in AE_SERVICE_STAGE: the backing trees map is unsynchronized.
 * The first tree to arrive for a (cf, neighbor) pair is stored in a TreePair
 * slot; the second to arrive removes it and a Differencer is queued.
 *
 * @param cf The column family of the tree.
 * @param endpoint The endpoint which owns the given tree.
 * @param tree The tree for the endpoint.
 */
private void rendezvous(CFPair cf, InetAddress endpoint, MerkleTree tree)
{
    InetAddress LOCAL = FBUtilities.getLocalAddress();
    // return the rendezvous pairs for this cf
    ExpiringMap<InetAddress, TreePair> ctrees = rendezvousPairs(cf);
    List<Differencer> differencers = new ArrayList<Differencer>();
    if (LOCAL.equals(endpoint))
    {
        // we're registering a local tree: rendezvous with all remote trees
        for (InetAddress neighbor : getNeighbors(cf.left))
        {
            TreePair waiting = ctrees.remove(neighbor);
            if (waiting != null && waiting.right != null)
            {
                // the neighbor beat us to the rendezvous: queue differencing
                // (waiting.right holds the remote tree)
                differencers.add(new Differencer(cf, LOCAL, neighbor,
                                                 tree, waiting.right));
                continue;
            }
            // else, the local tree is first to the rendezvous: store and wait
            ctrees.put(neighbor, new TreePair(tree, null));
            logger.debug("Stored local tree for " + cf + " to wait for " + neighbor);
        }
    }
    else
    {
        // we're registering a remote tree: rendezvous with the local tree
        TreePair waiting = ctrees.remove(endpoint);
        if (waiting != null && waiting.left != null)
        {
            // the local tree beat us to the rendezvous: queue differencing
            differencers.add(new Differencer(cf, LOCAL, endpoint,
                                             waiting.left, tree));
        }
        else
        {
            // else, the remote tree is first to the rendezvous: store and wait
            ctrees.put(endpoint, new TreePair(null, tree));
            logger.debug("Stored remote tree for " + cf + " from " + endpoint);
        }
    }
    // comparisons execute in the single-threaded AE_SERVICE_STAGE
    for (Differencer differencer : differencers)
    {
        logger.info("Queueing comparison " + differencer);
        StageManager.getStage(StageManager.AE_SERVICE_STAGE).execute(differencer);
    }
}
/**
 * Called by a Validator to send a valid tree to endpoints storing
 * replicas of local data.
 *
 * @param validator A locally generated validator.
 * @param local The local endpoint.
 * @param neighbors A list of neighbor endpoints to send the tree to.
 */
void notifyNeighbors(Validator validator, InetAddress local, Collection<InetAddress> neighbors)
{
    try
    {
        // serialize the tree once, then fan the same message out to every neighbor
        Message message = TreeResponseVerbHandler.makeVerb(local, validator);
        logger.info("Sending AEService tree for " + validator.cf + " to: " + neighbors);
        MessagingService ms = MessagingService.instance;
        for (InetAddress neighbor : neighbors)
            ms.sendOneWay(message, neighbor);
    }
    catch (Exception e)
    {
        // best effort: a failed notification simply times out at the remote rendezvous
        logger.error("Could not send valid tree to endpoints: " + neighbors, e);
    }
}
/**
 * Should only be used in AE_SERVICE_STAGE or for testing.
 *
 * @param table Table containing cf.
 * @param cf The column family.
 * @param remote The remote endpoint for the rendezvous.
 * @return The tree pair for the given rendezvous if it exists, else null.
 */
TreePair getRendezvousPair_TestsOnly(String table, String cf, InetAddress remote)
{
    CFPair pair = new CFPair(table, cf);
    return rendezvousPairs(pair).get(remote);
}
/**
 * A Strategy to handle building and validating a merkle tree for a column family.
 *
 * Lifecycle:
 * 1. prepare() - Initialize tree with samples.
 * 2. add() - 0 or more times, to add hashes to the tree.
 * 3. complete() - Enqueues any operations that were blocked waiting for a valid tree.
 */
public static class Validator implements Callable<Object>
{
    public final CFPair cf; // TODO keep a CFS reference as a field instead of its string representation
    public final MerkleTree tree;
    // the minimum token sorts first, but falls into the last range
    private transient List<MerkleTree.RowHash> minrows;
    // null when all rows with the min token have been consumed
    private transient Token mintoken;
    // count of rows hashed so far, used for logging only
    private transient long validated;
    // the invalid tree range currently being filled; null before the first add()
    private transient MerkleTree.TreeRange range;
    // iterator over the tree's invalid ranges, in token order; null until prepare()
    private transient MerkleTree.TreeRangeIterator ranges;
    // sentinel hash recorded for ranges that contain no rows
    public final static MerkleTree.RowHash EMPTY_ROW = new MerkleTree.RowHash(null, new byte[0]);

    /**
     * Builds a Validator with a freshly allocated tree of the recommended depth.
     *
     * @param cf The column family to validate.
     */
    Validator(CFPair cf)
    {
        this(cf,
             // TODO: memory usage (maxsize) should either be tunable per
             // CF, globally, or as shared for all CFs in a cluster
             new MerkleTree(DatabaseDescriptor.getPartitioner(), MerkleTree.RECOMMENDED_DEPTH, (int)Math.pow(2, 15)));
    }

    /**
     * Builds a Validator around an existing tree (used when deserializing a
     * remote Validator in TreeResponseVerbHandler).
     *
     * @param cf The column family the tree describes.
     * @param tree The tree for the column family.
     */
    Validator(CFPair cf, MerkleTree tree)
    {
        assert cf != null && tree != null;
        this.cf = cf;
        this.tree = tree;
        minrows = new ArrayList<MerkleTree.RowHash>();
        mintoken = null;
        validated = 0;
        range = null;
        ranges = null;
    }

    /**
     * Initializes the tree by splitting it at tokens sampled from the CF's
     * index, concentrating tree resolution where the data actually lives.
     *
     * @param cfs The column family store to sample.
     */
    public void prepare(ColumnFamilyStore cfs)
    {
        List<DecoratedKey> keys = new ArrayList<DecoratedKey>();
        for (IndexSummary.KeyPosition info: cfs.allIndexPositions())
            keys.add(info.key);
        if (keys.isEmpty())
        {
            // use an even tree distribution
            tree.init();
        }
        else
        {
            int numkeys = keys.size();
            Random random = new Random();
            // sample the column family using random keys from the index
            // (split() returns false once the tree reaches its size limit)
            while (true)
            {
                DecoratedKey dk = keys.get(random.nextInt(numkeys));
                if (!tree.split(dk.token))
                    break;
            }
        }
        logger.debug("Prepared AEService tree of size " + tree.size() + " for " + cf);
        // iterate the entire ring: a wrapping range from min token to min token
        mintoken = tree.partitioner().getMinimumToken();
        ranges = tree.invalids(new Range(mintoken, mintoken));
    }

    /**
     * Called (in order) for every row present in the CF.
     * Hashes the row, and adds it to the tree being built.
     *
     * There are four possible cases:
     *  1. Token is greater than range.right (we haven't generated a range for it yet),
     *  2. Token is less than/equal to range.left (the range was valid),
     *  3. Token is contained in the range (the range is in progress),
     *  4. No more invalid ranges exist.
     *
     * TODO: Because we only validate completely empty trees at the moment, we
     * do not bother dealing with case 2 and case 4 should result in an error.
     *
     * Additionally, there is a special case for the minimum token, because
     * although it sorts first, it is contained in the last possible range.
     *
     * @param row The row.
     */
    public void add(CompactedRow row)
    {
        if (mintoken != null)
        {
            assert ranges != null : "Validator was not prepared()";
            // check for the minimum token special case
            if (row.key.token.compareTo(mintoken) == 0)
            {
                // and store it to be appended when we complete
                minrows.add(rowHash(row));
                return;
            }
            // a non-minimum token arrived: no more minimum-token rows can follow
            mintoken = null;
        }
        if (range == null)
            range = ranges.next();
        // generate new ranges as long as case 1 is true
        while (!range.contains(row.key.token))
        {
            // add the empty hash, and move to the next range
            range.addHash(EMPTY_ROW);
            range = ranges.next();
        }
        // case 3 must be true: mix in the hashed row
        range.addHash(rowHash(row));
    }

    /** Hashes the row key and serialized content into a RowHash for the tree. */
    private MerkleTree.RowHash rowHash(CompactedRow row)
    {
        validated++;
        // MerkleTree uses XOR internally, so we want lots of output bits here
        byte[] rowhash = FBUtilities.hash("SHA-256", row.key.key.getBytes(), row.buffer.getData());
        return new MerkleTree.RowHash(row.key.token, rowhash);
    }

    /**
     * Registers the newly created tree for rendezvous in AE_SERVICE_STAGE.
     */
    public void complete()
    {
        assert ranges != null : "Validator was not prepared()";
        // mark the in-progress range and any remaining ranges as empty
        if (range != null)
            range.addHash(EMPTY_ROW);
        while (ranges.hasNext())
        {
            range = ranges.next();
            range.addHash(EMPTY_ROW);
        }
        // add rows with the minimum token to the final range
        if (!minrows.isEmpty())
            for (MerkleTree.RowHash minrow : minrows)
                range.addHash(minrow);
        // call() below will run in AE_SERVICE_STAGE to register and broadcast the tree
        StageManager.getStage(StageManager.AE_SERVICE_STAGE).submit(this);
        logger.debug("Validated " + validated + " rows into AEService tree for " + cf);
    }

    /**
     * Called after the validation lifecycle to trigger additional action
     * with the now valid tree. Runs in AE_SERVICE_STAGE.
     *
     * @return A meaningless object.
     */
    public Object call() throws Exception
    {
        AntiEntropyService aes = AntiEntropyService.instance;
        InetAddress local = FBUtilities.getLocalAddress();
        Collection<InetAddress> neighbors = getNeighbors(cf.left);
        // store the local tree and then broadcast it to our neighbors
        aes.rendezvous(cf, local, tree);
        aes.notifyNeighbors(this, local, neighbors);
        // return any old object
        return AntiEntropyService.class;
    }
}
/**
 * Compares two trees, and launches repairs for disagreeing ranges.
 */
public static class Differencer implements Runnable
{
    public final CFPair cf;
    public final InetAddress local;
    public final InetAddress remote;
    public final MerkleTree ltree;
    public final MerkleTree rtree;
    // filled by run(): ranges where the trees disagree AND both endpoints are replicas
    public final List<MerkleTree.TreeRange> differences;

    /**
     * @param cf The column family being compared.
     * @param local The local endpoint (owner of ltree).
     * @param remote The remote endpoint (owner of rtree).
     * @param ltree The locally generated tree.
     * @param rtree The tree received from the remote endpoint.
     */
    public Differencer(CFPair cf, InetAddress local, InetAddress remote, MerkleTree ltree, MerkleTree rtree)
    {
        this.cf = cf;
        this.local = local;
        this.remote = remote;
        this.ltree = ltree;
        this.rtree = rtree;
        differences = new ArrayList<MerkleTree.TreeRange>();
    }

    /**
     * Compares our trees, and triggers repairs for any ranges that mismatch.
     */
    public void run()
    {
        StorageService ss = StorageService.instance;
        // restore partitioners (in case we were serialized)
        if (ltree.partitioner() == null)
            ltree.partitioner(ss.getPartitioner());
        if (rtree.partitioner() == null)
            rtree.partitioner(ss.getPartitioner());
        // determine the ranges where responsibility overlaps
        // (typed HashSet: the raw type previously used here produced unchecked warnings)
        Set<Range> interesting = new HashSet<Range>(ss.getRangesForEndPoint(cf.left, local));
        interesting.retainAll(ss.getRangesForEndPoint(cf.left, remote));
        // compare trees, and filter out uninteresting differences
        for (MerkleTree.TreeRange diff : MerkleTree.difference(ltree, rtree))
        {
            for (Range localrange: interesting)
            {
                if (diff.intersects(localrange))
                {
                    differences.add(diff);
                    break; // the inner loop
                }
            }
        }
        // choose a repair method based on the significance of the difference
        float difference = differenceFraction();
        try
        {
            if (difference == 0.0)
            {
                // differenceFraction() is exactly 0.0 only when no ranges differ
                logger.debug("Endpoints " + local + " and " + remote + " are consistent for " + cf);
                return;
            }
            performStreamingRepair();
        }
        catch(IOException e)
        {
            throw new RuntimeException(e);
        }
    }

    /**
     * @return the fraction of the keyspace that is different, as represented by our
     * list of different ranges. A range at depth 0 == 1.0, at depth 1 == 0.5, etc.
     */
    float differenceFraction()
    {
        double fraction = 0.0;
        for (MerkleTree.TreeRange diff : differences)
            fraction += 1.0 / Math.pow(2, diff.depth);
        return (float)fraction;
    }

    /**
     * Sends our list of differences to the remote endpoint using the
     * Streaming API.
     *
     * @throws IOException if anticompaction or the sstable transfer fails.
     */
    void performStreamingRepair() throws IOException
    {
        logger.info("Performing streaming repair of " + differences.size() + " ranges to " + remote + " for " + cf);
        ColumnFamilyStore cfstore = Table.open(cf.left).getColumnFamilyStore(cf.right);
        try
        {
            List<Range> ranges = new ArrayList<Range>(differences);
            // anticompaction isolates the disagreeing ranges into dedicated sstables
            final List<SSTableReader> sstables = CompactionManager.instance.submitAnticompaction(cfstore, ranges, remote).get();
            Future f = StageManager.getStage(StageManager.STREAM_STAGE).submit(new WrappedRunnable()
            {
                protected void runMayThrow() throws Exception
                {
                    StreamOut.transferSSTables(remote, sstables, cf.left);
                    StreamOutManager.remove(remote);
                }
            });
            // block until the transfer completes so failures surface here
            f.get();
        }
        catch(Exception e)
        {
            throw new IOException("Streaming repair failed.", e);
        }
        logger.info("Finished streaming repair to " + remote + " for " + cf);
    }

    public String toString()
    {
        return "#<Differencer " + cf + " local=" + local + " remote=" + remote + ">";
    }
}
/**
 * Handler for requests from remote nodes to generate a valid tree.
 * The payload is a CFPair representing the columnfamily to validate.
 */
public static class TreeRequestVerbHandler implements IVerbHandler, ICompactSerializer<CFPair>
{
    public static final TreeRequestVerbHandler SERIALIZER = new TreeRequestVerbHandler();

    /**
     * Builds a TREE_REQUEST message carrying the (table, cf) pair to validate,
     * addressed from the local node.
     *
     * @param table Table containing cf.
     * @param cf The column family to validate.
     * @return The message to send to a neighbor.
     */
    static Message makeVerb(String table, String cf)
    {
        try
        {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            DataOutputStream dos = new DataOutputStream(bos);
            SERIALIZER.serialize(new CFPair(table, cf), dos);
            return new Message(FBUtilities.getLocalAddress(), StageManager.AE_SERVICE_STAGE, StorageService.Verb.TREE_REQUEST, bos.toByteArray());
        }
        catch(IOException e)
        {
            // writing to an in-memory stream should never fail
            throw new RuntimeException(e);
        }
    }

    /** Writes the pair as two UTF strings: table, then cf. */
    public void serialize(CFPair treerequest, DataOutputStream dos) throws IOException
    {
        dos.writeUTF(treerequest.left);
        dos.writeUTF(treerequest.right);
    }

    /** Reads a pair written by serialize(): table, then cf. */
    public CFPair deserialize(DataInputStream dis) throws IOException
    {
        return new CFPair(dis.readUTF(), dis.readUTF());
    }

    /**
     * Trigger a validation compaction which will return the tree upon completion.
     */
    public void doVerb(Message message)
    {
        byte[] bytes = message.getMessageBody();
        ByteArrayInputStream buffer = new ByteArrayInputStream(bytes);
        try
        {
            CFPair cf = this.deserialize(new DataInputStream(buffer));
            // trigger readonly-compaction
            logger.debug("Queueing validation compaction for " + cf + ", " + message.getFrom());
            ColumnFamilyStore store = Table.open(cf.left).getColumnFamilyStore(cf.right);
            Validator validator = new Validator(cf);
            CompactionManager.instance.submitValidation(store, validator);
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }
    }
}
/**
 * Handler for responses from remote nodes which contain a valid tree.
 * The payload is a completed Validator object from the remote endpoint.
 */
public static class TreeResponseVerbHandler implements IVerbHandler, ICompactSerializer<Validator>
{
    public static final TreeResponseVerbHandler SERIALIZER = new TreeResponseVerbHandler();

    /**
     * Builds a TREE_RESPONSE message carrying the validator's tree.
     *
     * @param local The local endpoint the message is addressed from.
     * @param validator The completed validator whose tree should be shipped.
     * @return The message to send back to the requesting node.
     */
    static Message makeVerb(InetAddress local, Validator validator)
    {
        try
        {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            DataOutputStream dos = new DataOutputStream(bos);
            SERIALIZER.serialize(validator, dos);
            return new Message(local, StageManager.AE_SERVICE_STAGE, StorageService.Verb.TREE_RESPONSE, bos.toByteArray());
        }
        catch(IOException e)
        {
            // writing to an in-memory stream should never fail
            throw new RuntimeException(e);
        }
    }

    /** Writes the cf pair via TreeRequestVerbHandler, then the tree via Java serialization. */
    public void serialize(Validator v, DataOutputStream dos) throws IOException
    {
        TreeRequestVerbHandler.SERIALIZER.serialize(v.cf, dos);
        ObjectOutputStream oos = new ObjectOutputStream(dos);
        oos.writeObject(v.tree);
        oos.flush();
    }

    /** Reads what serialize() wrote and wraps it in a new Validator. */
    public Validator deserialize(DataInputStream dis) throws IOException
    {
        final CFPair cf = TreeRequestVerbHandler.SERIALIZER.deserialize(dis);
        ObjectInputStream ois = new ObjectInputStream(dis);
        try
        {
            return new Validator(cf, (MerkleTree)ois.readObject());
        }
        catch(Exception e)
        {
            throw new RuntimeException(e);
        }
    }

    public void doVerb(Message message)
    {
        byte[] bytes = message.getMessageBody();
        ByteArrayInputStream buffer = new ByteArrayInputStream(bytes);
        try
        {
            // deserialize the remote tree, and register it
            Validator rvalidator = this.deserialize(new DataInputStream(buffer));
            AntiEntropyService.instance.rendezvous(rvalidator.cf, message.getFrom(), rvalidator.tree);
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }
    }
}
/**
 * A tuple of table and cf: left is the table (keyspace) name, right the
 * column family name. Used as the key for rendezvous bookkeeping.
 */
static class CFPair extends Pair<String,String>
{
    public CFPair(String table, String cf)
    {
        super(table, cf);
        assert table != null && cf != null;
    }
}
/**
 * A tuple of a local and remote tree (left = local, right = remote).
 * One of the trees should be null, but not both: the pair records which
 * side arrived first at the rendezvous.
 */
static class TreePair extends Pair<MerkleTree,MerkleTree>
{
    public TreePair(MerkleTree local, MerkleTree remote)
    {
        super(local, remote);
        // exactly one of the two slots may be filled
        assert local != null ^ remote != null;
    }
}
}
| |
package edu.umass.cs.jfoley.coop.front;
import ciir.jfoley.chai.collections.Pair;
import ciir.jfoley.chai.collections.TopKHeap;
import ciir.jfoley.chai.collections.util.ListFns;
import edu.umass.cs.ciir.waltz.compat.galago.iters.GalagoCountIterator;
import edu.umass.cs.ciir.waltz.dociter.ListBlockPostingsIterator;
import edu.umass.cs.ciir.waltz.dociter.movement.BlockPostingsMover;
import edu.umass.cs.ciir.waltz.dociter.movement.IdSetMover;
import edu.umass.cs.ciir.waltz.dociter.movement.Mover;
import edu.umass.cs.ciir.waltz.dociter.movement.PostingMover;
import edu.umass.cs.ciir.waltz.postings.SimplePosting;
import edu.umass.cs.ciir.waltz.postings.positions.PositionsList;
import org.junit.Test;
import org.lemurproject.galago.core.retrieval.ScoredDocument;
import org.lemurproject.galago.core.retrieval.iterator.ScoreCombinationIterator;
import org.lemurproject.galago.core.retrieval.iterator.ScoreIterator;
import org.lemurproject.galago.core.retrieval.iterator.scoring.JelinekMercerScoringIterator;
import org.lemurproject.galago.core.retrieval.processing.ScoringContext;
import org.lemurproject.galago.core.retrieval.query.NodeParameters;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.*;
import java.util.function.Function;
import static org.junit.Assert.assertEquals;
/**
* @author jfoley
*/
public class QueryEngineTest {
/**
 * Leaf stub that only reports a PositionsList result class; calculate()
 * is never expected to run in these tests and throws if it does.
 */
public static class FakePositionsNode implements QueryEngine.QCNode<PositionsList> {
    @Override public Class<PositionsList> getResultClass() { return PositionsList.class; }
    @Override public QueryEngine.ChildMovingLogic getMovingLogic() { return QueryEngine.ChildMovingLogic.NA; }
    @Override public Collection<? extends QueryEngine.QCNode<?>> children() { return Collections.emptyList(); }
    @Nullable
    @Override
    public PositionsList calculate(QueryEngine.QueryEvaluationContext ctx, int document) { throw new UnsupportedOperationException(); }
}
@Test
public void testCalculateMovingLogic() {
    FakePositionsNode a = new FakePositionsNode();
    FakePositionsNode b = new FakePositionsNode();
    FakePositionsNode c = new FakePositionsNode();
    // phrase nodes require all children to match -> AND movement
    QueryEngine.AbstractPhraseNode ab = new QueryEngine.AbstractPhraseNode(Arrays.asList(a,b));
    QueryEngine.AbstractPhraseNode cab = new QueryEngine.AbstractPhraseNode(Arrays.asList(ab,c));
    QueryEngine.AbstractPhraseNode aab = new QueryEngine.AbstractPhraseNode(Arrays.asList(ab,a));
    // synonym nodes match if any child matches -> OR movement
    QueryEngine.QCNode<?> cORab = new QueryEngine.AbstractSynonymNode(Arrays.asList(c, ab));
    // Use JUnit assertions instead of the `assert` keyword: the JVM `assert`
    // statement is a no-op unless the tests run with -ea, so the original
    // checks could silently pass without executing.
    assertEquals(QueryEngine.ChildMovingLogic.AND, ab.calculateMovingLogic());
    assertEquals(QueryEngine.ChildMovingLogic.AND, cab.calculateMovingLogic());
    assertEquals(QueryEngine.ChildMovingLogic.AND, aab.calculateMovingLogic());
    assertEquals(QueryEngine.ChildMovingLogic.OR, cORab.getMovingLogic());
    assertEquals(QueryEngine.ChildMovingLogic.OR, cORab.calculateMovingLogic());
}
/**
 * In-memory counts node backed by an explicit (document, count) postings list.
 * Acts as a {@code QCNode<Integer>}, a mover source, and a countable node for tests.
 */
public static class FakeCountsNode implements QueryEngine.QCNode<Integer>, QueryEngine.MoverNode, QueryEngine.CountableNode {
    final List<Pair<Integer, Integer>> postings;
    // collection frequency: sum of all counts in the postings list
    int cf;
    public FakeCountsNode(List<Pair<Integer, Integer>> postings) {
        this.postings = postings;
        this.cf = 0;
        for (Pair<Integer, Integer> posting : postings) {
            cf += posting.right;
        }
    }
    @Override
    public QueryEngine.ChildMovingLogic getMovingLogic() {
        return QueryEngine.ChildMovingLogic.NA;
    }
    @Override
    public Collection<? extends QueryEngine.QCNode<?>> children() { return Collections.emptyList(); }
    @Nullable
    @Override
    public Integer calculate(QueryEngine.QueryEvaluationContext ctx, int document) {
        // linear scan is fine for these tiny test postings; null means no entry for document
        for (Pair<Integer, Integer> posting : postings) {
            if(posting.left == document) {
                return posting.right;
            }
        }
        return null;
    }
    @Override
    public Collection<Mover> getChildMovers() {
        return Collections.singletonList(new IdSetMover(ListFns.map(postings, Pair::getKey)));
    }
    @Override
    public int getCollectionFrequency() {
        return cf;
    }
    // Adapts the postings list to a PostingMover, for feeding Galago iterators.
    public PostingMover<Integer> asMover() {
        return new BlockPostingsMover<>(new ListBlockPostingsIterator<>(ListFns.lazyMap(postings, (pair) -> new SimplePosting<>(pair.left, pair.right))));
    }
}
@Test
public void testQueryLikelihood() throws IOException {
    // Fixed document/collection statistics so the smoothed scores are deterministic.
    final int docLength = 15;
    final int collectionLength = 1000;
    // expect: 2,4,1,6,bgs
    List<Pair<Integer, Integer>> aPostings = Arrays.asList(Pair.of(1, 5), Pair.of(2, 1), Pair.of(4, 3));
    List<Pair<Integer, Integer>> bPostings = Arrays.asList( Pair.of(2, 5), Pair.of(4, 1), Pair.of(6, 3));
    FakeCountsNode aCounts = new FakeCountsNode(aPostings);
    FakeCountsNode bCounts = new FakeCountsNode(bPostings);
    // (removed an unused local GalagoCountIterator that was constructed and never read)
    // every document has the same length (15), so length normalization is uniform
    GalagoCountIterator lengths = new GalagoCountIterator(new BlockPostingsMover<>(new ListBlockPostingsIterator<>(
        ListFns.fill(10, (x) -> new SimplePosting<>(x, 15)))));
    NodeParameters jmParam = NodeParameters.create()
        .set("lambda", 0.8)
        .set("collectionLength", collectionLength)
        .set("maximumCount", 1);
    // Reference implementation: Galago's Jelinek-Mercer scoring, combined and normalized.
    ScoreIterator galagoQL = new ScoreCombinationIterator(
        NodeParameters.create().set("norm", true),
        new ScoreIterator[] {
            new JelinekMercerScoringIterator(
                jmParam.clone().set("nodeFrequency", aCounts.getCollectionFrequency()),
                lengths,
                new GalagoCountIterator(aCounts.asMover())
            ),
            new JelinekMercerScoringIterator(
                jmParam.clone().set("nodeFrequency", bCounts.getCollectionFrequency()),
                lengths,
                new GalagoCountIterator(bCounts.asMover())
            )
        }
    );
    Function<Integer, Double> scoreWithGalago = (docId) -> {
        ScoringContext ctx = new ScoringContext(docId);
        try {
            galagoQL.reset();
            galagoQL.syncTo(docId);
            return galagoQL.score(ctx);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    };
    // System under test: our CombineNode over linearly-smoothed count nodes.
    QueryEngine.CombineNode ql = new QueryEngine.CombineNode(Arrays.asList(
        new QueryEngine.LinearSmoothingNode(aCounts),
        new QueryEngine.LinearSmoothingNode(bCounts)));
    assertEquals(QueryEngine.ChildMovingLogic.OR, ql.calculateMovingLogic());
    // Minimal evaluation context: constant lengths, no n-gram lookup needed.
    QueryEngine.QueryEvaluationContext fakeIndex = new QueryEngine.QueryEvaluationContext() {
        @Override public int getLength(int document) { return docLength; }
        @Override public double getCollectionLength() { return collectionLength; }
        @Override public QueryEngine.QCNode<Integer> getUnigram(int lhs) throws IOException { return null; }
        @Override public QueryEngine.QCNode<Integer> getBigram(int lhs, int rhs) throws IOException { return null; }
        @Override public QueryEngine.QCNode<Integer> getUBigram(int lhs, int rhs) throws IOException { return null; }
    };
    ql.setup(fakeIndex);
    // documents with no postings for either term all get the same background score
    double backgroundScore = ql.score(fakeIndex, 0);
    assertEquals(backgroundScore, ql.score(fakeIndex, 7), 0.00001);
    ArrayList<ScoredDocument> sdoc = new ArrayList<>();
    for (int i = 0; i < 7; i++) {
        double newScore = ql.score(fakeIndex, i);
        double gScore = scoreWithGalago.apply(i);
        //System.out.println("["+i+"] neo="+newScore+" galago="+gScore);
        // our scores must agree with Galago's within a small tolerance
        assertEquals(gScore, newScore, 0.001);
        sdoc.add(new ScoredDocument(i, newScore));
    }
    Collections.sort(sdoc, new ScoredDocument.ScoredDocumentComparator().reversed());
    // ranking order matches the expectation at the top of the test: 2, 4, 1, 6
    assertEquals(2, sdoc.get(0).document);
    assertEquals(4, sdoc.get(1).document);
    assertEquals(1, sdoc.get(2).document);
    assertEquals(6, sdoc.get(3).document);
    // background-scores
    assertEquals(0, sdoc.get(4).document);
    assertEquals(backgroundScore, sdoc.get(4).score, 0.0001);
    assertEquals(3, sdoc.get(5).document);
    assertEquals(backgroundScore, sdoc.get(5).score, 0.0001);
    assertEquals(5, sdoc.get(6).document);
    assertEquals(backgroundScore, sdoc.get(6).score, 0.0001);
    // Now try mover:
    TopKHeap<ScoredDocument> best = new TopKHeap<>(3);
    Mover mover = QueryEngine.createMover(ql);
    mover.execute((doc) -> {
        best.offer(new ScoredDocument(doc, ql.score(fakeIndex, doc)));
    });
    // OR-logic visits all 4 non-zero documents:
    assertEquals(4, best.getTotalSeen());
    List<ScoredDocument> results = new ArrayList<>(best.getSorted());
    assertEquals(3, results.size());
    assertEquals(2, results.get(0).document);
    assertEquals(4, results.get(1).document);
    assertEquals(1, results.get(2).document);
    // heap top-3 must equal the top-3 of the exhaustively scored ranking
    assertEquals(ListFns.slice(sdoc, 0, 3), results);
}
}
| |
/*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.session;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.web.ServerProperties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.session.data.redis.RedisFlushMode;
import org.springframework.session.hazelcast.HazelcastFlushMode;
/**
 * Configuration properties for Spring Session.
 *
 * @author Tommy Ludwig
 * @author Stephane Nicoll
 * @author Vedran Pavic
 * @since 1.4.0
 */
@ConfigurationProperties(prefix = "spring.session")
public class SessionProperties {

	/**
	 * Session store type.
	 */
	private StoreType storeType;

	// Session timeout in seconds, seeded from the server's session settings (may be null).
	private Integer timeout;

	private final Hazelcast hazelcast = new Hazelcast();

	private final Jdbc jdbc = new Jdbc();

	private final Mongo mongo = new Mongo();

	private final Redis redis = new Redis();

	public SessionProperties(ObjectProvider<ServerProperties> serverProperties) {
		// getIfUnique() yields null when zero or multiple ServerProperties beans exist
		ServerProperties properties = serverProperties.getIfUnique();
		this.timeout = (properties != null ? properties.getSession().getTimeout() : null);
	}

	public StoreType getStoreType() {
		return this.storeType;
	}

	public void setStoreType(StoreType storeType) {
		this.storeType = storeType;
	}

	/**
	 * Return the session timeout in seconds.
	 * @return the session timeout in seconds
	 * @see ServerProperties#getSession()
	 */
	public Integer getTimeout() {
		return this.timeout;
	}

	public Hazelcast getHazelcast() {
		return this.hazelcast;
	}

	public Jdbc getJdbc() {
		return this.jdbc;
	}

	public Mongo getMongo() {
		return this.mongo;
	}

	public Redis getRedis() {
		return this.redis;
	}

	/**
	 * Hazelcast-backed session store settings.
	 */
	public static class Hazelcast {

		/**
		 * Name of the map used to store sessions.
		 */
		private String mapName = "spring:session:sessions";

		/**
		 * Sessions flush mode.
		 */
		private HazelcastFlushMode flushMode = HazelcastFlushMode.ON_SAVE;

		public String getMapName() {
			return this.mapName;
		}

		public void setMapName(String mapName) {
			this.mapName = mapName;
		}

		public HazelcastFlushMode getFlushMode() {
			return this.flushMode;
		}

		public void setFlushMode(HazelcastFlushMode flushMode) {
			this.flushMode = flushMode;
		}

	}

	/**
	 * JDBC-backed session store settings.
	 */
	public static class Jdbc {

		private static final String DEFAULT_SCHEMA_LOCATION = "classpath:org/springframework/"
				+ "session/jdbc/schema-@@platform@@.sql";

		private static final String DEFAULT_TABLE_NAME = "SPRING_SESSION";

		/**
		 * Path to the SQL file to use to initialize the database schema.
		 */
		private String schema = DEFAULT_SCHEMA_LOCATION;

		/**
		 * Name of database table used to store sessions.
		 */
		private String tableName = DEFAULT_TABLE_NAME;

		private final Initializer initializer = new Initializer();

		public String getSchema() {
			return this.schema;
		}

		public void setSchema(String schema) {
			this.schema = schema;
		}

		public String getTableName() {
			return this.tableName;
		}

		public void setTableName(String tableName) {
			this.tableName = tableName;
		}

		public Initializer getInitializer() {
			return this.initializer;
		}

		/**
		 * Schema initialization settings; non-static so it can consult the
		 * enclosing Jdbc instance's table name and schema location.
		 */
		public class Initializer {

			/**
			 * Create the required session tables on startup if necessary. Enabled
			 * automatically if the default table name is set or a custom schema is
			 * configured.
			 */
			private Boolean enabled;

			public boolean isEnabled() {
				// an explicit setting always wins
				if (this.enabled != null) {
					return this.enabled;
				}
				// otherwise, initialize when the default table name is used
				// (the bundled schema creates it) or a custom schema is configured
				boolean defaultTableName = DEFAULT_TABLE_NAME
						.equals(Jdbc.this.getTableName());
				boolean customSchema = !DEFAULT_SCHEMA_LOCATION
						.equals(Jdbc.this.getSchema());
				return (defaultTableName || customSchema);
			}

			public void setEnabled(boolean enabled) {
				this.enabled = enabled;
			}

		}

	}

	/**
	 * MongoDB-backed session store settings.
	 */
	public static class Mongo {

		/**
		 * Collection name used to store sessions.
		 */
		private String collectionName = "sessions";

		public String getCollectionName() {
			return this.collectionName;
		}

		public void setCollectionName(String collectionName) {
			this.collectionName = collectionName;
		}

	}

	/**
	 * Redis-backed session store settings.
	 */
	public static class Redis {

		/**
		 * Namespace for keys used to store sessions.
		 */
		private String namespace = "";

		/**
		 * Sessions flush mode.
		 */
		private RedisFlushMode flushMode = RedisFlushMode.ON_SAVE;

		public String getNamespace() {
			return this.namespace;
		}

		public void setNamespace(String namespace) {
			this.namespace = namespace;
		}

		public RedisFlushMode getFlushMode() {
			return this.flushMode;
		}

		public void setFlushMode(RedisFlushMode flushMode) {
			this.flushMode = flushMode;
		}

	}

}
| |
package com.app.rahul.popularmovies.fragment;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.support.v7.graphics.Palette;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.app.rahul.popularmovies.ApplicationController;
import com.app.rahul.popularmovies.R;
import com.app.rahul.popularmovies.activity.ReviewsListingActivity;
import com.app.rahul.popularmovies.adapter.TrailersAdapter;
import com.app.rahul.popularmovies.database.MoviesListingDao;
import com.app.rahul.popularmovies.model.movie_api.MoviesResponseBean;
import com.app.rahul.popularmovies.model.reviews_api.ReviewsListingResponse;
import com.app.rahul.popularmovies.model.trailers_api.TrailersResponseBean;
import com.app.rahul.popularmovies.network.AppRetrofit;
import com.app.rahul.popularmovies.utility.AppConstants;
import com.app.rahul.popularmovies.utility.Lg;
import com.app.rahul.popularmovies.utility.SnackBarBuilder;
import com.app.rahul.popularmovies.utility.SquareImageView;
import com.app.rahul.popularmovies.utility.Utility;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Target;
import java.util.ArrayList;
import java.util.HashMap;
import me.zhanghai.android.materialprogressbar.IndeterminateProgressDrawable;
import retrofit.Call;
import retrofit.Callback;
import retrofit.Response;
import retrofit.Retrofit;
/**
* Created by Rahul on 2/21/2016.
*/
public class MoviesDetailFragment extends BaseFragment implements View.OnClickListener {
private MoviesResponseBean.MoviesResult moviesResult;
private ProgressBar mTrailersProgressBar;
private RecyclerView mTrailersRecyclerView;
private LinearLayout reviewsContainer;
private TextView seeMoreReviews;
@Override
public int getLayoutById() {
return R.layout.fragment_movies_detail;
}
@Override
public void initUi() {
moviesResult = getArguments().getParcelable(AppConstants.EXTRA_INTENT_PARCEL);
final TextView movieName = (TextView) findViewById(R.id.movie_name);
Utility.setText(movieName, moviesResult.getTitle());
TextView movieDescTv = (TextView) findViewById(R.id.movie_desc);
Utility.setText(movieDescTv, moviesResult.getOverview());
if (TextUtils.isEmpty(moviesResult.getOverview())) {
movieDescTv.setVisibility(View.GONE);
}
String formattedDate = Utility.parseDateTime(moviesResult.getReleaseDate(), AppConstants.DATE_FORMAT1
, AppConstants.DATE_FORMAT2);
Utility.setText((TextView) findViewById(R.id.movie_release_year), formattedDate);
Utility.setText((TextView) findViewById(R.id.movie_rating), moviesResult.getVoteAverage() + " /10");
final SquareImageView movieImageView = (SquareImageView) findViewById(R.id.movie_image);
mTrailersProgressBar = Utility.getProgressBarInstance(mContext, R.id.trailer_progress_bar);
mTrailersProgressBar = (ProgressBar) findViewById(R.id.trailer_progress_bar);
mTrailersProgressBar.setIndeterminateDrawable(new IndeterminateProgressDrawable(mContext));
mTrailersRecyclerView = (RecyclerView) findViewById(R.id.movie_detail_trailers_list);
LinearLayoutManager linearLayoutManager = new LinearLayoutManager(mContext);
linearLayoutManager.setOrientation(LinearLayoutManager.HORIZONTAL);
mTrailersRecyclerView.setLayoutManager(linearLayoutManager);
reviewsContainer = (LinearLayout) findViewById(R.id.reviews_listing_parent);
TextView markFavoriteTv = (TextView) findViewById(R.id.mark_favorite);
markFavoriteTv.setOnClickListener(this);
seeMoreReviews = (TextView) findViewById(R.id.see_more_reviews);
seeMoreReviews.setOnClickListener(this);
seeMoreReviews.setVisibility(View.GONE);
if (!TextUtils.isEmpty(moviesResult.getPosterPath()))
Picasso.with(mContext)
.load(AppConstants.BASE_THUMB_IMAGE_URL + moviesResult.getPosterPath())
.placeholder(R.drawable.placeholder)
.error(R.drawable.placeholder)
.into(new Target() {
@Override
public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom from) {
movieImageView.setImageBitmap(bitmap);
// Asynchronous
Palette.from(bitmap).generate(new Palette.PaletteAsyncListener() {
public void onGenerated(Palette p) {
// Use generated instance
Palette.Swatch vibrantSwatch = p.getVibrantSwatch();
if (vibrantSwatch != null) {
movieName.setTextColor(vibrantSwatch.getTitleTextColor());
((LinearLayout) movieName.getParent()).setBackgroundColor(vibrantSwatch.getRgb());
}
int defaultColor = getResources().getColor(R.color.colorPrimary);
setToolBarColor(p.getLightVibrantColor(defaultColor));
setToolBarTextColor(p.getDarkMutedColor(defaultColor));
Lg.i("Palette", p.toString());
}
});
}
@Override
public void onBitmapFailed(Drawable errorDrawable) {
}
@Override
public void onPrepareLoad(Drawable placeHolderDrawable) {
}
});
else {
movieImageView.setImageResource(R.drawable.placeholder);
}
getMoviesDetail();
getMovieTrailers();
getMovieReviews();
setFavoriteText();
}
private void setFavoriteText() {
MoviesListingDao moviesListingDao = new MoviesListingDao(mContext);
if (moviesListingDao.isMovieFavourite(moviesResult)) {
Utility.setText((TextView) findViewById(R.id.mark_favorite), getString(R.string.mark_unfavorite));
} else {
Utility.setText((TextView) findViewById(R.id.mark_favorite), getString(R.string.mark_favorite));
}
}
private void getMoviesDetail() {
if (ApplicationController.getApplicationInstance().isNetworkConnected()) {
showProgressDialog(false);
HashMap<String, String> stringHashMap = new HashMap<>();
stringHashMap.put(AppConstants.PARAM_API_KEY, AppConstants.API_KEY);
Call<MoviesResponseBean.MoviesResult> beanCall = AppRetrofit.getInstance().getApiServices().apiMoviesDetail(moviesResult.getId(), stringHashMap);
beanCall.enqueue(new Callback<MoviesResponseBean.MoviesResult>() {
@Override
public void onResponse(Response<MoviesResponseBean.MoviesResult> response, Retrofit retrofit) {
showProgressDialog(false);
MoviesResponseBean.MoviesResult moviesResult2 = response.body();
if (moviesResult2 != null) {
Utility.setText((TextView) findViewById(R.id.movie_runtime), moviesResult2.getRuntime() + " min");
Utility.setText((TextView) findViewById(R.id.movie_tagline), moviesResult2.getTagLine());
}
}
@Override
public void onFailure(Throwable t) {
showProgressDialog(false);
Lg.i("Retro", t.toString());
}
});
} else {
mSnackBar = SnackBarBuilder.make(mParent, getString(R.string.no_internet_connction))
.setActionText(getString(R.string.retry))
.onSnackBarClicked(new View.OnClickListener() {
@Override
public void onClick(View v) {
getMoviesDetail();
}
})
.build();
}
}
private void getMovieTrailers() {
findViewById(R.id.trailer_list_parent).setVisibility(View.GONE);
if (ApplicationController.getApplicationInstance().isNetworkConnected() && isAdded()) {
mTrailersProgressBar.setVisibility(View.VISIBLE);
HashMap<String, String> stringHashMap = new HashMap<>();
stringHashMap.put(AppConstants.PARAM_API_KEY, AppConstants.API_KEY);
Call<TrailersResponseBean> beanCall = AppRetrofit.getInstance().getApiServices().apiMovieTrailers(moviesResult.getId(), stringHashMap);
beanCall.enqueue(new Callback<TrailersResponseBean>() {
@Override
public void onResponse(Response<TrailersResponseBean> response1, Retrofit retrofit) {
mTrailersProgressBar.setVisibility(View.GONE);
TrailersResponseBean responseBean = response1.body();
if (responseBean != null && responseBean.getResults() != null && !responseBean.getResults().isEmpty()) {
TrailersAdapter trailersAdapter = new TrailersAdapter(mContext, responseBean.getResults());
mTrailersRecyclerView.setAdapter(trailersAdapter);
findViewById(R.id.trailer_list_parent).setVisibility(View.VISIBLE);
} else {
findViewById(R.id.trailer_list_parent).setVisibility(View.GONE);
}
}
@Override
public void onFailure(Throwable t) {
mTrailersProgressBar.setVisibility(View.GONE);
Lg.i("Retro", t.toString());
}
});
} else {
mSnackBar = SnackBarBuilder.make(mParent, getString(R.string.no_internet_connction))
.setActionText(getString(R.string.retry))
.onSnackBarClicked(new View.OnClickListener() {
@Override
public void onClick(View v) {
getMoviesDetail();
}
})
.build();
}
}
private void getMovieReviews() {
if (ApplicationController.getApplicationInstance().isNetworkConnected() && isAdded()) {
mTrailersProgressBar.setVisibility(View.VISIBLE);
HashMap<String, String> stringHashMap = new HashMap<>();
stringHashMap.put(AppConstants.PARAM_API_KEY, AppConstants.API_KEY);
Call<ReviewsListingResponse> beanCall = AppRetrofit.getInstance().getApiServices().apiMovieReviews(moviesResult.getId(), stringHashMap);
beanCall.enqueue(new Callback<ReviewsListingResponse>() {
@Override
public void onResponse(Response<ReviewsListingResponse> response1, Retrofit retrofit) {
ReviewsListingResponse responseBean = response1.body();
if (responseBean != null) {
ArrayList<ReviewsListingResponse.ReviewsEntity> reviewsEntities = responseBean.getResults();
if (reviewsEntities != null && !reviewsEntities.isEmpty()) {
addReviews(responseBean.getResults());
} else {
reviewsContainer.setVisibility(View.GONE);
}
}
}
@Override
public void onFailure(Throwable t) {
mTrailersProgressBar.setVisibility(View.GONE);
Lg.i("Retro", t.toString());
}
});
} else {
mSnackBar = SnackBarBuilder.make(mParent, getString(R.string.no_internet_connction))
.setActionText(getString(R.string.retry))
.onSnackBarClicked(new View.OnClickListener() {
@Override
public void onClick(View v) {
getMoviesDetail();
}
})
.build();
}
}
private void addReviews(ArrayList<ReviewsListingResponse.ReviewsEntity> resultsEntityArrayList) {
ArrayList<ReviewsListingResponse.ReviewsEntity> results;
if (resultsEntityArrayList.size() > 3) {
seeMoreReviews.setVisibility(View.VISIBLE);
results = new ArrayList<>(resultsEntityArrayList.subList(0, 3));
} else {
results = resultsEntityArrayList;
}
for (int i = 0; i < results.size(); i++) {
View view = LayoutInflater.from(mContext).inflate(R.layout.layout_reviews_row, null);
TextView reviewContentTv = (TextView) view.findViewById(R.id.review_content_tv);
TextView reviewAuthorTv = (TextView) view.findViewById(R.id.review_author_tv);
reviewContentTv.setText(results.get(i).getContent());
reviewAuthorTv.setText(results.get(i).getAuthor());
reviewsContainer.addView(view);
}
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.mark_favorite:
MoviesListingDao moviesListingDao = new MoviesListingDao(mContext);
boolean isMovieFavorited = moviesListingDao.toggleFavouriteMovie(moviesResult);
if (isMovieFavorited)
mSnackBar = SnackBarBuilder.make(mParent, moviesResult.getTitle() +
mContext.getString(R.string.add_to_fav)).build();
else {
mSnackBar = SnackBarBuilder.make(mParent, moviesResult.getTitle() +
mContext.getString(R.string.removed_from_favourites)).build();
}
setFavoriteText();
break;
case R.id.see_more_reviews:
Intent intent = new Intent(mContext, ReviewsListingActivity.class);
intent.putExtra(AppConstants.EXTRA_INTENT_PARCEL, moviesResult.getId());
startActivity(intent);
break;
}
}
}
| |
/*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.mgt;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.AbstractIdentityUserOperationEventListener;
import org.wso2.carbon.identity.core.model.IdentityErrorMsgContext;
import org.wso2.carbon.identity.core.util.IdentityCoreConstants;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.identity.mgt.beans.UserIdentityMgtBean;
import org.wso2.carbon.identity.mgt.beans.VerificationBean;
import org.wso2.carbon.identity.mgt.config.Config;
import org.wso2.carbon.identity.mgt.config.ConfigBuilder;
import org.wso2.carbon.identity.mgt.config.ConfigType;
import org.wso2.carbon.identity.mgt.config.StorageType;
import org.wso2.carbon.identity.mgt.constants.IdentityMgtConstants;
import org.wso2.carbon.identity.mgt.dto.NotificationDataDTO;
import org.wso2.carbon.identity.mgt.dto.UserIdentityClaimsDO;
import org.wso2.carbon.identity.mgt.dto.UserRecoveryDTO;
import org.wso2.carbon.identity.mgt.dto.UserRecoveryDataDO;
import org.wso2.carbon.identity.mgt.internal.IdentityMgtServiceComponent;
import org.wso2.carbon.identity.mgt.mail.Notification;
import org.wso2.carbon.identity.mgt.mail.NotificationBuilder;
import org.wso2.carbon.identity.mgt.mail.NotificationData;
import org.wso2.carbon.identity.mgt.policy.PolicyRegistry;
import org.wso2.carbon.identity.mgt.policy.PolicyViolationException;
import org.wso2.carbon.identity.mgt.store.UserIdentityDataStore;
import org.wso2.carbon.identity.mgt.util.UserIdentityManagementUtil;
import org.wso2.carbon.identity.mgt.util.Utils;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.session.UserRegistry;
import org.wso2.carbon.user.core.UserCoreConstants;
import org.wso2.carbon.user.core.UserStoreException;
import org.wso2.carbon.user.core.UserStoreManager;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
/**
* This is an implementation of UserOperationEventListener. This defines
* additional operations
* for some of the core user management operations
*/
public class IdentityMgtEventListener extends AbstractIdentityUserOperationEventListener {
/*
 * Thread-local holding per-invocation state. Used to pass data from a
 * doPreX() method to the matching doPostX() method and to guard against
 * this listener re-entering itself (infinite loops).
 */
public static final ThreadLocal<HashMap<String, Object>> threadLocalProperties = new ThreadLocal<HashMap<String, Object>>() {
    @Override
    protected HashMap<String, Object> initialValue() {
        return new HashMap<String, Object>();
    }
};
private static final Log log = LogFactory.getLog(IdentityMgtEventListener.class);
// Thread-local keys used to hand data from doPreAddUser to doPostAddUser.
private static final String EMPTY_PASSWORD_USED = "EmptyPasswordUsed";
private static final String USER_IDENTITY_DO = "UserIdentityDO";
private static final String EMAIL_NOTIFICATION_TYPE = "EMAIL";
// System property that, when set to true, unlocks the admin account at startup.
private static final String UNLOCK_ADMIN_SYS_PROP = "unlockAdmin";
// Registry of password policies enforced in doPreAddUser.
PolicyRegistry policyRegistry = null;
// Store used to load/persist per-user identity claims (lock state, fail counts).
private UserIdentityDataStore module;
private IdentityMgtConfig identityMgtConfig;
// Set of thread local variable names
private static final String DO_PRE_AUTHENTICATE = "doPreAuthenticate";
private static final String DO_POST_AUTHENTICATE = "doPostAuthenticate";
private static final String DO_POST_ADD_USER = "doPostAddUser";
private static final String DO_PRE_SET_USER_CLAIM_VALUES = "doPreSetUserClaimValues";
private static final String DO_POST_UPDATE_CREDENTIAL = "doPostUpdateCredential";
/**
 * Initialises this listener from the {@link IdentityMgtConfig} singleton
 * and, when the "unlockAdmin" system property is set to true, unlocks the
 * admin account at startup.
 */
public IdentityMgtEventListener() {
    identityMgtConfig = IdentityMgtConfig.getInstance();
    // The policy registry comes pre-loaded with the configured policies.
    policyRegistry = identityMgtConfig.getPolicyRegistry();
    module = identityMgtConfig.getIdentityDataStore();
    String unlockAdminProperty = System.getProperty(UNLOCK_ADMIN_SYS_PROP);
    boolean shouldUnlockAdmin = StringUtils.isNotBlank(unlockAdminProperty)
            && Boolean.parseBoolean(unlockAdminProperty);
    if (shouldUnlockAdmin) {
        log.info("unlockAdmin system property is defined. Hence unlocking admin account");
        unlockAdmin();
    }
}
/**
 * Unlocks the admin account by clearing its account-lock identity claim.
 * Failures are logged rather than propagated.
 */
private void unlockAdmin() {
    String adminUserName = IdentityMgtServiceComponent.getRealmService()
            .getBootstrapRealmConfiguration().getAdminUserName();
    try {
        if (identityMgtConfig.isListenerEnable()) {
            UserStoreManager userStoreMng = IdentityMgtServiceComponent.getRealmService()
                    .getBootstrapRealm().getUserStoreManager();
            Map<String, String> unlockClaim = new HashMap<String, String>();
            unlockClaim.put(UserIdentityDataStore.ACCOUNT_LOCK, Boolean.toString(false));
            // The "do" method is invoked directly because at this point this
            // listener (or any other) may not yet be registered, so the normal
            // listener chain cannot be relied upon.
            doPreSetUserClaimValues(adminUserName, unlockClaim, null, userStoreMng);
        }
    } catch (UserStoreException e) {
        log.error("Error while unlocking admin account", e);
    }
}
/**
 * Returns the execution order of this listener within the user-operation
 * event listener chain. Uses the order id configured for this listener
 * class when one is set; otherwise falls back to the default order of 50.
 */
@Override
public int getExecutionOrderId() {
    int orderId = getOrderId(IdentityMgtEventListener.class.getName());
    if (orderId != IdentityCoreConstants.EVENT_LISTENER_ORDER_ID) {
        return orderId;
    }
    return 50;
}
/**
 * This method checks if the user account exist or is locked. If the account is
 * locked, the authentication process will be terminated after this method
 * returning false.
 * <p>
 * If the account's configured unlock time has already passed, the account
 * is unlocked here and authentication is allowed to proceed.
 *
 * @throws UserStoreException if the user does not exist (when the
 *                            account-exist-check policy is on), if the
 *                            account is locked, or if persisting the
 *                            unlocked state fails
 */
@Override
public boolean doPreAuthenticate(String userName, Object credential,
                                 UserStoreManager userStoreManager) throws UserStoreException {
    if (!isEnable(this.getClass().getName())) {
        return true;
    }
    // Top level try and finally blocks are used to unset thread local variables
    try {
        // Guard against re-entry: only run once per thread-local scope.
        if (!threadLocalProperties.get().containsKey(DO_PRE_AUTHENTICATE)) {
            threadLocalProperties.get().put(DO_PRE_AUTHENTICATE, true);
            if (log.isDebugEnabled()) {
                log.debug("Pre authenticator is called in IdentityMgtEventListener");
            }
            // Clear any error context left over from a previous attempt.
            IdentityUtil.clearIdentityErrorMsg();
            IdentityMgtConfig config = IdentityMgtConfig.getInstance();
            if (!config.isListenerEnable()) {
                return true;
            }
            if (!config.isEnableAuthPolicy()) {
                return true;
            }
            String domainName = userStoreManager.getRealmConfiguration().getUserStoreProperty(UserCoreConstants.RealmConfig.PROPERTY_DOMAIN_NAME);
            String usernameWithDomain = UserCoreUtil.addDomainToName(userName, domainName);
            boolean isUserExistInCurrentDomain = userStoreManager.isExistingUser(usernameWithDomain);
            if (!isUserExistInCurrentDomain) {
                IdentityErrorMsgContext customErrorMessageContext = new IdentityErrorMsgContext(UserCoreConstants.ErrorCode.USER_DOES_NOT_EXIST);
                IdentityUtil.setIdentityErrorMsg(customErrorMessageContext);
                if (log.isDebugEnabled()) {
                    log.debug("Username :" + userName + "does not exists in the system, ErrorCode :" + UserCoreConstants.ErrorCode.USER_DOES_NOT_EXIST);
                }
                if (config.isAuthPolicyAccountExistCheck()) {
                    throw new UserStoreException(UserCoreConstants.ErrorCode.USER_DOES_NOT_EXIST);
                }
            } else {
                UserIdentityClaimsDO userIdentityDTO = module.load(userName, userStoreManager);
                // if the account is locked, should not be able to log in
                if (userIdentityDTO != null && userIdentityDTO.isAccountLocked()) {
                    // If unlock time is specified then unlock the account.
                    if ((userIdentityDTO.getUnlockTime() != 0) && (System.currentTimeMillis() >= userIdentityDTO.getUnlockTime())) {
                        userIdentityDTO.setAccountLock(false);
                        userIdentityDTO.setUnlockTime(0);
                        try {
                            module.store(userIdentityDTO, userStoreManager);
                        } catch (IdentityException e) {
                            throw new UserStoreException(
                                    "Error while saving user store data for user : "
                                            + userName, e);
                        }
                    } else {
                        // Still locked: surface the failure-attempt counters in
                        // the error context and reject the attempt.
                        IdentityErrorMsgContext customErrorMessageContext = new IdentityErrorMsgContext(
                                UserCoreConstants.ErrorCode.USER_IS_LOCKED,
                                userIdentityDTO.getFailAttempts(),
                                config.getAuthPolicyMaxLoginAttempts());
                        IdentityUtil.setIdentityErrorMsg(customErrorMessageContext);
                        String errorMsg = "User account is locked for user : " + userName
                                + ". cannot login until the account is unlocked ";
                        log.warn(errorMsg);
                        throw new UserStoreException(UserCoreConstants.ErrorCode.USER_IS_LOCKED + " "
                                + errorMsg);
                    }
                }
            }
        }
        return true;
    } finally {
        // remove thread local variable
        threadLocalProperties.get().remove(DO_PRE_AUTHENTICATE);
    }
}
/**
 * This method locks the accounts after a configured number of
 * authentication failure attempts. And unlocks accounts based on successful
 * authentications.
 * <p>
 * Additionally, for users flagged for one-time login (OTP), a successful
 * authentication resets the password to a fresh temporary value and emails
 * it to the user's registered address.
 *
 * @throws UserStoreException on notification/configuration failures, a
 *                            missing user email for OTP users, or a failure
 *                            to persist the updated identity claims
 */
@Override
public boolean doPostAuthenticate(String userName, boolean authenticated,
                                  UserStoreManager userStoreManager) throws UserStoreException {
    if (!isEnable(this.getClass().getName())) {
        return true;
    }
    // Top level try and finally blocks are used to unset thread local variables
    try {
        // Guard against re-entry: only run once per thread-local scope.
        if (!threadLocalProperties.get().containsKey(DO_POST_AUTHENTICATE)) {
            threadLocalProperties.get().put(DO_POST_AUTHENTICATE, true);
            if (log.isDebugEnabled()) {
                log.debug("Post authenticator is called in IdentityMgtEventListener");
            }
            IdentityMgtConfig config = IdentityMgtConfig.getInstance();
            if (!config.isListenerEnable()) {
                return true;
            }
            if (!config.isEnableAuthPolicy()) {
                return true;
            }
            UserIdentityClaimsDO userIdentityDTO = module.load(userName, userStoreManager);
            if (userIdentityDTO == null) {
                userIdentityDTO = new UserIdentityClaimsDO(userName);
            }
            boolean userOTPEnabled = userIdentityDTO.getOneTimeLogin();
            // One time password check
            if (authenticated && config.isAuthPolicyOneTimePasswordCheck() &&
                    (!userStoreManager.isReadOnly()) && userOTPEnabled) {
                // reset password of the user and notify user of the new password
                String password = new String(UserIdentityManagementUtil.generateTemporaryPassword());
                userStoreManager.updateCredentialByAdmin(userName, password);
                // Get email user claim value
                String email = userStoreManager.getUserClaimValue(userName, UserCoreConstants.ClaimTypeURIs.EMAIL_ADDRESS,
                        null);
                if (StringUtils.isBlank(email)) {
                    throw new UserStoreException("No user email provided for user : " + userName);
                }
                List<NotificationSendingModule> notificationModules =
                        config.getNotificationSendingModules();
                if (notificationModules != null) {
                    NotificationDataDTO notificationData = new NotificationDataDTO();
                    NotificationData emailNotificationData = new NotificationData();
                    String emailTemplate = null;
                    int tenantId = userStoreManager.getTenantId();
                    String firstName = null;
                    try {
                        firstName =
                                Utils.getClaimFromUserStoreManager(userName, tenantId,
                                        "http://wso2.org/claims/givenname");
                    } catch (IdentityException e2) {
                        throw new UserStoreException("Could not load user given name", e2);
                    }
                    // Populate template placeholders for the OTP email.
                    emailNotificationData.setTagData("first-name", firstName);
                    emailNotificationData.setTagData("user-name", userName);
                    emailNotificationData.setTagData("otp-password", password);
                    emailNotificationData.setSendTo(email);
                    Config emailConfig = null;
                    ConfigBuilder configBuilder = ConfigBuilder.getInstance();
                    try {
                        emailConfig =
                                configBuilder.loadConfiguration(ConfigType.EMAIL,
                                        StorageType.REGISTRY,
                                        tenantId);
                    } catch (Exception e1) {
                        throw new UserStoreException(
                                "Could not load the email template configuration for user : "
                                        + userName, e1);
                    }
                    emailTemplate = emailConfig.getProperty("otp");
                    Notification emailNotification = null;
                    try {
                        emailNotification =
                                NotificationBuilder.createNotification(EMAIL_NOTIFICATION_TYPE, emailTemplate,
                                        emailNotificationData);
                    } catch (Exception e) {
                        throw new UserStoreException(
                                "Could not create the email notification for template: "
                                        + emailTemplate, e);
                    }
                    NotificationSender sender = new NotificationSender();
                    for (NotificationSendingModule notificationSendingModule : notificationModules) {
                        if (IdentityMgtConfig.getInstance().isNotificationInternallyManaged()) {
                            notificationSendingModule.setNotificationData(notificationData);
                            notificationSendingModule.setNotification(emailNotification);
                            sender.sendNotification(notificationSendingModule);
                            notificationData.setNotificationSent(true);
                        }
                    }
                } else {
                    throw new UserStoreException("No notification modules configured");
                }
            }
            // Password expire check. Not for OTP enabled users.
            if (authenticated && config.isAuthPolicyExpirePasswordCheck() && !userOTPEnabled && (!userStoreManager.isReadOnly())) {
                // TODO - password expire impl
                // Refactor adduser and change password api to stamp the time
                // Check user's expire time in the claim
                // if expired redirect to change password
                // else pass through
            }
            if (!authenticated && config.isAuthPolicyAccountLockOnFailure()) {
                // reading the max allowed #of failure attempts
                String domainName = userStoreManager.getRealmConfiguration().getUserStoreProperty(UserCoreConstants.RealmConfig.PROPERTY_DOMAIN_NAME);
                String usernameWithDomain = UserCoreUtil.addDomainToName(userName, domainName);
                boolean isUserExistInCurrentDomain = userStoreManager.isExistingUser(usernameWithDomain);
                if (isUserExistInCurrentDomain) {
                    // Increment the stored failure count for this user.
                    userIdentityDTO.setFailAttempts();
                    if (userIdentityDTO.getFailAttempts() >= config.getAuthPolicyMaxLoginAttempts()) {
                        log.info("User, " + userName + " has exceed the max failed login attempts. " +
                                "User account would be locked");
                        IdentityErrorMsgContext customErrorMessageContext = new IdentityErrorMsgContext(UserCoreConstants.ErrorCode.USER_IS_LOCKED,
                                userIdentityDTO.getFailAttempts(), config.getAuthPolicyMaxLoginAttempts());
                        IdentityUtil.setIdentityErrorMsg(customErrorMessageContext);
                        if (log.isDebugEnabled()) {
                            log.debug("Username :" + userName + "Exceeded the maximum login attempts. User locked, ErrorCode :" + UserCoreConstants.ErrorCode.USER_IS_LOCKED);
                        }
                        userIdentityDTO.setAccountLock(true);
                        userIdentityDTO.setFailAttempts(0);
                        // lock time from the config
                        int lockTime = IdentityMgtConfig.getInstance().getAuthPolicyLockingTime();
                        if (lockTime != 0) {
                            // Lock time is configured in minutes; convert to millis.
                            userIdentityDTO.setUnlockTime(System.currentTimeMillis() +
                                    (lockTime * 60 * 1000L));
                        }
                    } else {
                        IdentityErrorMsgContext customErrorMessageContext = new IdentityErrorMsgContext(UserCoreConstants.ErrorCode.INVALID_CREDENTIAL,
                                userIdentityDTO.getFailAttempts(), config.getAuthPolicyMaxLoginAttempts());
                        IdentityUtil.setIdentityErrorMsg(customErrorMessageContext);
                        if (log.isDebugEnabled()) {
                            log.debug("Username :" + userName + "Invalid Credential, ErrorCode :" + UserCoreConstants.ErrorCode.INVALID_CREDENTIAL);
                        }
                    }
                    try {
                        module.store(userIdentityDTO, userStoreManager);
                    } catch (IdentityException e) {
                        throw new UserStoreException("Error while saving user store data for user : "
                                + userName, e);
                    }
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug("User, " + userName + " is not exists in " + domainName);
                    }
                }
            } else {
                // if the account was locked due to account verification process,
                // the unlock the account and reset the number of failedAttempts
                if (userIdentityDTO.isAccountLocked() || userIdentityDTO.getFailAttempts() > 0 || userIdentityDTO.getAccountLock()) {
                    userIdentityDTO.setAccountLock(false);
                    userIdentityDTO.setFailAttempts(0);
                    userIdentityDTO.setUnlockTime(0);
                    try {
                        module.store(userIdentityDTO, userStoreManager);
                    } catch (IdentityException e) {
                        throw new UserStoreException("Error while saving user store data for user : "
                                + userName, e);
                    }
                }
            }
        }
        return true;
    } finally {
        // Remove thread local variable
        threadLocalProperties.get().remove(DO_POST_AUTHENTICATE);
    }
}
/**
 * This method will set the default/random password if the password provided is
 * null. The thread local parameter EMPTY_PASSWORD_USED will be used to
 * track if the password empty in the doPostAddUser.
 * This method will filter the security question URIs from claims and put those
 * to the thread local properties.
 *
 * @throws UserStoreException if the listener or the temporary-password
 *                            feature is disabled for an empty password, or
 *                            if a password policy is violated
 */
@Override
public boolean doPreAddUser(String userName, Object credential, String[] roleList,
                            Map<String, String> claims, String profile,
                            UserStoreManager userStoreManager) throws UserStoreException {
    if (!isEnable(this.getClass().getName())) {
        return true;
    }
    if (log.isDebugEnabled()) {
        log.debug("Pre add user is called in IdentityMgtEventListener");
    }
    IdentityMgtConfig config = IdentityMgtConfig.getInstance();
    if (!config.isListenerEnable()) {
        // Listener disabled: an empty password cannot be serviced (no
        // ask-password flow), so reject it outright.
        if (credential == null || StringUtils.isBlank(credential.toString())) {
            log.error("Identity Management listener is disabled");
            throw new UserStoreException("Ask Password Feature is disabled");
        }
        return true;
    }
    try {
        // Enforcing the password policies.
        // NOTE(review): policies are only enforced when the credential is a
        // non-empty StringBuffer; other credential types bypass the check —
        // confirm that callers always pass a StringBuffer.
        if (credential != null &&
                (credential instanceof StringBuffer && (credential.toString().trim().length() > 0))) {
            policyRegistry.enforcePasswordPolicies(credential.toString(), userName);
        }
    } catch (PolicyViolationException pe) {
        throw new UserStoreException(pe.getMessage(), pe);
    }
    // empty password account creation
    if (credential == null ||
            (credential instanceof StringBuffer && (credential.toString().trim().length() < 1))) {
        if (!config.isEnableTemporaryPassword()) {
            log.error("Temporary password property is disabled");
            throw new UserStoreException("Ask Password Feature is disabled");
        }
        if (log.isDebugEnabled()) {
            log.debug("Credentials are null. Using a temporary password as credentials");
        }
        // setting the thread-local to check in doPostAddUser
        threadLocalProperties.get().put(EMPTY_PASSWORD_USED, true);
        // temporary passwords will be used
        char[] temporaryPassword = null;
        if (IdentityMgtConfig.getInstance().getTemporaryDefaultPassword() != null) {
            temporaryPassword = IdentityMgtConfig.getInstance().getTemporaryDefaultPassword()
                    .toCharArray();
        } else {
            temporaryPassword = UserIdentityManagementUtil.generateTemporaryPassword();
        }
        // setting the password value
        // NOTE(review): if credential is null this cast/call throws a
        // NullPointerException — the temporary password can only be written
        // back when the caller supplied an (empty) StringBuffer. Verify that
        // callers never pass null here.
        ((StringBuffer) credential).replace(0, temporaryPassword.length, new String(temporaryPassword));
    }
    // Filtering security question URIs from claims and add them to the thread local dto
    Map<String, String> userDataMap = new HashMap<String, String>();
    // TODO why challenge Q
    Iterator<Entry<String, String>> it = claims.entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry<String, String> claim = it.next();
        // Move challenge-question and identity claims out of the user-store
        // claim set; they are persisted separately via the identity data store.
        if (claim.getKey().contains(UserCoreConstants.ClaimTypeURIs.CHALLENGE_QUESTION_URI) ||
                claim.getKey().contains(UserCoreConstants.ClaimTypeURIs.IDENTITY_CLAIM_URI)) {
            userDataMap.put(claim.getKey(), claim.getValue());
            it.remove();
        }
    }
    UserIdentityClaimsDO identityDTO = new UserIdentityClaimsDO(userName, userDataMap);
    // adding dto to thread local to be read again from the doPostAddUser method
    threadLocalProperties.get().put(USER_IDENTITY_DO, identityDTO);
    return true;
}
    /**
     * Locks the newly created account based on the configured account policies
     * or on the account-confirmation method being used. Two confirmation
     * methods are supported: temporary password and verification code. For a
     * temporary password, the password is emailed to the user; for a
     * verification code, the code is emailed instead. The identity claims and
     * security questions filtered out by doPreAddUser (carried over via the
     * USER_IDENTITY_DO thread-local) are persisted here.
     *
     * @return true on success; for empty-password accounts, true only when the
     *         confirmation notification was actually sent
     */
    @Override
    public boolean doPostAddUser(String userName, Object credential, String[] roleList,
                                 Map<String, String> claims, String profile,
                                 UserStoreManager userStoreManager) throws UserStoreException {
        if (!isEnable(this.getClass().getName())) {
            return true;
        }
        // Top-level try/finally is used solely to unset the thread-local guard.
        try {
            // Guard so nested invocations of this listener run the logic only once.
            if (!threadLocalProperties.get().containsKey(DO_POST_ADD_USER)) {
                threadLocalProperties.get().put(DO_POST_ADD_USER, true);
                if (log.isDebugEnabled()) {
                    log.debug("Post add user is called in IdentityMgtEventListener");
                }
                IdentityMgtConfig config = IdentityMgtConfig.getInstance();
                if (!config.isListenerEnable()) {
                    return true;
                }
                // Reading the DTO that doPreAddUser stashed in the thread-local.
                UserIdentityClaimsDO userIdentityClaimsDO = (UserIdentityClaimsDO) threadLocalProperties.get().get(USER_IDENTITY_DO);
                if (config.isEnableUserAccountVerification() && threadLocalProperties.get().containsKey(EMPTY_PASSWORD_USED)) {
                    // Empty-password ("ask password") account creation flow.
                    String domainName = ((org.wso2.carbon.user.core.UserStoreManager) userStoreManager)
                            .getRealmConfiguration().getUserStoreProperty(
                                    UserCoreConstants.RealmConfig.PROPERTY_DOMAIN_NAME);
                    if (!UserCoreConstants.PRIMARY_DEFAULT_DOMAIN_NAME.equals(domainName)) {
                        userName = domainName + UserCoreConstants.DOMAIN_SEPARATOR + userName;
                    }
                    // Store identity data.
                    // NOTE(review): userIdentityClaimsDO could be null here if
                    // doPreAddUser did not populate the thread-local — confirm
                    // the pre-listener always runs before this branch.
                    userIdentityClaimsDO.setAccountLock(false);
                    try {
                        module.store(userIdentityClaimsDO, userStoreManager);
                    } catch (IdentityException e) {
                        // Roll back the half-created user before failing.
                        userStoreManager.deleteUser(userName);
                        throw new UserStoreException("Error while saving user store for user : "
                                + userName, e);
                    }
                    // Store identity metadata.
                    // NOTE(review): metadataDO is built but never persisted —
                    // confirm whether a store call is missing here.
                    UserRecoveryDataDO metadataDO = new UserRecoveryDataDO();
                    metadataDO.setUserName(userName).setTenantId(userStoreManager.getTenantId())
                            .setCode((String) credential);
                    // Set recovery data (confirmation code) for the new account.
                    RecoveryProcessor processor = new RecoveryProcessor();
                    VerificationBean verificationBean;
                    try {
                        verificationBean = processor.updateConfirmationCode(1, userName, userStoreManager.getTenantId());
                    } catch (IdentityException e) {
                        // Roll back the half-created user before failing.
                        userStoreManager.deleteUser(userName);
                        throw new UserStoreException(
                                "Error while updating confirmation code for user : " + userName, e);
                    }
                    // Preparing a bean to send the confirmation email.
                    UserIdentityMgtBean bean = new UserIdentityMgtBean();
                    bean.setUserId(userName).setConfirmationCode(verificationBean.getKey())
                            .setRecoveryType(IdentityMgtConstants.Notification.TEMPORARY_PASSWORD)
                            .setEmail(claims.get(config.getAccountRecoveryClaim()));
                    UserRecoveryDTO recoveryDto = new UserRecoveryDTO(userName);
                    recoveryDto.setNotification(IdentityMgtConstants.Notification.ASK_PASSWORD);
                    recoveryDto.setNotificationType("EMAIL");
                    recoveryDto.setTenantId(userStoreManager.getTenantId());
                    recoveryDto.setConfirmationCode(verificationBean.getKey());
                    NotificationDataDTO notificationDto = null;
                    try {
                        notificationDto = processor.recoverWithNotification(recoveryDto);
                    } catch (IdentityException e) {
                        // Roll back the half-created user before failing.
                        userStoreManager.deleteUser(userName);
                        throw new UserStoreException("Error while sending notification for user : "
                                + userName, e);
                    }
                    // Success only when the notification was actually dispatched.
                    return notificationDto != null && notificationDto.isNotificationSent();
                }
                // No account recoveries are defined, no email will be sent.
                if (config.isAuthPolicyAccountLockOnCreation()) {
                    // Accounts are locked on creation; an admin must unlock them.
                    userIdentityClaimsDO.setAccountLock(true);
                    try {
                        config.getIdentityDataStore().store(userIdentityClaimsDO, userStoreManager);
                    } catch (IdentityException e) {
                        // Roll back the half-created user before failing.
                        userStoreManager.deleteUser(userName);
                        throw new UserStoreException("Error while saving user store data for user : "
                                + userName, e);
                    }
                }
                // When claims available in user add request like http://wso2.org/claims/identity/accountLocked
                if (!config.isEnableUserAccountVerification() &&
                        !config.isAuthPolicyAccountLockOnCreation() && userIdentityClaimsDO != null) {
                    try {
                        if (log.isDebugEnabled()) {
                            log.debug("Storing identity-mgt claims since they are available in the addUser request");
                        }
                        module.store(userIdentityClaimsDO, userStoreManager);
                    } catch (IdentityException e) {
                        // Roll back the half-created user before failing.
                        userStoreManager.deleteUser(userName);
                        throw new UserStoreException("Error while saving user store data for user : "
                                + userName, e);
                    }
                }
            }
            return true;
        } finally {
            // Remove thread local variable
            threadLocalProperties.get().remove(DO_POST_ADD_USER);
        }
    }
/**
* This method is used to check pre conditions when changing the user
* password.
*
*/
@Override
public boolean doPreUpdateCredential(String userName, Object newCredential,
Object oldCredential, UserStoreManager userStoreManager) throws UserStoreException {
if (!isEnable(this.getClass().getName())) {
return true;
}
if (log.isDebugEnabled()) {
log.debug("Pre update credential is called in IdentityMgtEventListener");
}
IdentityMgtConfig config = IdentityMgtConfig.getInstance();
if (!config.isListenerEnable()) {
return true;
}
try {
// Enforcing the password policies.
if (newCredential != null
&& (newCredential instanceof String && (newCredential.toString().trim()
.length() > 0))) {
policyRegistry.enforcePasswordPolicies(newCredential.toString(), userName);
}
} catch (PolicyViolationException pe) {
throw new UserStoreException(pe.getMessage(), pe);
}
return true;
}
/**
* This method is used when the admin is updating the credentials with an
* empty credential. A random password will be generated and will be mailed
* to the user.
*/
@Override
public boolean doPreUpdateCredentialByAdmin(String userName, Object newCredential,
UserStoreManager userStoreManager) throws UserStoreException {
if (!isEnable(this.getClass().getName())) {
return true;
}
if (log.isDebugEnabled()) {
log.debug("Pre update credential by admin is called in IdentityMgtEventListener");
}
IdentityMgtConfig config = IdentityMgtConfig.getInstance();
if (!config.isListenerEnable()) {
return true;
}
try {
// Enforcing the password policies.
if (newCredential != null
&& (newCredential instanceof StringBuffer && (newCredential.toString().trim()
.length() > 0))) {
policyRegistry.enforcePasswordPolicies(newCredential.toString(), userName);
}
} catch (PolicyViolationException pe) {
throw new UserStoreException(pe.getMessage(), pe);
}
if (newCredential == null
|| (newCredential instanceof StringBuffer && ((StringBuffer) newCredential)
.toString().trim().length() < 1)) {
if (!config.isEnableTemporaryPassword()) {
log.error("Empty passwords are not allowed");
return false;
}
if (log.isDebugEnabled()) {
log.debug("Credentials are null. Using a temporary password as credentials");
}
// temporary passwords will be used
char[] temporaryPassword = UserIdentityManagementUtil.generateTemporaryPassword();
// setting the password value
((StringBuffer) newCredential).replace(0, temporaryPassword.length, new String(
temporaryPassword));
UserIdentityMgtBean bean = new UserIdentityMgtBean();
bean.setUserId(userName);
bean.setConfirmationCode(newCredential.toString());
bean.setRecoveryType(IdentityMgtConstants.Notification.TEMPORARY_PASSWORD);
if (log.isDebugEnabled()) {
log.debug("Sending the temporary password to the user " + userName);
}
UserIdentityManagementUtil.notifyViaEmail(bean);
} else {
if (log.isDebugEnabled()) {
log.debug("Updating credentials of user " + userName
+ " by admin with a non-empty password");
}
}
return true;
}
/**
* This method checks if the updating claim is an user identity data or
* security question. Identity data and security questions are updated by
* the identity store, therefore they will not be added to the user store.
* Other claims are skipped to the set or update.
*/
@Override
public boolean doPreSetUserClaimValue(String userName, String claimURI, String claimValue,
String profileName, UserStoreManager userStoreManager)
throws UserStoreException {
if (!isEnable(this.getClass().getName())) {
return true;
}
IdentityMgtConfig config = IdentityMgtConfig.getInstance();
if (!config.isListenerEnable()) {
return true;
}
// security questions and identity claims are updated at the identity store
if (claimURI.contains(UserCoreConstants.ClaimTypeURIs.CHALLENGE_QUESTION_URI) ||
claimURI.contains(UserCoreConstants.ClaimTypeURIs.IDENTITY_CLAIM_URI)) {
// the whole listner to return and fail adding the cliam in doSetUserClaim
return true;
} else {
// a simple user claim. add it to the user store
return true;
}
}
    /**
     * Strips identity claims and security-question claims out of the incoming
     * claim map before the remaining claims are written to the user store,
     * persisting the stripped claims in the identity data store instead. Also
     * records an account-locked error context when the incoming claims lock
     * the account.
     */
    @Override
    public boolean doPreSetUserClaimValues(String userName, Map<String, String> claims,
                                           String profileName, UserStoreManager userStoreManager)
            throws UserStoreException {
        if (!isEnable(this.getClass().getName())) {
            return true;
        }
        IdentityUtil.clearIdentityErrorMsg();
        boolean accountLocked = Boolean.parseBoolean(claims.get(UserIdentityDataStore.ACCOUNT_LOCK));
        if (accountLocked) {
            // Record the lock so downstream components can report USER_IS_LOCKED.
            IdentityErrorMsgContext customErrorMessageContext = new IdentityErrorMsgContext(UserCoreConstants
                    .ErrorCode.USER_IS_LOCKED);
            IdentityUtil.setIdentityErrorMsg(customErrorMessageContext);
        }
        // Top-level try/finally is used solely to unset the thread-local guard.
        try {
            // Guard so nested invocations of this listener run the logic only once.
            if (!threadLocalProperties.get().containsKey(DO_PRE_SET_USER_CLAIM_VALUES)) {
                threadLocalProperties.get().put(DO_PRE_SET_USER_CLAIM_VALUES, true);
                IdentityMgtConfig config = IdentityMgtConfig.getInstance();
                if (!config.isListenerEnable()) {
                    return true;
                }
                UserIdentityDataStore identityDataStore = IdentityMgtConfig.getInstance().getIdentityDataStore();
                UserIdentityClaimsDO identityDTO = identityDataStore.load(userName, userStoreManager);
                if (identityDTO == null) {
                    // First identity-claim update for this user; start a fresh DTO.
                    identityDTO = new UserIdentityClaimsDO(userName);
                }
                // Move security questions and identity claims from the user-store
                // claim map into the identity DTO. Iterator.remove() is required
                // to mutate the map safely while iterating it.
                Iterator<Entry<String, String>> it = claims.entrySet().iterator();
                while (it.hasNext()) {
                    Map.Entry<String, String> claim = it.next();
                    if (claim.getKey().contains(UserCoreConstants.ClaimTypeURIs.CHALLENGE_QUESTION_URI)
                            || claim.getKey().contains(UserCoreConstants.ClaimTypeURIs.IDENTITY_CLAIM_URI)) {
                        String key = claim.getKey();
                        String value = claim.getValue();
                        identityDTO.setUserIdentityDataClaim(key, value);
                        it.remove();
                    }
                }
                // Storing the identity claims and security questions.
                try {
                    identityDataStore.store(identityDTO, userStoreManager);
                } catch (IdentityException e) {
                    throw new UserStoreException(
                            "Error while saving user store data for user : " + userName, e);
                }
            }
            return true;
        } finally {
            // Remove thread local variable
            threadLocalProperties.get().remove(DO_PRE_SET_USER_CLAIM_VALUES);
        }
    }
/**
* Deleting user from the identity database. What are the registry keys ?
*/
@Override
public boolean doPostDeleteUser(String userName, UserStoreManager userStoreManager)
throws UserStoreException {
if (!isEnable(this.getClass().getName())) {
return true;
}
IdentityMgtConfig config = IdentityMgtConfig.getInstance();
if (!config.isListenerEnable()) {
return true;
}
// remove from the identity store
try {
IdentityMgtConfig.getInstance().getIdentityDataStore()
.remove(userName, userStoreManager);
} catch (IdentityException e) {
throw new UserStoreException("Error while removing user: " + userName
+ " from identity data store", e);
}
// deleting registry meta-data
UserRegistry registry = null;
try {
registry = IdentityMgtServiceComponent.getRegistryService().getConfigSystemRegistry(
userStoreManager.getTenantId());
String identityKeyMgtPath = IdentityMgtConstants.IDENTITY_MANAGEMENT_KEYS
+ RegistryConstants.PATH_SEPARATOR + userStoreManager.getTenantId()
+ RegistryConstants.PATH_SEPARATOR + userName;
if (registry.resourceExists(identityKeyMgtPath)) {
registry.delete(identityKeyMgtPath);
}
} catch (RegistryException e) {
log.error("Error while deleting recovery data for user : " + userName + " in tenant : "
+ userStoreManager.getTenantId(), e);
}
return true;
}
/**
* Adding the user identity data to the claims set
*/
@Override
public boolean doPostGetUserClaimValues(String userName, String[] claims, String profileName,
Map<String, String> claimMap,
UserStoreManager storeManager)
throws UserStoreException {
if (!isEnable(this.getClass().getName())) {
return true;
}
IdentityMgtConfig config = IdentityMgtConfig.getInstance();
if (!config.isListenerEnable()) {
return true;
}
if (claimMap == null) {
claimMap = new HashMap<String, String>();
}
UserIdentityDataStore identityDataStore =
IdentityMgtConfig.getInstance()
.getIdentityDataStore();
// check if there are identity claims
boolean containsIdentityClaims = false;
for (String claim : claims) {
if (claim.contains(UserCoreConstants.ClaimTypeURIs.CHALLENGE_QUESTION_URI) ||
claim.contains(UserCoreConstants.ClaimTypeURIs.IDENTITY_CLAIM_URI)) {
containsIdentityClaims = true;
break;
}
}
// if there are no identity claims, let it go
if (!containsIdentityClaims) {
return true;
}
// there is/are identity claim/s . load the dto
UserIdentityClaimsDO identityDTO = identityDataStore.load(userName, storeManager);
// if no user identity data found, just continue
if (identityDTO == null) {
return true;
}
// data found, add the values for security questions and identity claims
for (String claim : claims) {
if (identityDTO.getUserDataMap().containsKey(claim)) {
claimMap.put(claim, identityDTO.getUserDataMap().get(claim));
}
}
return true;
}
    /**
     * Returning the user identity data as a claim.
     * <p>
     * No-op in this listener: single-claim reads are passed through unchanged;
     * identity-claim values are merged in doPostGetUserClaimValues instead.
     */
    @Override
    public boolean doPostGetUserClaimValue(String userName, String claim, List<String> claimValue,
                                           String profileName, UserStoreManager storeManager)
            throws UserStoreException {
        return true;
    }
    // No-op: no identity-management processing is required after a credential
    // update; the operation is always allowed to continue.
    @Override
    public boolean doPostUpdateCredential(String userName, Object credential, UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/enums/feed_item_target_status.proto
package com.google.ads.googleads.v9.enums;
/**
* <pre>
* Container for enum describing possible statuses of a feed item target.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.enums.FeedItemTargetStatusEnum}
*/
public final class FeedItemTargetStatusEnum extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v9.enums.FeedItemTargetStatusEnum)
    FeedItemTargetStatusEnumOrBuilder {
  // NOTE: Machine-generated by protoc from feed_item_target_status.proto — do
  // not hand-edit; regenerate from the .proto instead. The message carries no
  // fields of its own and exists solely to namespace FeedItemTargetStatus.
private static final long serialVersionUID = 0L;
  // Use FeedItemTargetStatusEnum.newBuilder() to construct.
  private FeedItemTargetStatusEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private FeedItemTargetStatusEnum() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new FeedItemTargetStatusEnum();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  private FeedItemTargetStatusEnum(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v9.enums.FeedItemTargetStatusProto.internal_static_google_ads_googleads_v9_enums_FeedItemTargetStatusEnum_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v9.enums.FeedItemTargetStatusProto.internal_static_google_ads_googleads_v9_enums_FeedItemTargetStatusEnum_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum.class, com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum.Builder.class);
  }

  /**
   * <pre>
   * Possible statuses of a feed item target.
   * </pre>
   *
   * Protobuf enum {@code google.ads.googleads.v9.enums.FeedItemTargetStatusEnum.FeedItemTargetStatus}
   */
  public enum FeedItemTargetStatus
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <pre>
     * Not specified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    UNSPECIFIED(0),
    /**
     * <pre>
     * Used for return value only. Represents value unknown in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    UNKNOWN(1),
    /**
     * <pre>
     * Feed item target is enabled.
     * </pre>
     *
     * <code>ENABLED = 2;</code>
     */
    ENABLED(2),
    /**
     * <pre>
     * Feed item target has been removed.
     * </pre>
     *
     * <code>REMOVED = 3;</code>
     */
    REMOVED(3),
    UNRECOGNIZED(-1),
    ;

    /**
     * <pre>
     * Not specified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    public static final int UNSPECIFIED_VALUE = 0;
    /**
     * <pre>
     * Used for return value only. Represents value unknown in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    public static final int UNKNOWN_VALUE = 1;
    /**
     * <pre>
     * Feed item target is enabled.
     * </pre>
     *
     * <code>ENABLED = 2;</code>
     */
    public static final int ENABLED_VALUE = 2;
    /**
     * <pre>
     * Feed item target has been removed.
     * </pre>
     *
     * <code>REMOVED = 3;</code>
     */
    public static final int REMOVED_VALUE = 3;


    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static FeedItemTargetStatus valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static FeedItemTargetStatus forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 1: return UNKNOWN;
        case 2: return ENABLED;
        case 3: return REMOVED;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<FeedItemTargetStatus>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<
        FeedItemTargetStatus> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<FeedItemTargetStatus>() {
            public FeedItemTargetStatus findValueByNumber(int number) {
              return FeedItemTargetStatus.forNumber(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum.getDescriptor().getEnumTypes().get(0);
    }

    private static final FeedItemTargetStatus[] VALUES = values();

    public static FeedItemTargetStatus valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private FeedItemTargetStatus(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.ads.googleads.v9.enums.FeedItemTargetStatusEnum.FeedItemTargetStatus)
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum other = (com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum) obj;

    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Container for enum describing possible statuses of a feed item target.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v9.enums.FeedItemTargetStatusEnum}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.enums.FeedItemTargetStatusEnum)
      com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnumOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v9.enums.FeedItemTargetStatusProto.internal_static_google_ads_googleads_v9_enums_FeedItemTargetStatusEnum_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v9.enums.FeedItemTargetStatusProto.internal_static_google_ads_googleads_v9_enums_FeedItemTargetStatusEnum_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum.class, com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum.Builder.class);
    }

    // Construct using com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v9.enums.FeedItemTargetStatusProto.internal_static_google_ads_googleads_v9_enums_FeedItemTargetStatusEnum_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum getDefaultInstanceForType() {
      return com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum build() {
      com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum buildPartial() {
      com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum result = new com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum(this);
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum) {
        return mergeFrom((com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum other) {
      if (other == com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.enums.FeedItemTargetStatusEnum)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v9.enums.FeedItemTargetStatusEnum)
  private static final com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum();
  }

  public static com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<FeedItemTargetStatusEnum>
      PARSER = new com.google.protobuf.AbstractParser<FeedItemTargetStatusEnum>() {
    @java.lang.Override
    public FeedItemTargetStatusEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new FeedItemTargetStatusEnum(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<FeedItemTargetStatusEnum> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<FeedItemTargetStatusEnum> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v9.enums.FeedItemTargetStatusEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
| |
/*
* Copyright (c) 2002, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package javax.swing.plaf.synth;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.plaf.*;
import javax.swing.plaf.basic.BasicInternalFrameUI;
import java.beans.*;
/**
* Provides the Synth L&F UI delegate for
* {@link JInternalFrame}.
*
* @author David Kloba
* @author Joshua Outwater
* @author Rich Schiavi
* @since 1.7
*/
public class SynthInternalFrameUI extends BasicInternalFrameUI
implements SynthUI, PropertyChangeListener {
    /** SynthStyle currently installed for the frame; set in updateStyle, cleared in uninstallDefaults. */
    private SynthStyle style;
    /**
     * Creates a new UI object for the given component.
     *
     * @param b component to create UI object for; expected to be a
     *          {@code JInternalFrame} (the cast below assumes it)
     * @return the UI object
     */
    public static ComponentUI createUI(JComponent b) {
        return new SynthInternalFrameUI((JInternalFrame)b);
    }
    /**
     * Constructs a {@code SynthInternalFrameUI} delegate for the given frame.
     *
     * @param b the internal frame this delegate manages
     */
    protected SynthInternalFrameUI(JInternalFrame b) {
        super(b);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void installDefaults() {
        // Install the layout manager, then resolve the SynthStyle for the frame.
        frame.setLayout(internalFrameLayout = createLayoutManager());
        updateStyle(frame);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    protected void installListeners() {
        super.installListeners();
        // This UI is a PropertyChangeListener; track changes on the frame itself.
        frame.addPropertyChangeListener(this);
    }
/**
* {@inheritDoc}
*/
@Override
protected void uninstallComponents() {
if (frame.getComponentPopupMenu() instanceof UIResource) {
frame.setComponentPopupMenu(null);
}
super.uninstallComponents();
}
/**
* {@inheritDoc}
*/
@Override
protected void uninstallListeners() {
frame.removePropertyChangeListener(this);
super.uninstallListeners();
}
private void updateStyle(JComponent c) {
SynthContext context = getContext(c, ENABLED);
SynthStyle oldStyle = style;
style = SynthLookAndFeel.updateStyle(context, this);
if (style != oldStyle) {
Icon frameIcon = frame.getFrameIcon();
if (frameIcon == null || frameIcon instanceof UIResource) {
frame.setFrameIcon(context.getStyle().getIcon(
context, "InternalFrame.icon"));
}
if (oldStyle != null) {
uninstallKeyboardActions();
installKeyboardActions();
}
}
context.dispose();
}
/**
* {@inheritDoc}
*/
@Override
protected void uninstallDefaults() {
SynthContext context = getContext(frame, ENABLED);
style.uninstallDefaults(context);
context.dispose();
style = null;
if(frame.getLayout() == internalFrameLayout) {
frame.setLayout(null);
}
}
/**
* {@inheritDoc}
*/
@Override
public SynthContext getContext(JComponent c) {
return getContext(c, getComponentState(c));
}
private SynthContext getContext(JComponent c, int state) {
return SynthContext.getContext(c, style, state);
}
private int getComponentState(JComponent c) {
return SynthLookAndFeel.getComponentState(c);
}
/**
* {@inheritDoc}
*/
@Override
protected JComponent createNorthPane(JInternalFrame w) {
titlePane = new SynthInternalFrameTitlePane(w);
titlePane.setName("InternalFrame.northPane");
return titlePane;
}
/**
* {@inheritDoc}
*/
@Override
protected ComponentListener createComponentListener() {
if (UIManager.getBoolean("InternalFrame.useTaskBar")) {
return new ComponentHandler() {
@Override public void componentResized(ComponentEvent e) {
if (frame != null && frame.isMaximum()) {
JDesktopPane desktop = (JDesktopPane)e.getSource();
for (Component comp : desktop.getComponents()) {
if (comp instanceof SynthDesktopPaneUI.TaskBar) {
frame.setBounds(0, 0,
desktop.getWidth(),
desktop.getHeight() - comp.getHeight());
frame.revalidate();
break;
}
}
}
// Update the new parent bounds for next resize, but don't
// let the super method touch this frame
JInternalFrame f = frame;
frame = null;
super.componentResized(e);
frame = f;
}
};
} else {
return super.createComponentListener();
}
}
/**
* Notifies this UI delegate to repaint the specified component.
* This method paints the component background, then calls
* the {@link #paint(SynthContext,Graphics)} method.
*
* <p>In general, this method does not need to be overridden by subclasses.
* All Look and Feel rendering code should reside in the {@code paint} method.
*
* @param g the {@code Graphics} object used for painting
* @param c the component being painted
* @see #paint(SynthContext,Graphics)
*/
@Override
public void update(Graphics g, JComponent c) {
SynthContext context = getContext(c);
SynthLookAndFeel.update(context, g);
context.getPainter().paintInternalFrameBackground(context,
g, 0, 0, c.getWidth(), c.getHeight());
paint(context, g);
context.dispose();
}
/**
* Paints the specified component according to the Look and Feel.
* <p>This method is not used by Synth Look and Feel.
* Painting is handled by the {@link #paint(SynthContext,Graphics)} method.
*
* @param g the {@code Graphics} object used for painting
* @param c the component being painted
* @see #paint(SynthContext,Graphics)
*/
@Override
public void paint(Graphics g, JComponent c) {
SynthContext context = getContext(c);
paint(context, g);
context.dispose();
}
/**
* Paints the specified component. This implementation does nothing.
*
* @param context context for the component being painted
* @param g the {@code Graphics} object used for painting
* @see #update(Graphics,JComponent)
*/
protected void paint(SynthContext context, Graphics g) {
}
/**
* {@inheritDoc}
*/
@Override
public void paintBorder(SynthContext context, Graphics g, int x,
int y, int w, int h) {
context.getPainter().paintInternalFrameBorder(context,
g, x, y, w, h);
}
/**
* {@inheritDoc}
*/
@Override
public void propertyChange(PropertyChangeEvent evt) {
SynthStyle oldStyle = style;
JInternalFrame f = (JInternalFrame)evt.getSource();
String prop = evt.getPropertyName();
if (SynthLookAndFeel.shouldUpdateStyle(evt)) {
updateStyle(f);
}
if (style == oldStyle &&
(prop == JInternalFrame.IS_MAXIMUM_PROPERTY ||
prop == JInternalFrame.IS_SELECTED_PROPERTY)) {
// Border (and other defaults) may need to change
SynthContext context = getContext(f, ENABLED);
style.uninstallDefaults(context);
style.installDefaults(context, this);
}
}
}
| |
/*
* Copyright (c) 2015. All Rights Reserved.
*/
package com.ndpmedia.rocketmq.store.tool;
import com.alibaba.rocketmq.client.exception.MQClientException;
import com.alibaba.rocketmq.client.producer.DefaultMQProducer;
import com.alibaba.rocketmq.client.producer.MessageQueueSelector;
import com.alibaba.rocketmq.client.producer.SendResult;
import com.alibaba.rocketmq.client.producer.selector.SelectMessageQueueByDataCenter;
import com.alibaba.rocketmq.client.producer.selector.Util;
import com.alibaba.rocketmq.client.store.DefaultLocalMessageStore;
import com.alibaba.rocketmq.common.message.Message;
import com.alibaba.rocketmq.common.message.MessageDecoder;
import com.alibaba.rocketmq.common.message.MessageQueue;
import com.google.common.util.concurrent.RateLimiter;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import java.io.*;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Command-line tool that scans the data files of a {@code DefaultLocalMessageStore},
 * verifies their record framing (size int + magic code) and re-sends every
 * recovered message through a {@code DefaultMQProducer}.
 */
public class LocalMessageStoreVerificationTool {

    /** Store data files are named by the index of their first message: digits only. */
    private static final String STORE_FILE_NAME_REGEX = "\\d+";

    private static final Pattern STORE_FILE_NAME_PATTERN = Pattern.compile(STORE_FILE_NAME_REGEX);

    // NOTE(review): '+' binds tighter than '^', so this evaluates as
    // 0xAABBCCDD ^ (1880681586 + 8). The value must match the magic code the
    // local message store writes -- confirm the grouping is intentional before
    // changing it; left as-is here.
    private static final int MAGIC_CODE = 0xAABBCCDD ^ 1880681586 + 8;

    /** Per-directory properties file holding the store's read/write cursors. */
    private static final String CONFIG_FILE_NAME = ".config";

    private static DefaultMQProducer producer;

    private static MessageQueueSelector messageQueueSelector = new ExampleMessageQueueSelector();

    /** Prints command-line usage and terminates the JVM with status 1. */
    private static void showUsageAndExit(Options options) {
        HelpFormatter helpFormatter = new HelpFormatter();
        helpFormatter.printHelp("localMessageStoreVerificationTool", options);
        System.exit(1);
    }

    /**
     * Entry point.
     *
     * <p>Required option {@code -p/--path} points at the local message store
     * directory; optional {@code -r/--rate} caps the resend rate in messages
     * per second.
     *
     * @param args command-line arguments
     * @throws IOException if reading a store file fails
     */
    public static void main(String[] args) throws IOException {
        Options options = new Options();
        options.addOption("p", "path", true, "local message store path, required");
        options.addOption("r", "rate", true, "Send message rate, optional");
        CommandLineParser commandLineParser = new DefaultParser();
        try {
            CommandLine commandLine = commandLineParser.parse(options, args);
            if (!commandLine.hasOption("p")) {
                showUsageAndExit(options);
            }
            String path = commandLine.getOptionValue("p");
            float rate = -1F;
            if (commandLine.hasOption("r")) {
                rate = Float.parseFloat(commandLine.getOptionValue("r"));
            }
            try {
                System.setProperty("enable_ssl", "true");
                producer = new DefaultMQProducer("Tool");
                producer.start();
            } catch (MQClientException e) {
                e.printStackTrace();
                System.exit(1);
            }
            try {
                checkRecursively(new File(path), rate);
            } finally {
                // Always release client resources, even when verification fails.
                producer.shutdown();
            }
        } catch (ParseException e) {
            showUsageAndExit(options);
        }
    }

    /**
     * Walks the store directory tree and verifies every file whose name looks
     * like a store data file (all digits).
     *
     * @param file file or directory to check
     * @param rate resend rate cap in msg/s; non-positive means unlimited
     * @throws IOException if reading a store file fails
     */
    private static void checkRecursively(File file, float rate) throws IOException {
        if (file.isFile()) {
            checkFile(file, rate);
        } else {
            String[] files = file.list(new FilenameFilter() {
                @Override
                public boolean accept(File dir, String name) {
                    File f = new File(dir, name);
                    if (f.isDirectory()) {
                        return true;
                    }
                    Matcher matcher = STORE_FILE_NAME_PATTERN.matcher(name);
                    return matcher.matches();
                }
            });
            if (null == files) {
                // Not a directory, or an I/O error occurred while listing.
                return;
            }
            for (String f : files) {
                checkRecursively(new File(file, f), rate);
            }
        }
    }

    /**
     * Verifies a single store data file and re-sends every message found past
     * the store's read cursor. Frames that fail the magic-code or size check
     * trigger a forward scan to the next magic code.
     *
     * @param file store data file to verify
     * @param rate resend rate cap in msg/s; non-positive means unlimited
     * @throws IOException if reading the file fails
     */
    private static void checkFile(File file, float rate) throws IOException {
        File configFile = new File(file.getParentFile(), CONFIG_FILE_NAME);
        AtomicLong writeIndex = new AtomicLong();
        AtomicLong writeOffSet = new AtomicLong();
        AtomicLong readIndex = new AtomicLong();
        AtomicLong readOffSet = new AtomicLong();
        // try-with-resources replaces the original explicit finally/close.
        try (InputStream inputStream = new FileInputStream(configFile)) {
            Properties properties = new Properties();
            properties.load(inputStream);
            writeIndex.set(null == properties.getProperty("writeIndex") ? 0L :
                    Long.parseLong(properties.getProperty("writeIndex")));
            writeOffSet.set(null == properties.getProperty("writeOffSet") ? 0L :
                    Long.parseLong(properties.getProperty("writeOffSet")));
            readIndex.set(null == properties.getProperty("readIndex") ? 0L :
                    Long.parseLong(properties.getProperty("readIndex")));
            readOffSet.set(null == properties.getProperty("readOffSet") ? 0L :
                    Long.parseLong(properties.getProperty("readOffSet")));
        } catch (IOException | NumberFormatException e) {
            // Best effort: fall back to zeroed cursors when the config file is
            // absent or malformed.
            e.printStackTrace();
        }
        long fileNumber = Long.parseLong(file.getName());
        if (fileNumber + DefaultLocalMessageStore.MESSAGES_PER_FILE < readIndex.get()) {
            // Every message in this file has already been consumed.
            return;
        }
        long count = fileNumber;
        // try-with-resources: the original leaked the file handle when a
        // decode/send error propagated out of the read loop.
        try (RandomAccessFile randomAccessFile = new RandomAccessFile(file, "r")) {
            if (readIndex.get() > fileNumber
                    && readIndex.get() < fileNumber + DefaultLocalMessageStore.MESSAGES_PER_FILE) {
                // The read cursor points into this file: skip consumed messages.
                randomAccessFile.seek(readOffSet.get());
                count = readIndex.get();
            }
            boolean hasError = false;
            RateLimiter rateLimiter = null;
            if (rate > 0) {
                rateLimiter = RateLimiter.create(rate);
            }
            while (randomAccessFile.getFilePointer() + 4 + 4 < randomAccessFile.length()) {
                int msgSize = randomAccessFile.readInt();
                int magicCode = randomAccessFile.readInt();
                if (magicCode != MAGIC_CODE) {
                    System.err.println("Illegal magic code found! Position: "
                            + (randomAccessFile.getFilePointer() - 4));
                    System.err.println("Illegal Code: [" + magicCode + "], Assumed Code: ["
                            + MAGIC_CODE + "]");
                    hasError = true;
                }
                if (!hasError && msgSize < 4 + 4) {
                    // The frame length must at least cover the size and magic
                    // ints; anything smaller is corruption and would previously
                    // have thrown NegativeArraySizeException below.
                    System.err.println("Illegal message size: [" + msgSize + "]");
                    hasError = true;
                }
                if (!hasError) {
                    byte[] data = new byte[msgSize - 4 - 4];
                    randomAccessFile.readFully(data);
                    // Rebuild the full frame so the decoder sees size + magic + body.
                    ByteBuffer byteBuffer = ByteBuffer.allocate(msgSize);
                    byteBuffer.putInt(msgSize);
                    byteBuffer.putInt(magicCode);
                    byteBuffer.put(data);
                    byteBuffer.flip();
                    final Message message = MessageDecoder.decode(byteBuffer, true, true);
                    System.out.println("Message recovered");
                    System.out.println("Msg Size: " + msgSize);
                    try {
                        System.out.println("Begin to send");
                        if (null != rateLimiter) {
                            rateLimiter.acquire();
                        }
                        SendResult sendResult = producer.send(message, messageQueueSelector, null);
                        System.out.println("Sending completes. No." + (++count) + " of " + writeIndex.get());
                        System.out.println("MsgId: " + sendResult.getMsgId());
                    } catch (Exception e) {
                        // A failed send must not abort verification of the
                        // remaining messages.
                        e.printStackTrace();
                    }
                } else {
                    System.err.println("Begin to recover from error magic code.");
                    // Scan forward one int at a time until the next magic code,
                    // stopping before EOF: the original unguarded readInt() loop
                    // could throw EOFException on a truncated tail.
                    boolean magicFound = false;
                    while (randomAccessFile.getFilePointer() + 4 <= randomAccessFile.length()) {
                        if (randomAccessFile.readInt() == MAGIC_CODE) {
                            magicFound = true;
                            break;
                        }
                    }
                    if (!magicFound) {
                        System.out.println("Recover from error done. Some broker messages might be skipped.");
                        break;
                    }
                    // Rewind to just before the size int that precedes the found
                    // magic code so the next iteration re-reads the frame header.
                    randomAccessFile.seek(randomAccessFile.getFilePointer() - 8);
                    hasError = false;
                    System.out.println("Recover from error done. Some broker messages might be skipped.");
                }
            }
        }
    }

    /**
     * Selects a random message queue hosted in the local data center, falling
     * back to an arbitrary queue when none matches.
     */
    private static class ExampleMessageQueueSelector implements MessageQueueSelector {
        @Override
        public MessageQueue select(List<MessageQueue> mqs, Message msg, Object arg) {
            List<MessageQueue> messageQueuesList = new ArrayList<>(mqs);
            Collections.shuffle(messageQueuesList);
            for (MessageQueue messageQueue : messageQueuesList) {
                String brokerName = messageQueue.getBrokerName();
                if (Util.LOCAL_DATA_CENTER_ID.equals(brokerName.split("_")[1])) {
                    return messageQueue;
                }
            }
            return messageQueuesList.get(0);
        }
    }
}
| |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser;
import static org.chromium.base.test.util.Restriction.RESTRICTION_TYPE_NON_LOW_END_DEVICE;
import android.content.Context;
import android.os.Build;
import android.test.suitebuilder.annotation.LargeTest;
import android.util.SparseArray;
import android.util.SparseBooleanArray;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.Feature;
import org.chromium.base.test.util.MinAndroidSdkLevel;
import org.chromium.base.test.util.Restriction;
import org.chromium.chrome.browser.document.DocumentModeTestBase;
import org.chromium.chrome.browser.document.DocumentTab;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.tabmodel.document.DocumentTabModelSelector;
import org.chromium.chrome.test.util.ChromeTabUtils;
import org.chromium.content.browser.BindingManager;
import org.chromium.content.browser.ChildProcessConnection;
import org.chromium.content.browser.ChildProcessLauncher;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.content.browser.test.util.CriteriaHelper;
import org.chromium.content_public.browser.LoadUrlParams;
/**
* Integration tests in document mode for the BindingManager API. This test plants a mock
* BindingManager implementation and verifies that the signals it relies on are correctly delivered.
*/
@MinAndroidSdkLevel(Build.VERSION_CODES.LOLLIPOP)
public class BindingManagerInDocumentModeIntegrationTest extends DocumentModeTestBase {
private static class MockBindingManager implements BindingManager {
// Maps pid to the last received visibility state of the renderer.
private final SparseBooleanArray mProcessInForegroundMap = new SparseBooleanArray();
// Maps pid to a string recording calls to setInForeground() and visibilityDetermined().
private final SparseArray<String> mVisibilityCallsMap = new SparseArray<String>();
private boolean mIsReleaseAllModerateBindingsCalled;
void assertIsInForeground(final int pid) {
try {
assertTrue(CriteriaHelper.pollForCriteria(new Criteria() {
@Override
public boolean isSatisfied() {
return mProcessInForegroundMap.get(pid);
}
}));
} catch (InterruptedException ie) {
fail();
}
}
void assertIsInBackground(final int pid) {
try {
assertTrue(CriteriaHelper.pollForCriteria(new Criteria() {
@Override
public boolean isSatisfied() {
return !mProcessInForegroundMap.get(pid);
}
}));
} catch (InterruptedException ie) {
fail();
}
}
void assertSetInForegroundWasCalled(String message, final int pid) {
try {
assertTrue(message, CriteriaHelper.pollForCriteria(new Criteria() {
@Override
public boolean isSatisfied() {
return mProcessInForegroundMap.indexOfKey(pid) >= 0;
}
}));
} catch (InterruptedException ie) {
fail();
}
}
void assertIsReleaseAllModerateBindingsCalled() {
try {
assertTrue(CriteriaHelper.pollForCriteria(new Criteria() {
@Override
public boolean isSatisfied() {
return mIsReleaseAllModerateBindingsCalled;
}
}));
} catch (InterruptedException ie) {
fail();
}
}
String getVisibilityCalls(int pid) {
synchronized (mVisibilityCallsMap) {
return mVisibilityCallsMap.get(pid);
}
}
boolean isReleaseAllModerateBindingsCalled() {
return mIsReleaseAllModerateBindingsCalled;
}
@Override
public void addNewConnection(int pid, ChildProcessConnection connection) {
synchronized (mVisibilityCallsMap) {
mVisibilityCallsMap.put(pid, "");
}
}
@Override
public void setInForeground(int pid, boolean inForeground) {
mProcessInForegroundMap.put(pid, inForeground);
synchronized (mVisibilityCallsMap) {
if (inForeground) {
mVisibilityCallsMap.put(pid, mVisibilityCallsMap.get(pid) + "FG;");
} else {
mVisibilityCallsMap.put(pid, mVisibilityCallsMap.get(pid) + "BG;");
}
}
}
@Override
public void determinedVisibility(int pid) {
synchronized (mVisibilityCallsMap) {
mVisibilityCallsMap.put(pid, mVisibilityCallsMap.get(pid) + "DETERMINED;");
}
}
@Override
public void onSentToBackground() {}
@Override
public void onBroughtToForeground() {}
@Override
public boolean isOomProtected(int pid) {
return false;
}
@Override
public void clearConnection(int pid) {}
@Override
public void startModerateBindingManagement(
Context context, int maxSize, float lowReduceRatio, float highReduceRatio) {}
@Override
public void releaseAllModerateBindings() {
mIsReleaseAllModerateBindingsCalled = true;
}
}
private MockBindingManager mBindingManager;
// about:version will always be handled by a different renderer than a local file.
private static final String ABOUT_VERSION_PATH = "chrome://version/";
/**
* Verifies that the .setProcessInForeground() signal is called correctly as the tabs are
* created and switched.
*/
@Restriction(RESTRICTION_TYPE_NON_LOW_END_DEVICE)
@LargeTest
@Feature({"ProcessManagement"})
public void testTabSwitching() throws Exception {
// Create two tabs and wait until they are loaded, so that their renderers are around.
final Tab[] tabs = new Tab[2];
final int[] tabIds = new int[2];
final DocumentTabModelSelector selector = ChromeApplication.getDocumentTabModelSelector();
tabIds[0] = launchViaViewIntent(false, URL_1, "Page 1");
tabIds[1] = launchViaLaunchDocumentInstanceInBackground(false, URL_2, "Page 2");
tabs[0] = selector.getTabById(tabIds[0]);
tabs[1] = selector.getTabById(tabIds[1]);
getInstrumentation().runOnMainSync(new Runnable() {
@Override
public void run() {
// Make sure that the renderers were spawned.
assertTrue(tabs[0].getContentViewCore().getCurrentRenderProcessId() > 0);
assertTrue(tabs[1].getContentViewCore().getCurrentRenderProcessId() > 0);
// Verify that the renderer of the foreground tab was signalled as visible.
mBindingManager.assertIsInForeground(
tabs[0].getContentViewCore().getCurrentRenderProcessId());
// Verify that the renderer of the tab loaded in background was signalled as not
// visible.
mBindingManager.assertIsInBackground(
tabs[1].getContentViewCore().getCurrentRenderProcessId());
}
});
switchToTab((DocumentTab) tabs[1]);
// Verify that the renderer visibility was flipped.
mBindingManager.assertIsInBackground(
tabs[0].getContentViewCore().getCurrentRenderProcessId());
mBindingManager.assertIsInForeground(
tabs[1].getContentViewCore().getCurrentRenderProcessId());
}
/**
* Verifies that a renderer that crashes in foreground has the correct visibility when
* recreated.
*/
@LargeTest
@Feature({"ProcessManagement"})
public void testCrashInForeground() throws Exception {
// Create a tab in foreground and wait until it is loaded.
final Tab tab = ChromeApplication.getDocumentTabModelSelector().getTabById(
launchViaViewIntent(false, URL_1, "Page 1"));
// Kill the renderer and wait for the crash to be noted by the browser process.
assertTrue(ChildProcessLauncher.crashProcessForTesting(
tab.getContentViewCore().getCurrentRenderProcessId()));
assertTrue("Renderer crash wasn't noticed by the browser.",
CriteriaHelper.pollForCriteria(new Criteria() {
@Override
public boolean isSatisfied() {
return tab.getContentViewCore().getCurrentRenderProcessId() == 0;
}
}));
// Reload the tab, respawning the renderer.
getInstrumentation().runOnMainSync(new Runnable() {
@Override
public void run() {
tab.reload();
}
});
// Wait until the process is spawned and its visibility is determined.
assertTrue("Process for the crashed tab was not respawned.",
CriteriaHelper.pollForCriteria(new Criteria() {
@Override
public boolean isSatisfied() {
return tab.getContentViewCore().getCurrentRenderProcessId() != 0;
}
}));
mBindingManager.assertSetInForegroundWasCalled(
"isInForeground() was not called for the process.",
tab.getContentViewCore().getCurrentRenderProcessId());
getInstrumentation().runOnMainSync(new Runnable() {
@Override
public void run() {
// Verify the visibility of the renderer.
mBindingManager.assertIsInForeground(
tab.getContentViewCore().getCurrentRenderProcessId());
}
});
}
/**
* Ensures correctness of the visibilityDetermined() calls, that should be always preceded by
* setInForeground().
*/
@Restriction(RESTRICTION_TYPE_NON_LOW_END_DEVICE)
@LargeTest
@Feature({"ProcessManagement"})
public void testVisibilityDetermined() throws Exception {
// Create a tab in foreground and wait until it is loaded.
final Tab fgTab = ChromeApplication.getDocumentTabModelSelector().getTabById(
launchViaViewIntent(false, URL_1, "Page 1"));
int initialNavigationPid = fgTab.getContentViewCore().getCurrentRenderProcessId();
// Ensure the following calls happened:
// - FG - setInForeground(true) - when the tab is created in the foreground
// - DETERMINED - visibilityDetermined() - after the initial navigation is committed
assertEquals("FG;DETERMINED;", mBindingManager.getVisibilityCalls(initialNavigationPid));
// Navigate to about:version which requires a different renderer.
getInstrumentation().runOnMainSync(new Runnable() {
@Override
public void run() {
fgTab.loadUrl(new LoadUrlParams(ABOUT_VERSION_PATH));
}
});
ChromeTabUtils.waitForTabPageLoaded(fgTab, ABOUT_VERSION_PATH);
int secondNavigationPid = fgTab.getContentViewCore().getCurrentRenderProcessId();
assertTrue(secondNavigationPid != initialNavigationPid);
// Ensure the following calls happened:
// - BG - setInForeground(false) - when the renderer is created for uncommited frame
// - FG - setInForeground(true) - when the frame is swapped in on commit
// - DETERMINED - visibilityDetermined() - after the navigation is committed
// Or BG -> DETERMINED -> FG is also possible because setInForeground() and
// visibilityDetermined() are triggered from different threads.
mBindingManager.assertIsInForeground(secondNavigationPid);
String visibilityCalls = mBindingManager.getVisibilityCalls(secondNavigationPid);
assertTrue(visibilityCalls, "BG;FG;DETERMINED;".equals(visibilityCalls)
|| "BG;DETERMINED;FG;".equals(visibilityCalls));
// Open a tab in the background and load it.
final Tab bgTab = ChromeApplication.getDocumentTabModelSelector().getTabById(
launchViaLaunchDocumentInstanceInBackground(false, URL_2, "Page 2"));
int bgNavigationPid = bgTab.getContentViewCore().getCurrentRenderProcessId();
// Ensure the following calls happened:
// - BG - setInForeground(false) - when tab is created in the background
// - DETERMINED - visibilityDetermined() - after the navigation is committed
assertEquals("BG;DETERMINED;", mBindingManager.getVisibilityCalls(bgNavigationPid));
}
/**
* Verifies that BindingManager.releaseAllModerateBindings() is called once all the sandboxed
* services are allocated.
*/
@CommandLineFlags.Add(ChildProcessLauncher.SWITCH_NUM_SANDBOXED_SERVICES_FOR_TESTING + "=4")
@LargeTest
@Feature({"ProcessManagement"})
public void testReleaseAllModerateBindings() throws Exception {
launchViaViewIntent(false, URL_1, "Page 1");
launchViaViewIntent(false, URL_1, "Page 1");
launchViaViewIntent(false, URL_1, "Page 1");
// At this point 3 sanboxed services are allocated.
assertFalse(mBindingManager.isReleaseAllModerateBindingsCalled());
launchViaViewIntent(false, URL_1, "Page 1");
// At this point all the sanboxed services are allocated.
mBindingManager.assertIsReleaseAllModerateBindingsCalled();
}
@Override
public void setUp() throws Exception {
super.setUp();
// Hook in the test binding manager.
mBindingManager = new MockBindingManager();
ChildProcessLauncher.setBindingManagerForTesting(mBindingManager);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.render.rtf.rtflib.rtfdoc;
/*
* This file is part of the RTF library of the FOP project, which was originally
* created by Bertrand Delacretaz bdelacretaz@codeconsult.ch and by other
* contributors to the jfor project (www.jfor.org), who agreed to donate jfor to
* the FOP project.
*/
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import org.apache.fop.render.rtf.rtflib.exceptions.RtfStructureException;
/**
* <p>Models the top-level structure of an RTF file.</p>
*
* <p>This work was authored by Bertrand Delacretaz (bdelacretaz@codeconsult.ch),
* Andreas Putz (a.putz@skynamics.com), and
* Christopher Scott (scottc@westinghouse.com).</p>
*/
public class RtfFile
extends RtfContainer {
    /** The RTF header; created lazily by startHeader(). */
    private RtfHeader header;
    /** The page area; created lazily by startPageArea(). */
    private RtfPageArea pageArea;
    /** The document's single list table. */
    private RtfListTable listTable;
    /** The document area; created lazily by startDocumentArea(). */
    private RtfDocumentArea docArea;
    /** Container that keeps the list table positioned inside the header. */
    private RtfContainer listTableContainer;
    /** Number of times a list table was requested. */
    private int listNum;

    /**
     * Creates an RTF file whose output goes to the given Writer.
     *
     * @param w the Writer to write to
     * @throws IOException for I/O problems
     */
    public RtfFile(Writer w) throws IOException {
        super(null, w);
    }

    /**
     * Starts the RTF header. If used at all, must be called before
     * {@link #startDocumentArea()}.
     *
     * @return the new RtfHeader
     * @throws IOException for I/O problems
     * @throws RtfStructureException if the header was already started
     */
    public RtfHeader startHeader()
            throws IOException, RtfStructureException {
        if (header != null) {
            throw new RtfStructureException("startHeader called more than once");
        }
        header = new RtfHeader(this, writer);
        listTableContainer = new RtfContainer(this, writer);
        return header;
    }

    /**
     * Creates the document's list table on first call; later calls return the
     * existing table (while still bumping the request counter).
     *
     * @param attr attributes for the RtfListTable
     * @return the RtfListTable
     * @throws IOException for I/O problems
     */
    public RtfListTable startListTable(RtfAttributes attr)
            throws IOException {
        listNum++;
        if (listTable == null) {
            listTable = new RtfListTable(this, writer, listNum, attr);
            listTableContainer.addChild(listTable);
        }
        return listTable;
    }

    /**
     * Returns the list table, or null if none was started yet.
     *
     * @return the RtfListTable
     */
    public RtfListTable getListTable() {
        return listTable;
    }

    /**
     * Ensures the header exists (creating an empty one if necessary) and
     * closes it. Shared by startPageArea() and startDocumentArea().
     *
     * @throws IOException for I/O problems
     * @throws RtfStructureException for illegal RTF structure
     */
    private void finishHeader()
            throws IOException, RtfStructureException {
        if (header == null) {
            startHeader();
        }
        header.close();
    }

    /**
     * Closes the RtfHeader if not done yet, and starts the page area.
     * May only be called once. This is not optimal: ideally multiple page
     * definitions with corresponding document areas would be supported.
     *
     * @return the RtfPageArea
     * @throws IOException for I/O problems
     * @throws RtfStructureException if the page area was already started
     */
    public RtfPageArea startPageArea()
            throws IOException, RtfStructureException {
        if (pageArea != null) {
            throw new RtfStructureException("startPageArea called more than once");
        }
        finishHeader();
        pageArea = new RtfPageArea(this, writer);
        addChild(pageArea);
        return pageArea;
    }

    /**
     * Calls startPageArea() if needed and returns the page area object.
     *
     * @return the RtfPageArea
     * @throws IOException for I/O problems
     * @throws RtfStructureException for illegal RTF structure
     */
    public RtfPageArea getPageArea()
            throws IOException, RtfStructureException {
        return (pageArea != null) ? pageArea : startPageArea();
    }

    /**
     * Closes the RtfHeader if not done yet, and starts the document area.
     * Must be called once only.
     *
     * @return the RtfDocumentArea
     * @throws IOException for I/O problems
     * @throws RtfStructureException if the document area was already started
     */
    public RtfDocumentArea startDocumentArea()
            throws IOException, RtfStructureException {
        if (docArea != null) {
            throw new RtfStructureException("startDocumentArea called more than once");
        }
        finishHeader();
        docArea = new RtfDocumentArea(this, writer);
        addChild(docArea);
        return docArea;
    }

    /**
     * Calls startDocumentArea() if needed and returns the document area object.
     *
     * @return the RtfDocumentArea
     * @throws IOException for I/O problems
     * @throws RtfStructureException for illegal RTF structure
     */
    public RtfDocumentArea getDocumentArea()
            throws IOException, RtfStructureException {
        return (docArea != null) ? docArea : startDocumentArea();
    }

    /**
     * Writes what precedes our children: the opening group mark and the
     * \rtf1 control word.
     *
     * @throws IOException for I/O problems
     */
    protected void writeRtfPrefix() throws IOException {
        writeGroupMark(true);
        writeControlWord("rtf1");
    }

    /**
     * Writes what follows our children: the closing group mark.
     *
     * @throws IOException for I/O problems
     */
    protected void writeRtfSuffix() throws IOException {
        writeGroupMark(false);
    }

    /**
     * Serializes the whole document and flushes the underlying Writer.
     * Must be called when done creating the document.
     *
     * @throws IOException for I/O problems
     */
    public synchronized void flush() throws IOException {
        writeRtf();
        writer.flush();
    }

    /**
     * Minimal test and usage example: writes a tiny RTF document either to a
     * file (first argument) or to standard output.
     *
     * @param args command-line arguments
     * @throws Exception for problems
     */
    public static void main(String[] args)
            throws Exception {
        Writer w;
        if (args.length == 0) {
            System.err.println("Outputting RTF code to standard output");
            w = new BufferedWriter(new OutputStreamWriter(System.out));
        } else {
            final String outFile = args[0];
            System.err.println("Outputting RTF to file '" + outFile + "'");
            w = new BufferedWriter(new FileWriter(outFile));
        }
        final RtfFile rtf = new RtfFile(w);
        final RtfSection section = rtf.startDocumentArea().newSection();
        final RtfParagraph para = section.newParagraph();
        para.newText("Hello, RTF world.\n", null);
        final RtfAttributes boldItalic = new RtfAttributes();
        boldItalic.set(RtfText.ATTR_BOLD);
        boldItalic.set(RtfText.ATTR_ITALIC);
        boldItalic.set(RtfText.ATTR_FONT_SIZE, 36);
        para.newText("This is bold, italic, 36 points", boldItalic);
        rtf.flush();
        System.err.println("RtfFile test: all done.");
    }
}
| |
/*
* Copyright (c) 2000-2005 Regents of the University of California.
* All rights reserved.
*
* This software was developed at the University of California, Irvine.
*
* Redistribution and use in source and binary forms are permitted
* provided that the above copyright notice and this paragraph are
* duplicated in all such forms and that any documentation,
* advertising materials, and other materials related to such
* distribution and use acknowledge that the software was developed
* by the University of California, Irvine. The name of the
* University may not be used to endorse or promote products derived
* from this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
* WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*/
package edu.uci.isr.xarch.changes;
import org.w3c.dom.*;
import edu.uci.isr.xarch.*;
import java.util.*;
/**
* DOM-Based implementation of the IChanges interface.
*
* @author Automatically generated by xArch apigen.
*/
public class ChangesImpl implements IChanges, DOMBased{
    // XML Schema identity of the "Changes" complex type this class wraps.
    public static final String XSD_TYPE_NSURI = ChangesConstants.NS_URI;
    public static final String XSD_TYPE_NAME = "Changes";
    // Owning xArch document context; used for event firing and wrapper caching.
    protected IXArch xArch;
    /** Tag name for ids in this object. */
    public static final String ID_ATTR_NAME = "id";
    /** Tag name for statuses in this object. */
    public static final String STATUS_ATTR_NAME = "status";
    /** Tag name for descriptions in this object. */
    public static final String DESCRIPTION_ELT_NAME = "description";
    /** Tag name for componentChanges in this object. */
    public static final String COMPONENT_CHANGE_ELT_NAME = "componentChange";
    /** Tag name for linkChanges in this object. */
    public static final String LINK_CHANGE_ELT_NAME = "linkChange";
    /** Tag name for interactionChanges in this object. */
    public static final String INTERACTION_CHANGE_ELT_NAME = "interactionChange";
    /** Tag name for statechartChanges in this object. */
    public static final String STATECHART_CHANGE_ELT_NAME = "statechartChange";
    // Backing DOM element wrapped by this object; never null after construction.
    protected Element elt;
    // Schema-mandated ordering of child elements; shared across all instances
    // and used to re-sort children after every insert.
    private static SequenceOrder seqOrd = new SequenceOrder(
        new QName[]{
            new QName(ChangesConstants.NS_URI, DESCRIPTION_ELT_NAME),
            new QName(ChangesConstants.NS_URI, COMPONENT_CHANGE_ELT_NAME),
            new QName(ChangesConstants.NS_URI, LINK_CHANGE_ELT_NAME),
            new QName(ChangesConstants.NS_URI, INTERACTION_CHANGE_ELT_NAME),
            new QName(ChangesConstants.NS_URI, STATECHART_CHANGE_ELT_NAME)
        }
    );
public ChangesImpl(Element elt){
if(elt == null){
throw new IllegalArgumentException("Element cannot be null.");
}
this.elt = elt;
}
    /** Returns the backing DOM element of this wrapper. */
    public Node getDOMNode(){
        return elt;
    }
    /**
     * Re-points this wrapper at a different DOM node.
     * @param node replacement node; must be an {@code ELEMENT_NODE}.
     * @throws IllegalArgumentException if the node is not an element.
     */
    public void setDOMNode(Node node){
        if(node.getNodeType() != Node.ELEMENT_NODE){
            throw new IllegalArgumentException("Base DOM node of this type must be an Element.");
        }
        elt = (Element)node;
    }
    /** Returns the schema-mandated child-element ordering shared by all instances. */
    protected static SequenceOrder getSequenceOrder(){
        return seqOrd;
    }
    /** Sets the owning xArch context (used for event firing and wrapper caching). */
    public void setXArch(IXArch xArch){
        this.xArch = xArch;
    }
    /** Returns the owning xArch context, or null if none has been set. */
    public IXArch getXArch(){
        return this.xArch;
    }
    /**
     * Clones this element to the requested depth and wraps the clone.
     * Depth 0 clones only this element, depth 1 clones this element plus its
     * immediate children (children cloned shallowly), any other depth clones
     * the whole subtree. The clone is re-imported into the owner document and
     * the wrapper inherits this wrapper's xArch context.
     */
    public IXArchElement cloneElement(int depth){
        synchronized(DOMUtils.getDOMLock(elt)){
            Document doc = elt.getOwnerDocument();
            if(depth == 0){
                Element cloneElt = (Element)elt.cloneNode(false);
                // importNode detaches the clone from any previous document context.
                cloneElt = (Element)doc.importNode(cloneElt, true);
                ChangesImpl cloneImpl = new ChangesImpl(cloneElt);
                cloneImpl.setXArch(getXArch());
                return cloneImpl;
            }
            else if(depth == 1){
                Element cloneElt = (Element)elt.cloneNode(false);
                cloneElt = (Element)doc.importNode(cloneElt, true);
                ChangesImpl cloneImpl = new ChangesImpl(cloneElt);
                cloneImpl.setXArch(getXArch());
                NodeList nl = elt.getChildNodes();
                int size = nl.getLength();
                for(int i = 0; i < size; i++){
                    Node n = nl.item(i);
                    // Children are cloned shallowly (no grandchildren).
                    Node cloneNode = (Node)n.cloneNode(false);
                    cloneNode = doc.importNode(cloneNode, true);
                    cloneElt.appendChild(cloneNode);
                }
                return cloneImpl;
            }
            else /* depth = infinity */{
                Element cloneElt = (Element)elt.cloneNode(true);
                cloneElt = (Element)doc.importNode(cloneElt, true);
                ChangesImpl cloneImpl = new ChangesImpl(cloneElt);
                cloneImpl.setXArch(getXArch());
                return cloneImpl;
            }
        }
    }
//Override 'equals' to be DOM-based...
public boolean equals(Object o){
if(o == null){
return false;
}
if(!(o instanceof DOMBased)){
return super.equals(o);
}
DOMBased db = (DOMBased)o;
Node dbNode = db.getDOMNode();
return dbNode.equals(getDOMNode());
}
//Override 'hashCode' to be based on the underlying node
public int hashCode(){
return getDOMNode().hashCode();
}
    /**
     * For internal use only.
     * Best-effort: when the element's xsi:type is a subtype of
     * {@code baseTypeName}, reflectively instantiates the generated Impl class
     * for that subtype so callers get the most-derived wrapper. Returns null
     * when the element has no xsi:type, when the type is exactly the base
     * type, or when reflection fails for any reason.
     */
    private static Object makeDerivedWrapper(Element elt, String baseTypeName){
        synchronized(DOMUtils.getDOMLock(elt)){
            QName typeName = XArchUtils.getXSIType(elt);
            if(typeName == null){
                return null;
            }
            else{
                if(!DOMUtils.hasXSIType(elt, "http://www.ics.uci.edu/pub/arch/xArch/changes.xsd", baseTypeName)){
                    try{
                        // Map the namespace URI to the generated package, then to
                        // the Impl class name, and invoke its (Element) constructor.
                        String packageTitle = XArchUtils.getPackageTitle(typeName.getNamespaceURI());
                        String packageName = XArchUtils.getPackageName(packageTitle);
                        String implName = XArchUtils.getImplName(packageName, typeName.getName());
                        Class c = Class.forName(implName);
                        java.lang.reflect.Constructor con = c.getConstructor(new Class[]{Element.class});
                        Object o = con.newInstance(new Object[]{elt});
                        return o;
                    }
                    catch(Exception e){
                        //Lots of bad things could happen, but this
                        //is OK, because this is best-effort anyway.
                    }
                }
                return null;
            }
        }
    }
    /** Returns the static type metadata declared on the IChanges interface. */
    public XArchTypeMetadata getTypeMetadata(){
        return IChanges.TYPE_METADATA;
    }
    /** Returns per-instance metadata derived from this element's namespace. */
    public XArchInstanceMetadata getInstanceMetadata(){
        return new XArchInstanceMetadata(XArchUtils.getPackageTitle(elt.getNamespaceURI()));
    }
    /**
     * Set the id attribute on this object.
     * Fires a CLEAR event for the old value followed by a SET event for the
     * new one; does nothing when the value is unchanged.
     * @param id attribute value.
     */
    public void setId(String id){
        {
            String oldValue = getId();
            // Null-safe no-op check: skip both events if nothing changes.
            if(oldValue == null ? id == null : oldValue.equals(id))
                return;
            DOMUtils.removeAttribute(elt, ChangesConstants.NS_URI, ID_ATTR_NAME);
            IXArch _x = getXArch();
            if(_x != null){
                _x.fireXArchEvent(
                    new XArchEvent(this,
                        XArchEvent.CLEAR_EVENT,
                        XArchEvent.ATTRIBUTE_CHANGED,
                        "id", oldValue,
                        // NOTE(review): the trailing 'true' flag is absent on the SET
                        // event below; presumably marks this CLEAR as part of a
                        // compound set operation -- TODO confirm against XArchEvent.
                        XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this), true)
                );
            }
        }
        DOMUtils.setAttribute(elt, ChangesConstants.NS_URI, ID_ATTR_NAME, id);
        IXArch _x = getXArch();
        if(_x != null){
            _x.fireXArchEvent(
                new XArchEvent(this,
                    XArchEvent.SET_EVENT,
                    XArchEvent.ATTRIBUTE_CHANGED,
                    "id", id,
                    XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }
    /**
     * Removes the id attribute from this object.
     * No-op (and no event) when the attribute is already absent.
     */
    public void clearId(){
        String oldValue = getId();
        if(oldValue == null)
            return;
        DOMUtils.removeAttribute(elt, ChangesConstants.NS_URI, ID_ATTR_NAME);
        IXArch _x = getXArch();
        if(_x != null){
            _x.fireXArchEvent(
                new XArchEvent(this,
                    XArchEvent.CLEAR_EVENT,
                    XArchEvent.ATTRIBUTE_CHANGED,
                    "id", oldValue,
                    XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }
    /**
     * Gets the value of the id attribute on this object.
     * @return id attribute's value or <code>null</code> if that
     * attribute is not set.
     */
    public String getId(){
        return DOMUtils.getAttributeValue(elt, ChangesConstants.NS_URI, ID_ATTR_NAME);
    }
    /**
     * Determines if this object's id attribute has the
     * given value.
     * @param id value to test.
     * @return <code>true</code> if the values match, <code>false</code> otherwise.
     * Matching is done by string-matching.
     */
    public boolean hasId(String id){
        // objNullEq: null-safe equality (two nulls match).
        return DOMUtils.objNullEq(getId(), id);
    }
    /**
     * Set the status attribute on this object.
     * Fires a CLEAR event for the old value followed by a SET event for the
     * new one; does nothing when the value is unchanged.
     * @param status attribute value.
     */
    public void setStatus(String status){
        {
            String oldValue = getStatus();
            // Null-safe no-op check: skip both events if nothing changes.
            if(oldValue == null ? status == null : oldValue.equals(status))
                return;
            DOMUtils.removeAttribute(elt, ChangesConstants.NS_URI, STATUS_ATTR_NAME);
            IXArch _x = getXArch();
            if(_x != null){
                _x.fireXArchEvent(
                    new XArchEvent(this,
                        XArchEvent.CLEAR_EVENT,
                        XArchEvent.ATTRIBUTE_CHANGED,
                        "status", oldValue,
                        // NOTE(review): trailing 'true' flag as in setId -- TODO confirm.
                        XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this), true)
                );
            }
        }
        DOMUtils.setAttribute(elt, ChangesConstants.NS_URI, STATUS_ATTR_NAME, status);
        IXArch _x = getXArch();
        if(_x != null){
            _x.fireXArchEvent(
                new XArchEvent(this,
                    XArchEvent.SET_EVENT,
                    XArchEvent.ATTRIBUTE_CHANGED,
                    "status", status,
                    XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }
    /**
     * Removes the status attribute from this object.
     * No-op (and no event) when the attribute is already absent.
     */
    public void clearStatus(){
        String oldValue = getStatus();
        if(oldValue == null)
            return;
        DOMUtils.removeAttribute(elt, ChangesConstants.NS_URI, STATUS_ATTR_NAME);
        IXArch _x = getXArch();
        if(_x != null){
            _x.fireXArchEvent(
                new XArchEvent(this,
                    XArchEvent.CLEAR_EVENT,
                    XArchEvent.ATTRIBUTE_CHANGED,
                    "status", oldValue,
                    XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }
    /**
     * Gets the value of the status attribute on this object.
     * @return status attribute's value or <code>null</code> if that
     * attribute is not set.
     */
    public String getStatus(){
        return DOMUtils.getAttributeValue(elt, ChangesConstants.NS_URI, STATUS_ATTR_NAME);
    }
    /**
     * Determines if this object's status attribute has the
     * given value.
     * @param status value to test.
     * @return <code>true</code> if the values match, <code>false</code> otherwise.
     * Matching is done by string-matching.
     */
    public boolean hasStatus(String status){
        // objNullEq: null-safe equality (two nulls match).
        return DOMUtils.objNullEq(getStatus(), status);
    }
    /**
     * Sets the (single) description child of this element, replacing any
     * existing one. The child's DOM node is cloned, renamed to the schema tag
     * and re-parented here; the passed wrapper is re-pointed at the clone.
     * Fires a CLEAR event for the old child and a SET event for the new one.
     * @param value new description; must be DOM-based.
     */
    public void setDescription(edu.uci.isr.xarch.instance.IDescription value){
        if(!(value instanceof DOMBased)){
            throw new IllegalArgumentException("Cannot handle non-DOM-based xArch entities.");
        }
        {
            edu.uci.isr.xarch.instance.IDescription oldElt = getDescription();
            DOMUtils.removeChildren(elt, ChangesConstants.NS_URI, DESCRIPTION_ELT_NAME);
            IXArch context = getXArch();
            if(context != null){
                context.fireXArchEvent(
                    new XArchEvent(this,
                        XArchEvent.CLEAR_EVENT,
                        XArchEvent.ELEMENT_CHANGED,
                        "description", oldElt,
                        // NOTE(review): trailing 'true' flag marks this CLEAR as part
                        // of a compound set -- TODO confirm against XArchEvent.
                        XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this), true)
                );
            }
        }
        Element newChildElt = (Element)(((DOMBased)value).getDOMNode());
        newChildElt = DOMUtils.cloneAndRename(newChildElt, ChangesConstants.NS_URI, DESCRIPTION_ELT_NAME);
        ((DOMBased)value).setDOMNode(newChildElt);
        synchronized(DOMUtils.getDOMLock(elt)){
            elt.appendChild(newChildElt);
            // Restore schema-mandated child ordering after the append.
            DOMUtils.order(elt, getSequenceOrder());
        }
        IXArch context = getXArch();
        if(context != null){
            context.fireXArchEvent(
                new XArchEvent(this,
                    XArchEvent.SET_EVENT,
                    XArchEvent.ELEMENT_CHANGED,
                    "description", value,
                    XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }
    /** Removes the description child, firing a CLEAR event with the old value. */
    public void clearDescription(){
        edu.uci.isr.xarch.instance.IDescription oldElt = getDescription();
        DOMUtils.removeChildren(elt, ChangesConstants.NS_URI, DESCRIPTION_ELT_NAME);
        IXArch context = getXArch();
        if(context != null){
            context.fireXArchEvent(
                new XArchEvent(this,
                    XArchEvent.CLEAR_EVENT,
                    XArchEvent.ELEMENT_CHANGED,
                    "description", oldElt,
                    XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }
    /**
     * Returns a wrapper for the first description child, or null if there is
     * none. Prefers (in order): a wrapper cached on the xArch context, a
     * reflectively-created derived-type wrapper, then a plain DescriptionImpl.
     */
    public edu.uci.isr.xarch.instance.IDescription getDescription(){
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, DESCRIPTION_ELT_NAME);
        if(nl.getLength() == 0){
            return null;
        }
        else{
            Element el = (Element)nl.item(0);
            IXArch de = getXArch();
            if(de != null){
                // Reuse a previously-created wrapper when the context has one cached.
                IXArchElement cachedXArchElt = de.getWrapper(el);
                if(cachedXArchElt != null){
                    return (edu.uci.isr.xarch.instance.IDescription)cachedXArchElt;
                }
            }
            // Best-effort: build the most-derived wrapper for the xsi:type.
            Object o = makeDerivedWrapper(el, "Description");
            if(o != null){
                try{
                    ((edu.uci.isr.xarch.IXArchElement)o).setXArch(getXArch());
                    if(de != null){
                        de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)o));
                    }
                    return (edu.uci.isr.xarch.instance.IDescription)o;
                }
                catch(Exception e){}
            }
            // Fallback: wrap with the base implementation type.
            edu.uci.isr.xarch.instance.DescriptionImpl eltImpl = new edu.uci.isr.xarch.instance.DescriptionImpl(el);
            eltImpl.setXArch(getXArch());
            if(de != null){
                de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)eltImpl));
            }
            return eltImpl;
        }
    }
public boolean hasDescription(edu.uci.isr.xarch.instance.IDescription value){
edu.uci.isr.xarch.instance.IDescription thisValue = getDescription();
edu.uci.isr.xarch.instance.IDescription thatValue = value;
if((thisValue == null) && (thatValue == null)){
return true;
}
else if((thisValue == null) && (thatValue != null)){
return false;
}
else if((thisValue != null) && (thatValue == null)){
return false;
}
return thisValue.isEquivalent(thatValue);
}
    /**
     * Adds a componentChange child. The child's DOM node is cloned, renamed to
     * the schema tag and re-parented here; the passed wrapper is re-pointed at
     * the clone. Fires an ADD event on the owning context, if any.
     * @param newComponentChange child to add; must be DOM-based.
     */
    public void addComponentChange(IComponentChange newComponentChange){
        if(!(newComponentChange instanceof DOMBased)){
            throw new IllegalArgumentException("Cannot handle non-DOM-based xArch entities.");
        }
        Element newChildElt = (Element)(((DOMBased)newComponentChange).getDOMNode());
        newChildElt = DOMUtils.cloneAndRename(newChildElt, ChangesConstants.NS_URI, COMPONENT_CHANGE_ELT_NAME);
        ((DOMBased)newComponentChange).setDOMNode(newChildElt);
        synchronized(DOMUtils.getDOMLock(elt)){
            elt.appendChild(newChildElt);
            // Restore schema-mandated child ordering after the append.
            DOMUtils.order(elt, getSequenceOrder());
        }
        IXArch context = getXArch();
        if(context != null){
            context.fireXArchEvent(
                new XArchEvent(this,
                    XArchEvent.ADD_EVENT,
                    XArchEvent.ELEMENT_CHANGED,
                    "componentChange", newComponentChange,
                    XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }
public void addComponentChanges(Collection componentChanges){
for(Iterator en = componentChanges.iterator(); en.hasNext(); ){
IComponentChange elt = (IComponentChange)en.next();
addComponentChange(elt);
}
}
    /** Removes all componentChange children. */
    public void clearComponentChanges(){
        //DOMUtils.removeChildren(elt, ChangesConstants.NS_URI, COMPONENT_CHANGE_ELT_NAME);
        // Removes children one-by-one rather than bulk-removing, presumably so
        // a REMOVE event fires for each child -- TODO confirm.
        Collection coll = getAllComponentChanges();
        removeComponentChanges(coll);
    }
    /**
     * Removes the componentChange child whose DOM node is identical (==) to
     * the given wrapper's node, firing a REMOVE event. Silently does nothing
     * when no matching child exists.
     * @param componentChangeToRemove child to remove; must be DOM-based.
     */
    public void removeComponentChange(IComponentChange componentChangeToRemove){
        if(!(componentChangeToRemove instanceof DOMBased)){
            throw new IllegalArgumentException("Cannot handle non-DOM-based xArch entities.");
        }
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, COMPONENT_CHANGE_ELT_NAME);
        for(int i = 0; i < nl.getLength(); i++){
            Node n = nl.item(i);
            // Reference identity: only the exact same node is removed.
            if(n == ((DOMBased)componentChangeToRemove).getDOMNode()){
                synchronized(DOMUtils.getDOMLock(elt)){
                    elt.removeChild(n);
                }
                IXArch context = getXArch();
                if(context != null){
                    context.fireXArchEvent(
                        new XArchEvent(this,
                            XArchEvent.REMOVE_EVENT,
                            XArchEvent.ELEMENT_CHANGED,
                            "componentChange", componentChangeToRemove,
                            XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
                    );
                }
                return;
            }
        }
    }
public void removeComponentChanges(Collection componentChanges){
for(Iterator en = componentChanges.iterator(); en.hasNext(); ){
IComponentChange elt = (IComponentChange)en.next();
removeComponentChange(elt);
}
}
    /**
     * Returns wrappers for all componentChange children, in document order.
     * Per child, prefers (in order): a wrapper cached on the xArch context, a
     * reflectively-created derived-type wrapper, then a plain ComponentChangeImpl.
     * @return collection of IComponentChange; empty when there are none.
     */
    public Collection getAllComponentChanges(){
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, COMPONENT_CHANGE_ELT_NAME);
        int nlLength = nl.getLength();
        ArrayList v = new ArrayList(nlLength);
        IXArch de = getXArch();
        for(int i = 0; i < nlLength; i++){
            Element el = (Element)nl.item(i);
            boolean found = false;
            if(de != null){
                IXArchElement cachedXArchElt = de.getWrapper(el);
                if(cachedXArchElt != null){
                    v.add((IComponentChange)cachedXArchElt);
                    found = true;
                }
            }
            if(!found){
                // Best-effort derived wrapper first; fall back to the base Impl
                // (also on any failure while wiring the derived wrapper up).
                Object o = makeDerivedWrapper(el, "ComponentChange");
                if(o != null){
                    try{
                        ((edu.uci.isr.xarch.IXArchElement)o).setXArch(getXArch());
                        if(de != null){
                            de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)o));
                        }
                        v.add((IComponentChange)o);
                    }
                    catch(Exception e){
                        ComponentChangeImpl eltImpl = new ComponentChangeImpl((Element)nl.item(i));
                        eltImpl.setXArch(getXArch());
                        if(de != null){
                            de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)eltImpl));
                        }
                        v.add(eltImpl);
                    }
                }
                else{
                    ComponentChangeImpl eltImpl = new ComponentChangeImpl((Element)nl.item(i));
                    eltImpl.setXArch(getXArch());
                    if(de != null){
                        de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)eltImpl));
                    }
                    v.add(eltImpl);
                }
            }
        }
        return v;
    }
public boolean hasComponentChange(IComponentChange componentChangeToCheck){
Collection c = getAllComponentChanges();
for(Iterator en = c.iterator(); en.hasNext(); ){
IComponentChange elt = (IComponentChange)en.next();
if(elt.isEquivalent(componentChangeToCheck)){
return true;
}
}
return false;
}
public Collection hasComponentChanges(Collection componentChangesToCheck){
Vector v = new Vector();
for(Iterator en = componentChangesToCheck.iterator(); en.hasNext(); ){
IComponentChange elt = (IComponentChange)en.next();
v.addElement(new Boolean(hasComponentChange(elt)));
}
return v;
}
    /**
     * Returns true iff every element of the collection has an equivalent
     * componentChange child here (vacuously true for an empty collection).
     */
    public boolean hasAllComponentChanges(Collection componentChangesToCheck){
        for(Iterator en = componentChangesToCheck.iterator(); en.hasNext(); ){
            IComponentChange elt = (IComponentChange)en.next();
            if(!hasComponentChange(elt)){
                return false;
            }
        }
        return true;
    }
    /**
     * Returns the first componentChange child whose id matches (null-safe),
     * or null when none does. Prefers a derived-type wrapper when available.
     */
    public IComponentChange getComponentChange(String id){
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, COMPONENT_CHANGE_ELT_NAME);
        for(int i = 0; i < nl.getLength(); i++){
            // Temporary base wrapper is built per child just to read its id.
            IComponentChange el = new ComponentChangeImpl((Element)nl.item(i));
            if(DOMUtils.objNullEq(id, el.getId())){
                Element domElt = (Element)nl.item(i);
                Object o = makeDerivedWrapper(domElt, "ComponentChange");
                if(o != null){
                    try{
                        ((edu.uci.isr.xarch.IXArchElement)o).setXArch(getXArch());
                        return (IComponentChange)o;
                    }
                    catch(Exception e){}
                }
                el.setXArch(getXArch());
                return el;
            }
        }
        return null;
    }
    /**
     * Looks up a componentChange child for each id in the collection.
     * @param ids collection of String ids.
     * @return matching wrappers, in iteration order of the ids.
     */
    public Collection getComponentChanges(Collection ids){
        //If there is an ID that does not exist, it is simply ignored.
        //You can tell if this happened if ids.size() != returned collection.size().
        Vector v = new Vector();
        for(Iterator en = ids.iterator(); en.hasNext(); ){
            String elt = (String)en.next();
            IComponentChange retElt = getComponentChange(elt);
            if(retElt != null){
                v.addElement(retElt);
            }
        }
        return v;
    }
    /**
     * Adds a linkChange child (clone-rename-reparent, then ADD event);
     * mirrors addComponentChange.
     */
    public void addLinkChange(ILinkChange newLinkChange){
        if(!(newLinkChange instanceof DOMBased)){
            throw new IllegalArgumentException("Cannot handle non-DOM-based xArch entities.");
        }
        Element newChildElt = (Element)(((DOMBased)newLinkChange).getDOMNode());
        newChildElt = DOMUtils.cloneAndRename(newChildElt, ChangesConstants.NS_URI, LINK_CHANGE_ELT_NAME);
        ((DOMBased)newLinkChange).setDOMNode(newChildElt);
        synchronized(DOMUtils.getDOMLock(elt)){
            elt.appendChild(newChildElt);
            DOMUtils.order(elt, getSequenceOrder());
        }
        IXArch context = getXArch();
        if(context != null){
            context.fireXArchEvent(
                new XArchEvent(this,
                    XArchEvent.ADD_EVENT,
                    XArchEvent.ELEMENT_CHANGED,
                    "linkChange", newLinkChange,
                    XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }
    /** Adds every ILinkChange in the given collection. */
    public void addLinkChanges(Collection linkChanges){
        for(Iterator en = linkChanges.iterator(); en.hasNext(); ){
            ILinkChange elt = (ILinkChange)en.next();
            addLinkChange(elt);
        }
    }
    /** Removes all linkChange children (one-by-one so events fire per child). */
    public void clearLinkChanges(){
        //DOMUtils.removeChildren(elt, ChangesConstants.NS_URI, LINK_CHANGE_ELT_NAME);
        Collection coll = getAllLinkChanges();
        removeLinkChanges(coll);
    }
    /**
     * Removes the linkChange child whose DOM node is identical (==) to the
     * given wrapper's node, firing a REMOVE event; no-op when absent.
     */
    public void removeLinkChange(ILinkChange linkChangeToRemove){
        if(!(linkChangeToRemove instanceof DOMBased)){
            throw new IllegalArgumentException("Cannot handle non-DOM-based xArch entities.");
        }
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, LINK_CHANGE_ELT_NAME);
        for(int i = 0; i < nl.getLength(); i++){
            Node n = nl.item(i);
            if(n == ((DOMBased)linkChangeToRemove).getDOMNode()){
                synchronized(DOMUtils.getDOMLock(elt)){
                    elt.removeChild(n);
                }
                IXArch context = getXArch();
                if(context != null){
                    context.fireXArchEvent(
                        new XArchEvent(this,
                            XArchEvent.REMOVE_EVENT,
                            XArchEvent.ELEMENT_CHANGED,
                            "linkChange", linkChangeToRemove,
                            XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
                    );
                }
                return;
            }
        }
    }
    /** Removes every ILinkChange in the given collection. */
    public void removeLinkChanges(Collection linkChanges){
        for(Iterator en = linkChanges.iterator(); en.hasNext(); ){
            ILinkChange elt = (ILinkChange)en.next();
            removeLinkChange(elt);
        }
    }
    /**
     * Returns wrappers for all linkChange children in document order;
     * cache, then derived wrapper, then LinkChangeImpl fallback
     * (mirrors getAllComponentChanges).
     */
    public Collection getAllLinkChanges(){
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, LINK_CHANGE_ELT_NAME);
        int nlLength = nl.getLength();
        ArrayList v = new ArrayList(nlLength);
        IXArch de = getXArch();
        for(int i = 0; i < nlLength; i++){
            Element el = (Element)nl.item(i);
            boolean found = false;
            if(de != null){
                IXArchElement cachedXArchElt = de.getWrapper(el);
                if(cachedXArchElt != null){
                    v.add((ILinkChange)cachedXArchElt);
                    found = true;
                }
            }
            if(!found){
                Object o = makeDerivedWrapper(el, "LinkChange");
                if(o != null){
                    try{
                        ((edu.uci.isr.xarch.IXArchElement)o).setXArch(getXArch());
                        if(de != null){
                            de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)o));
                        }
                        v.add((ILinkChange)o);
                    }
                    catch(Exception e){
                        LinkChangeImpl eltImpl = new LinkChangeImpl((Element)nl.item(i));
                        eltImpl.setXArch(getXArch());
                        if(de != null){
                            de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)eltImpl));
                        }
                        v.add(eltImpl);
                    }
                }
                else{
                    LinkChangeImpl eltImpl = new LinkChangeImpl((Element)nl.item(i));
                    eltImpl.setXArch(getXArch());
                    if(de != null){
                        de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)eltImpl));
                    }
                    v.add(eltImpl);
                }
            }
        }
        return v;
    }
    /** True iff some linkChange child is equivalent to the given one. */
    public boolean hasLinkChange(ILinkChange linkChangeToCheck){
        Collection c = getAllLinkChanges();
        for(Iterator en = c.iterator(); en.hasNext(); ){
            ILinkChange elt = (ILinkChange)en.next();
            if(elt.isEquivalent(linkChangeToCheck)){
                return true;
            }
        }
        return false;
    }
public Collection hasLinkChanges(Collection linkChangesToCheck){
Vector v = new Vector();
for(Iterator en = linkChangesToCheck.iterator(); en.hasNext(); ){
ILinkChange elt = (ILinkChange)en.next();
v.addElement(new Boolean(hasLinkChange(elt)));
}
return v;
}
    /**
     * Returns true iff every element of the collection has an equivalent
     * linkChange child here (vacuously true for an empty collection).
     */
    public boolean hasAllLinkChanges(Collection linkChangesToCheck){
        for(Iterator en = linkChangesToCheck.iterator(); en.hasNext(); ){
            ILinkChange elt = (ILinkChange)en.next();
            if(!hasLinkChange(elt)){
                return false;
            }
        }
        return true;
    }
    /**
     * Returns the first linkChange child whose id matches (null-safe), or
     * null when none does. Prefers a derived-type wrapper when available.
     */
    public ILinkChange getLinkChange(String id){
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, LINK_CHANGE_ELT_NAME);
        for(int i = 0; i < nl.getLength(); i++){
            ILinkChange el = new LinkChangeImpl((Element)nl.item(i));
            if(DOMUtils.objNullEq(id, el.getId())){
                Element domElt = (Element)nl.item(i);
                Object o = makeDerivedWrapper(domElt, "LinkChange");
                if(o != null){
                    try{
                        ((edu.uci.isr.xarch.IXArchElement)o).setXArch(getXArch());
                        return (ILinkChange)o;
                    }
                    catch(Exception e){}
                }
                el.setXArch(getXArch());
                return el;
            }
        }
        return null;
    }
    /** Looks up a linkChange child for each id in the collection. */
    public Collection getLinkChanges(Collection ids){
        //If there is an ID that does not exist, it is simply ignored.
        //You can tell if this happened if ids.size() != returned collection.size().
        Vector v = new Vector();
        for(Iterator en = ids.iterator(); en.hasNext(); ){
            String elt = (String)en.next();
            ILinkChange retElt = getLinkChange(elt);
            if(retElt != null){
                v.addElement(retElt);
            }
        }
        return v;
    }
    /**
     * Adds an interactionChange child (clone-rename-reparent, then ADD event);
     * mirrors addComponentChange.
     */
    public void addInteractionChange(IInteractionChange newInteractionChange){
        if(!(newInteractionChange instanceof DOMBased)){
            throw new IllegalArgumentException("Cannot handle non-DOM-based xArch entities.");
        }
        Element newChildElt = (Element)(((DOMBased)newInteractionChange).getDOMNode());
        newChildElt = DOMUtils.cloneAndRename(newChildElt, ChangesConstants.NS_URI, INTERACTION_CHANGE_ELT_NAME);
        ((DOMBased)newInteractionChange).setDOMNode(newChildElt);
        synchronized(DOMUtils.getDOMLock(elt)){
            elt.appendChild(newChildElt);
            DOMUtils.order(elt, getSequenceOrder());
        }
        IXArch context = getXArch();
        if(context != null){
            context.fireXArchEvent(
                new XArchEvent(this,
                    XArchEvent.ADD_EVENT,
                    XArchEvent.ELEMENT_CHANGED,
                    "interactionChange", newInteractionChange,
                    XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }
    /** Adds every IInteractionChange in the given collection. */
    public void addInteractionChanges(Collection interactionChanges){
        for(Iterator en = interactionChanges.iterator(); en.hasNext(); ){
            IInteractionChange elt = (IInteractionChange)en.next();
            addInteractionChange(elt);
        }
    }
    /** Removes all interactionChange children (one-by-one so events fire per child). */
    public void clearInteractionChanges(){
        //DOMUtils.removeChildren(elt, ChangesConstants.NS_URI, INTERACTION_CHANGE_ELT_NAME);
        Collection coll = getAllInteractionChanges();
        removeInteractionChanges(coll);
    }
    /**
     * Removes the interactionChange child whose DOM node is identical (==) to
     * the given wrapper's node, firing a REMOVE event; no-op when absent.
     */
    public void removeInteractionChange(IInteractionChange interactionChangeToRemove){
        if(!(interactionChangeToRemove instanceof DOMBased)){
            throw new IllegalArgumentException("Cannot handle non-DOM-based xArch entities.");
        }
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, INTERACTION_CHANGE_ELT_NAME);
        for(int i = 0; i < nl.getLength(); i++){
            Node n = nl.item(i);
            if(n == ((DOMBased)interactionChangeToRemove).getDOMNode()){
                synchronized(DOMUtils.getDOMLock(elt)){
                    elt.removeChild(n);
                }
                IXArch context = getXArch();
                if(context != null){
                    context.fireXArchEvent(
                        new XArchEvent(this,
                            XArchEvent.REMOVE_EVENT,
                            XArchEvent.ELEMENT_CHANGED,
                            "interactionChange", interactionChangeToRemove,
                            XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
                    );
                }
                return;
            }
        }
    }
    /** Removes every IInteractionChange in the given collection. */
    public void removeInteractionChanges(Collection interactionChanges){
        for(Iterator en = interactionChanges.iterator(); en.hasNext(); ){
            IInteractionChange elt = (IInteractionChange)en.next();
            removeInteractionChange(elt);
        }
    }
    /**
     * Returns wrappers for all interactionChange children in document order;
     * cache, then derived wrapper, then InteractionChangeImpl fallback
     * (mirrors getAllComponentChanges).
     */
    public Collection getAllInteractionChanges(){
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, INTERACTION_CHANGE_ELT_NAME);
        int nlLength = nl.getLength();
        ArrayList v = new ArrayList(nlLength);
        IXArch de = getXArch();
        for(int i = 0; i < nlLength; i++){
            Element el = (Element)nl.item(i);
            boolean found = false;
            if(de != null){
                IXArchElement cachedXArchElt = de.getWrapper(el);
                if(cachedXArchElt != null){
                    v.add((IInteractionChange)cachedXArchElt);
                    found = true;
                }
            }
            if(!found){
                Object o = makeDerivedWrapper(el, "InteractionChange");
                if(o != null){
                    try{
                        ((edu.uci.isr.xarch.IXArchElement)o).setXArch(getXArch());
                        if(de != null){
                            de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)o));
                        }
                        v.add((IInteractionChange)o);
                    }
                    catch(Exception e){
                        InteractionChangeImpl eltImpl = new InteractionChangeImpl((Element)nl.item(i));
                        eltImpl.setXArch(getXArch());
                        if(de != null){
                            de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)eltImpl));
                        }
                        v.add(eltImpl);
                    }
                }
                else{
                    InteractionChangeImpl eltImpl = new InteractionChangeImpl((Element)nl.item(i));
                    eltImpl.setXArch(getXArch());
                    if(de != null){
                        de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)eltImpl));
                    }
                    v.add(eltImpl);
                }
            }
        }
        return v;
    }
    /** True iff some interactionChange child is equivalent to the given one. */
    public boolean hasInteractionChange(IInteractionChange interactionChangeToCheck){
        Collection c = getAllInteractionChanges();
        for(Iterator en = c.iterator(); en.hasNext(); ){
            IInteractionChange elt = (IInteractionChange)en.next();
            if(elt.isEquivalent(interactionChangeToCheck)){
                return true;
            }
        }
        return false;
    }
public Collection hasInteractionChanges(Collection interactionChangesToCheck){
Vector v = new Vector();
for(Iterator en = interactionChangesToCheck.iterator(); en.hasNext(); ){
IInteractionChange elt = (IInteractionChange)en.next();
v.addElement(new Boolean(hasInteractionChange(elt)));
}
return v;
}
    /**
     * Returns true iff every element of the collection has an equivalent
     * interactionChange child here (vacuously true for an empty collection).
     */
    public boolean hasAllInteractionChanges(Collection interactionChangesToCheck){
        for(Iterator en = interactionChangesToCheck.iterator(); en.hasNext(); ){
            IInteractionChange elt = (IInteractionChange)en.next();
            if(!hasInteractionChange(elt)){
                return false;
            }
        }
        return true;
    }
    /**
     * Returns the first interactionChange child whose id matches (null-safe),
     * or null when none does. Prefers a derived-type wrapper when available.
     */
    public IInteractionChange getInteractionChange(String id){
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, INTERACTION_CHANGE_ELT_NAME);
        for(int i = 0; i < nl.getLength(); i++){
            IInteractionChange el = new InteractionChangeImpl((Element)nl.item(i));
            if(DOMUtils.objNullEq(id, el.getId())){
                Element domElt = (Element)nl.item(i);
                Object o = makeDerivedWrapper(domElt, "InteractionChange");
                if(o != null){
                    try{
                        ((edu.uci.isr.xarch.IXArchElement)o).setXArch(getXArch());
                        return (IInteractionChange)o;
                    }
                    catch(Exception e){}
                }
                el.setXArch(getXArch());
                return el;
            }
        }
        return null;
    }
    /** Looks up an interactionChange child for each id in the collection. */
    public Collection getInteractionChanges(Collection ids){
        //If there is an ID that does not exist, it is simply ignored.
        //You can tell if this happened if ids.size() != returned collection.size().
        Vector v = new Vector();
        for(Iterator en = ids.iterator(); en.hasNext(); ){
            String elt = (String)en.next();
            IInteractionChange retElt = getInteractionChange(elt);
            if(retElt != null){
                v.addElement(retElt);
            }
        }
        return v;
    }
    /**
     * Adds a statechartChange child (clone-rename-reparent, then ADD event);
     * mirrors addComponentChange.
     */
    public void addStatechartChange(IStatechartChange newStatechartChange){
        if(!(newStatechartChange instanceof DOMBased)){
            throw new IllegalArgumentException("Cannot handle non-DOM-based xArch entities.");
        }
        Element newChildElt = (Element)(((DOMBased)newStatechartChange).getDOMNode());
        newChildElt = DOMUtils.cloneAndRename(newChildElt, ChangesConstants.NS_URI, STATECHART_CHANGE_ELT_NAME);
        ((DOMBased)newStatechartChange).setDOMNode(newChildElt);
        synchronized(DOMUtils.getDOMLock(elt)){
            elt.appendChild(newChildElt);
            DOMUtils.order(elt, getSequenceOrder());
        }
        IXArch context = getXArch();
        if(context != null){
            context.fireXArchEvent(
                new XArchEvent(this,
                    XArchEvent.ADD_EVENT,
                    XArchEvent.ELEMENT_CHANGED,
                    "statechartChange", newStatechartChange,
                    XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }
    /** Adds every IStatechartChange in the given collection. */
    public void addStatechartChanges(Collection statechartChanges){
        for(Iterator en = statechartChanges.iterator(); en.hasNext(); ){
            IStatechartChange elt = (IStatechartChange)en.next();
            addStatechartChange(elt);
        }
    }
    /** Removes all statechartChange children (one-by-one so events fire per child). */
    public void clearStatechartChanges(){
        //DOMUtils.removeChildren(elt, ChangesConstants.NS_URI, STATECHART_CHANGE_ELT_NAME);
        Collection coll = getAllStatechartChanges();
        removeStatechartChanges(coll);
    }
    /**
     * Removes the statechartChange child whose DOM node is identical (==) to
     * the given wrapper's node, firing a REMOVE event; no-op when absent.
     */
    public void removeStatechartChange(IStatechartChange statechartChangeToRemove){
        if(!(statechartChangeToRemove instanceof DOMBased)){
            throw new IllegalArgumentException("Cannot handle non-DOM-based xArch entities.");
        }
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, STATECHART_CHANGE_ELT_NAME);
        for(int i = 0; i < nl.getLength(); i++){
            Node n = nl.item(i);
            if(n == ((DOMBased)statechartChangeToRemove).getDOMNode()){
                synchronized(DOMUtils.getDOMLock(elt)){
                    elt.removeChild(n);
                }
                IXArch context = getXArch();
                if(context != null){
                    context.fireXArchEvent(
                        new XArchEvent(this,
                            XArchEvent.REMOVE_EVENT,
                            XArchEvent.ELEMENT_CHANGED,
                            "statechartChange", statechartChangeToRemove,
                            XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
                    );
                }
                return;
            }
        }
    }
    /** Removes every IStatechartChange in the given collection. */
    public void removeStatechartChanges(Collection statechartChanges){
        for(Iterator en = statechartChanges.iterator(); en.hasNext(); ){
            IStatechartChange elt = (IStatechartChange)en.next();
            removeStatechartChange(elt);
        }
    }
    /**
     * Returns wrappers for all statechartChange children in document order;
     * cache, then derived wrapper, then StatechartChangeImpl fallback
     * (mirrors getAllComponentChanges).
     */
    public Collection getAllStatechartChanges(){
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, STATECHART_CHANGE_ELT_NAME);
        int nlLength = nl.getLength();
        ArrayList v = new ArrayList(nlLength);
        IXArch de = getXArch();
        for(int i = 0; i < nlLength; i++){
            Element el = (Element)nl.item(i);
            boolean found = false;
            if(de != null){
                IXArchElement cachedXArchElt = de.getWrapper(el);
                if(cachedXArchElt != null){
                    v.add((IStatechartChange)cachedXArchElt);
                    found = true;
                }
            }
            if(!found){
                Object o = makeDerivedWrapper(el, "StatechartChange");
                if(o != null){
                    try{
                        ((edu.uci.isr.xarch.IXArchElement)o).setXArch(getXArch());
                        if(de != null){
                            de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)o));
                        }
                        v.add((IStatechartChange)o);
                    }
                    catch(Exception e){
                        StatechartChangeImpl eltImpl = new StatechartChangeImpl((Element)nl.item(i));
                        eltImpl.setXArch(getXArch());
                        if(de != null){
                            de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)eltImpl));
                        }
                        v.add(eltImpl);
                    }
                }
                else{
                    StatechartChangeImpl eltImpl = new StatechartChangeImpl((Element)nl.item(i));
                    eltImpl.setXArch(getXArch());
                    if(de != null){
                        de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)eltImpl));
                    }
                    v.add(eltImpl);
                }
            }
        }
        return v;
    }
    /** True iff some statechartChange child is equivalent to the given one. */
    public boolean hasStatechartChange(IStatechartChange statechartChangeToCheck){
        Collection c = getAllStatechartChanges();
        for(Iterator en = c.iterator(); en.hasNext(); ){
            IStatechartChange elt = (IStatechartChange)en.next();
            if(elt.isEquivalent(statechartChangeToCheck)){
                return true;
            }
        }
        return false;
    }
public Collection hasStatechartChanges(Collection statechartChangesToCheck){
Vector v = new Vector();
for(Iterator en = statechartChangesToCheck.iterator(); en.hasNext(); ){
IStatechartChange elt = (IStatechartChange)en.next();
v.addElement(new Boolean(hasStatechartChange(elt)));
}
return v;
}
    /**
     * Returns true iff every element of the collection has an equivalent
     * statechartChange child here (vacuously true for an empty collection).
     */
    public boolean hasAllStatechartChanges(Collection statechartChangesToCheck){
        for(Iterator en = statechartChangesToCheck.iterator(); en.hasNext(); ){
            IStatechartChange elt = (IStatechartChange)en.next();
            if(!hasStatechartChange(elt)){
                return false;
            }
        }
        return true;
    }
    /**
     * Returns the first statechartChange child whose id matches (null-safe),
     * or null when none does. Prefers a derived-type wrapper when available.
     */
    public IStatechartChange getStatechartChange(String id){
        NodeList nl = DOMUtils.getChildren(elt, ChangesConstants.NS_URI, STATECHART_CHANGE_ELT_NAME);
        for(int i = 0; i < nl.getLength(); i++){
            IStatechartChange el = new StatechartChangeImpl((Element)nl.item(i));
            if(DOMUtils.objNullEq(id, el.getId())){
                Element domElt = (Element)nl.item(i);
                Object o = makeDerivedWrapper(domElt, "StatechartChange");
                if(o != null){
                    try{
                        ((edu.uci.isr.xarch.IXArchElement)o).setXArch(getXArch());
                        return (IStatechartChange)o;
                    }
                    catch(Exception e){}
                }
                el.setXArch(getXArch());
                return el;
            }
        }
        return null;
    }
    /** Looks up a statechartChange child for each id in the collection. */
    public Collection getStatechartChanges(Collection ids){
        //If there is an ID that does not exist, it is simply ignored.
        //You can tell if this happened if ids.size() != returned collection.size().
        Vector v = new Vector();
        for(Iterator en = ids.iterator(); en.hasNext(); ){
            String elt = (String)en.next();
            IStatechartChange retElt = getStatechartChange(elt);
            if(retElt != null){
                v.addElement(retElt);
            }
        }
        return v;
    }
    /**
     * Identity comparison by id attribute only.
     * @param ChangesToCheck other Changes element; both sides must have an id.
     * @throws IllegalArgumentException if either id is missing.
     */
    public boolean isEqual(IChanges ChangesToCheck){
        String thisId = getId();
        String thatId = ChangesToCheck.getId();
        if((thisId == null) || (thatId == null)){
            throw new IllegalArgumentException("One of the arguments is missing an ID.");
        }
        return thisId.equals(thatId);
    }
    /**
     * Deep structural equivalence: same concrete class, same status, an
     * equivalent description, and mutually-equivalent child sets for each
     * change kind (checked in both directions to catch extra children;
     * note id is intentionally not compared).
     */
    public boolean isEquivalent(IChanges c){
        return (getClass().equals(c.getClass())) &&
            hasStatus(c.getStatus()) &&
            hasDescription(c.getDescription()) &&
            hasAllComponentChanges(c.getAllComponentChanges()) &&
            c.hasAllComponentChanges(getAllComponentChanges()) &&
            hasAllLinkChanges(c.getAllLinkChanges()) &&
            c.hasAllLinkChanges(getAllLinkChanges()) &&
            hasAllInteractionChanges(c.getAllInteractionChanges()) &&
            c.hasAllInteractionChanges(getAllInteractionChanges()) &&
            hasAllStatechartChanges(c.getAllStatechartChanges()) &&
            c.hasAllStatechartChanges(getAllStatechartChanges()) ;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.configuration;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.Version;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.coordination.CoordinationMetadata;
import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfigExclusion;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.node.DiscoveryNodes.Builder;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.MockTransport;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPool.Names;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static org.elasticsearch.cluster.ClusterState.builder;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.test.ClusterServiceUtils.setState;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.startsWith;
/**
 * Unit tests for {@code TransportClearVotingConfigExclusionsAction}: clearing
 * voting-config exclusions from cluster state, including the wait-for-removal
 * and timeout behaviors. Uses a {@link MockTransport} so no real network is
 * involved.
 */
public class TransportClearVotingConfigExclusionsActionTests extends ESTestCase {
    private static ThreadPool threadPool;
    private static ClusterService clusterService;
    private static DiscoveryNode localNode, otherNode1, otherNode2;
    private static VotingConfigExclusion otherNode1Exclusion, otherNode2Exclusion;
    private TransportService transportService;

    // Shared across all tests in the class; torn down once in @AfterClass.
    @BeforeClass
    public static void createThreadPoolAndClusterService() {
        threadPool = new TestThreadPool("test", Settings.EMPTY);
        localNode = new DiscoveryNode("local", buildNewFakeTransportAddress(), Version.CURRENT);
        otherNode1 = new DiscoveryNode("other1", "other1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        otherNode1Exclusion = new VotingConfigExclusion(otherNode1);
        otherNode2 = new DiscoveryNode("other2", "other2", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        otherNode2Exclusion = new VotingConfigExclusion(otherNode2);
        clusterService = createClusterService(threadPool, localNode);
    }

    @AfterClass
    public static void shutdownThreadPoolAndClusterService() {
        clusterService.stop();
        threadPool.shutdown();
    }

    // Fresh transport service and cluster state per test: local node is master,
    // all three nodes are present, and both other nodes start out excluded.
    @Before
    public void setupForTest() {
        final MockTransport transport = new MockTransport();
        transportService = transport.createTransportService(Settings.EMPTY, threadPool,
            TransportService.NOOP_TRANSPORT_INTERCEPTOR, boundTransportAddress -> localNode, null, emptySet());
        // Constructing the action registers its request handler with the
        // transport service as a side effect; the instance itself is unused.
        new TransportClearVotingConfigExclusionsAction(transportService, clusterService, threadPool, new ActionFilters(emptySet()),
            new IndexNameExpressionResolver()); // registers action
        transportService.start();
        transportService.acceptIncomingRequests();
        final ClusterState.Builder builder = builder(new ClusterName("cluster"))
            .nodes(new Builder().add(localNode).add(otherNode1).add(otherNode2)
                .localNodeId(localNode.getId()).masterNodeId(localNode.getId()));
        builder.metadata(Metadata.builder()
                .coordinationMetadata(CoordinationMetadata.builder()
                    .addVotingConfigExclusion(otherNode1Exclusion)
                    .addVotingConfigExclusion(otherNode2Exclusion)
                .build()));
        setState(clusterService, builder);
    }

    // With waitForRemoval=false the exclusions are cleared immediately even
    // though the excluded nodes are still in the cluster.
    public void testClearsVotingConfigExclusions() throws InterruptedException {
        final CountDownLatch countDownLatch = new CountDownLatch(1);
        final SetOnce<ClearVotingConfigExclusionsResponse> responseHolder = new SetOnce<>();
        final ClearVotingConfigExclusionsRequest clearVotingConfigExclusionsRequest = new ClearVotingConfigExclusionsRequest();
        clearVotingConfigExclusionsRequest.setWaitForRemoval(false);
        transportService.sendRequest(localNode, ClearVotingConfigExclusionsAction.NAME,
            clearVotingConfigExclusionsRequest,
            expectSuccess(r -> {
                responseHolder.set(r);
                countDownLatch.countDown();
            })
        );
        assertTrue(countDownLatch.await(30, TimeUnit.SECONDS));
        assertNotNull(responseHolder.get());
        assertThat(clusterService.getClusterApplierService().state().getVotingConfigExclusions(), empty());
    }

    // Default waitForRemoval=true: with the excluded nodes never leaving the
    // cluster, the (100ms) request must fail with a timeout and the
    // exclusions must remain in place.
    public void testTimesOutIfWaitingForNodesThatAreNotRemoved() throws InterruptedException {
        final CountDownLatch countDownLatch = new CountDownLatch(1);
        final SetOnce<TransportException> responseHolder = new SetOnce<>();
        final ClearVotingConfigExclusionsRequest clearVotingConfigExclusionsRequest = new ClearVotingConfigExclusionsRequest();
        clearVotingConfigExclusionsRequest.setTimeout(TimeValue.timeValueMillis(100));
        transportService.sendRequest(localNode, ClearVotingConfigExclusionsAction.NAME,
            clearVotingConfigExclusionsRequest,
            expectError(e -> {
                responseHolder.set(e);
                countDownLatch.countDown();
            })
        );
        assertTrue(countDownLatch.await(30, TimeUnit.SECONDS));
        assertThat(clusterService.getClusterApplierService().state().getVotingConfigExclusions(),
            containsInAnyOrder(otherNode1Exclusion, otherNode2Exclusion));
        final Throwable rootCause = responseHolder.get().getRootCause();
        assertThat(rootCause, instanceOf(ElasticsearchTimeoutException.class));
        assertThat(rootCause.getMessage(),
            startsWith("timed out waiting for removal of nodes; if nodes should not be removed, set waitForRemoval to false. ["));
    }

    // Default waitForRemoval=true: removing the excluded nodes from the
    // cluster state while the request is pending lets it complete and clears
    // the exclusions.
    public void testSucceedsIfNodesAreRemovedWhileWaiting() throws InterruptedException {
        final CountDownLatch countDownLatch = new CountDownLatch(1);
        final SetOnce<ClearVotingConfigExclusionsResponse> responseHolder = new SetOnce<>();
        transportService.sendRequest(localNode, ClearVotingConfigExclusionsAction.NAME,
            new ClearVotingConfigExclusionsRequest(),
            expectSuccess(r -> {
                responseHolder.set(r);
                countDownLatch.countDown();
            })
        );
        final ClusterState.Builder builder = builder(clusterService.state());
        builder.nodes(DiscoveryNodes.builder(clusterService.state().nodes()).remove(otherNode1).remove(otherNode2));
        setState(clusterService, builder);
        assertTrue(countDownLatch.await(30, TimeUnit.SECONDS));
        assertThat(clusterService.getClusterApplierService().state().getVotingConfigExclusions(), empty());
    }

    // Handler that delegates successful responses to onResponse and turns any
    // transport exception into a test failure.
    private TransportResponseHandler<ClearVotingConfigExclusionsResponse> expectSuccess(
        Consumer<ClearVotingConfigExclusionsResponse> onResponse) {
        return responseHandler(onResponse, e -> {
            throw new AssertionError("unexpected", e);
        });
    }

    // Handler that delegates exceptions to onException and fails the test if
    // a (successful) response arrives instead.
    private TransportResponseHandler<ClearVotingConfigExclusionsResponse> expectError(Consumer<TransportException> onException) {
        return responseHandler(r -> {
            assert false : r;
        }, onException);
    }

    // Adapts a pair of consumers into a TransportResponseHandler running on
    // the calling thread (Names.SAME).
    private TransportResponseHandler<ClearVotingConfigExclusionsResponse> responseHandler(
        Consumer<ClearVotingConfigExclusionsResponse> onResponse, Consumer<TransportException> onException) {
        return new TransportResponseHandler<ClearVotingConfigExclusionsResponse>() {
            @Override
            public void handleResponse(ClearVotingConfigExclusionsResponse response) {
                onResponse.accept(response);
            }
            @Override
            public void handleException(TransportException exp) {
                onException.accept(exp);
            }
            @Override
            public String executor() {
                return Names.SAME;
            }
            @Override
            public ClearVotingConfigExclusionsResponse read(StreamInput in) throws IOException {
                return new ClearVotingConfigExclusionsResponse(in);
            }
        };
    }
}
| |
/*
* generated by Xtext
*/
package fr.obeo.dsl.ui.contentassist;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.xtext.*;
import org.eclipse.xtext.ui.editor.contentassist.ICompletionProposalAcceptor;
import org.eclipse.xtext.ui.editor.contentassist.ContentAssistContext;
/**
* Represents a generated, default implementation of superclass {@link org.eclipse.xtext.xbase.annotations.ui.contentassist.XbaseWithAnnotationsProposalProvider}.
* Methods are dynamically dispatched on the first parameter, i.e., you can override them
* with a more concrete subtype.
*/
@SuppressWarnings("all")
// NOTE: generated by Xtext from the grammar — do not hand-edit; regenerate
// instead. Each completeRule_Feature method proposes completions for one
// grammar assignment: completeRuleCall proposes values matching the terminal
// rule, while lookupCrossReference proposes existing model elements for
// cross-references. Empty-bodied methods are extension points for subclasses.
public class AbstractSPrototyperProposalProvider extends org.eclipse.xtext.xbase.annotations.ui.contentassist.XbaseWithAnnotationsProposalProvider {
    // --- SPrototyper assignments ---
    public void completeSPrototyper_Name(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPrototyper_Qualifier(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPrototyper_TargetURI(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPrototyper_Viewpoints(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    // --- SPViewpoint assignments ---
    public void completeSPViewpoint_Name(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPViewpoint_Shortcut(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPViewpoint_Extension(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPViewpoint_ServiceClass(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPViewpoint_Representations(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    // --- SPTable assignments ---
    public void completeSPTable_Name(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPTable_Label(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPTable_Title(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPTable_Usages(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    // Root is a cross-reference in the grammar, so propose existing elements.
    public void completeSPTable_Root(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        lookupCrossReference(((CrossReference)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPTable_Elements(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPTable_Properties(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    // --- TableElement assignments ---
    public void completeTableElement_Recursive(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void completeTableElement_EClass(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        lookupCrossReference(((CrossReference)assignment.getTerminal()), context, acceptor);
    }
    public void completeTableElement_Expression(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeTableElement_Creatable(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void completeTableElement_CreateExpression(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeTableElement_SubElements(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    // --- TableProperty assignments ---
    public void completeTableProperty_Feature(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeTableProperty_Label(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeTableProperty_Expression(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    // --- SPDiagram assignments ---
    public void completeSPDiagram_Name(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPDiagram_Label(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPDiagram_Title(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPDiagram_Metamodels(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPDiagram_Root(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSPDiagram_Elements(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    // --- Container assignments ---
    public void completeContainer_Creatable(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void completeContainer_Recursive(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void completeContainer_ContainerType(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeContainer_Name(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeContainer_EClass(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeContainer_Expression(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeContainer_Style(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeContainer_Elements(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    // --- Style definition assignments ---
    public void completeContainerStyleDefinition_Color(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeContainerStyleDefinition_Label(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeContainerStyleDefinition_Border(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeNode_Creatable(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void completeNode_Name(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeNode_EClass(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeNode_Expression(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeNode_Style(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeNodeStyleDefinition_Color(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeNodeStyleDefinition_Label(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeNodeStyleDefinition_Border(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeSolidColorDefinition_Color(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeGradientColorDefinition_From(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeGradientColorDefinition_To(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeLabelStyleDefinition_Expression(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeLabelStyleDefinition_Color(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeLabelStyleDefinition_Size(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeLabelStyleDefinition_Bold(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void completeLabelStyleDefinition_Italic(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void completeBorderStyleDefinition_Color(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeBorderStyleDefinition_Size(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    // --- Expression / reference assignments ---
    public void completeAcceleoExpression_Value(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeVarRef_Value(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeFeatureRef_Value(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeServiceRef_Value(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completeMetamodelUsage_Usage(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        lookupCrossReference(((CrossReference)assignment.getTerminal()), context, acceptor);
    }
    public void completeJavaServiceClassReference_JavaClass(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    public void completePreDefinedColorDefinition_Color(EObject model, Assignment assignment, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        completeRuleCall(((RuleCall)assignment.getTerminal()), context, acceptor);
    }
    // --- Per-rule hooks (no-ops by default; override to add proposals) ---
    public void complete_SPrototyper(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_SPViewpoint(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_SPRepresentation(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_SPTable(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_TableElement(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_TableProperty(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_SPDiagram(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_DiagramElement(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_Container(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_ContainerStyleDefinition(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_ContainerType(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_ContainerColorDefinition(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_Node(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_NodeStyleDefinition(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_SolidColorDefinition(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_GradientColorDefinition(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_LabelStyleDefinition(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_BorderStyleDefinition(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_SPExpression(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_RequestExpression(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_RequestOrCreateExpression(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_AcceleoExpression(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_VarRef(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_FeatureRef(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_ServiceRef(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_MetamodelUsage(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_JavaServiceClassReference(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_Color(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_PreDefinedColorDefinition(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
    public void complete_PreDefinedColor(EObject model, RuleCall ruleCall, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        // subclasses may override
    }
}
| |
package com.timpo.batphone.messengers;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.ListenableFuture;
import com.timpo.batphone.handlers.EventHandler;
import com.timpo.batphone.handlers.Handler;
import com.timpo.batphone.handlers.RequestHandler;
import com.timpo.batphone.messages.Event;
import com.timpo.batphone.messages.Message;
import com.timpo.batphone.messages.Request;
import com.timpo.batphone.other.Utils;
import com.timpo.batphone.responsemappers.ResponseMapperImpl;
import com.timpo.batphone.responsemappers.ResponseMapper;
import com.timpo.batphone.transports.TopicMessage;
import com.timpo.batphone.transports.Transport;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.regex.Pattern;
import org.slf4j.Logger;
public class MessengerImpl implements Messenger {
    private static final Logger LOG = Utils.logFor(MessengerImpl.class);
    // Fallback handler used when an event arrives for a topic with no
    // registered handler: logs a warning and drops the event.
    private static final EventHandler EMPTY_EVENT_HANDLER = new EventHandler() {
        @Override
        public void handle(Event event, String topic) {
            LOG.warn("empty event handler called for {} : {}", topic, event);
        }
    };
    // Fallback handler used when a request arrives for a topic with no
    // registered handler: logs a warning and returns no response payload.
    private static final RequestHandler EMPTY_REQUEST_HANDLER = new RequestHandler() {
        @Override
        public Optional<Map<String, Object>> handle(Request request, String topic) {
            LOG.warn("empty request handler called for {} : {}", topic, request);
            return Optional.absent();
        }
    };
    //
    private final String serviceID;     // unique ID of this service instance; responses are addressed to it
    private final String serviceGroup;  // logical group this service belongs to
    // Topic -> handler registries; concurrent because registration and
    // message dispatch may happen on different threads.
    private final ConcurrentMap<String, RequestHandler> requestHandlers;
    private final ConcurrentMap<String, EventHandler> eventHandlers;
    // Compiled wildcard topic patterns -> handler, for regex-matched event topics.
    private final ConcurrentMap<Pattern, EventHandler> eventWildcardHandlers;
    private final Transport<TopicMessage<Request>> requestTransport;
    private final Transport<TopicMessage<Request>> responseTransport;
    private final Transport<TopicMessage<Event>> eventTransport;
    // Correlates request IDs with the futures returned by request().
    private final ResponseMapper<Request> responseMapper;
    /**
     * Wires this messenger to its three transports and starts listening for
     * responses addressed to this service's ID.
     *
     * @param serviceID unique ID of this service instance
     * @param serviceGroup logical group this service belongs to
     * @param requestTransport transport over which outbound requests are sent and inbound requests received
     * @param responseTransport transport over which responses to this service arrive
     * @param eventTransport transport for broadcast events
     * @param es executor used by the response mapper to complete futures
     * @throws Exception if any transport subscription fails
     */
    public MessengerImpl(String serviceID, String serviceGroup,
            Transport<TopicMessage<Request>> requestTransport, Transport<TopicMessage<Request>> responseTransport,
            Transport<TopicMessage<Event>> eventTransport, ExecutorService es) throws Exception {
        this.serviceID = serviceID;
        this.serviceGroup = serviceGroup;
        this.requestTransport = requestTransport;
        this.responseTransport = responseTransport;
        this.eventTransport = eventTransport;
        requestHandlers = new ConcurrentHashMap<>();
        eventHandlers = new ConcurrentHashMap<>();
        eventWildcardHandlers = new ConcurrentHashMap<>();
        responseMapper = new ResponseMapperImpl(es);
        //responses are sent to the service id
        this.responseTransport.listenFor(serviceID);
        // Hook up dispatch callbacks for each message kind.
        this.requestTransport.onMessage(new MessengerRequestHandler());
        this.responseTransport.onMessage(new MessengerResponseHandler());
        this.eventTransport.onMessage(new MessengerEventHandler());
    }
    /** @return the unique ID of this service instance */
    @Override
    public String getServiceID() {
        return serviceID;
    }
    /** @return the logical group this service belongs to */
    @Override
    public String getServiceGroup() {
        return serviceGroup;
    }
@Override
public void onRequest(RequestHandler handler, String... topics) {
validateTopics(topics);
for (String topic : topics) {
requestHandlers.put(topic, handler);
requestTransport.listenFor(topic);
}
}
@Override
public void onEvent(EventHandler handler, String... topics) {
validateTopics(topics);
for (String topic : topics) {
//even though wildcard topics can never be pulled directly, we
//have the optimization below that grabs the only thing in this map
//if only one entry exists, so we need to make sure this handler is
//in eventHandlers
eventHandlers.put(topic, handler);
//there's no need to specially track a topic that doesn't use wildcards
if (Utils.isWildcard(topic)) {
//TODO: do we need to modify the pattern if we know we're using
//rabbitmq, since it doesn't use regex?
eventWildcardHandlers.put(Pattern.compile(topic), handler);
}
eventTransport.listenFor(topic);
}
}
@Override
public ListenableFuture<Request> request(Object dataObject, String... pipeline) throws Exception {
if (pipeline.length == 0) {
throw new IllegalArgumentException("pipeline must contain at least one destination");
}
for (String topic : pipeline) {
if (topic == null || topic.isEmpty()) {
throw new IllegalArgumentException("cannot pipe request to null or empty topic");
}
}
Map<String, Object> data = Utils.convertToMap(dataObject);
Request req = new Request();
req.setFrom(serviceID);
req.setTo(pipeline);
req.setRequestID(Utils.simpleID());
req.setData(data);
//we do the remove so that the receiver doesn't see itself in the 'to' field
String destination = req.getTo().remove(0);
req.setTimeToNow();
ListenableFuture<Request> future = responseMapper.makeFuture(req.getRequestID());
requestTransport.send(new TopicMessage(destination, req));
return future;
}
@Override
public void notify(Object body, String... topics) throws Exception {
if (topics.length == 0) {
throw new IllegalArgumentException("topics must contain at least one topic");
}
Map<String, Object> data = Utils.convertToMap(body);
Event event = new Event();
event.setFrom(serviceID);
event.setData(data);
//event.to isn't set because events don't pipeline, so it's always empty
event.setTimeToNow();
//we do the remove so that the receiver doesn't see itself in the 'to' field
for (String topic : topics) {
eventTransport.send(new TopicMessage<>(topic, event));
}
}
    /**
     * Starts the request, response, and event transports so messages begin
     * to flow.
     */
    @Override
    public void start() {
        requestTransport.start();
        responseTransport.start();
        eventTransport.start();
    }
    /**
     * Stops the request, response, and event transports.
     */
    @Override
    public void stop() {
        requestTransport.stop();
        responseTransport.stop();
        eventTransport.stop();
    }
private void mergeData(Message req, Map<String, Object> resData) {
for (Map.Entry<String, Object> entry : resData.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
req.getData().put(key, value);
}
}
private RequestHandler getRequestHandler(TopicMessage<Request> cm) {
//if there's only one request handler, just return it
if (requestHandlers.size() == 1) {
return requestHandlers.values().iterator().next();
}
//requests don't use wildcards, so we can always just check the map
RequestHandler rh = requestHandlers.get(cm.getTopic());
if (rh != null) {
return rh;
}
return EMPTY_REQUEST_HANDLER;
}
private EventHandler getEventHandler(TopicMessage<Event> cm) {
//if there's only one event handler, just return it: it will save lots
//of time, particularly with wildcards
if (eventHandlers.size() == 1) {
return eventHandlers.values().iterator().next();
}
EventHandler eh = eventHandlers.get(cm.getTopic());
if (eh != null) {
return eh;
}
//if we don't have this topic in our event handlers, it was likely a wildcard
for (Map.Entry<Pattern, EventHandler> entry : eventWildcardHandlers.entrySet()) {
Pattern pattern = entry.getKey();
eh = entry.getValue();
if (pattern.matcher(cm.getTopic()).matches()) {
return eh;
}
}
return EMPTY_EVENT_HANDLER;
}
    /**
     * Stops all three transports and then releases each of them via its
     * shutdown() method.
     */
    @Override
    public void shutdown() {
        stop();
        requestTransport.shutdown();
        responseTransport.shutdown();
        eventTransport.shutdown();
    }
private void validateTopics(String[] topics) {
if (topics.length == 0) {
throw new IllegalArgumentException("cannot bind handler to empty topics list");
}
for (String topic : topics) {
if (topic == null || topic.isEmpty()) {
throw new IllegalArgumentException("cannot bind handler to null or empty topic");
}
}
}
    /**
     * Receives incoming requests, invokes the registered handler, merges any
     * returned data into the request, and then either forwards the request to
     * the next pipeline hop or sends it back to the original sender.
     */
    private class MessengerRequestHandler implements Handler<TopicMessage<Request>> {
        private final Logger LOG = Utils.logFor(MessengerRequestHandler.class);
        @Override
        public void handle(TopicMessage<Request> cm) {
            LOG.debug("handle: {}", cm);
            try {
                Request request = cm.getMessage();
                RequestHandler rh = getRequestHandler(cm);
                Optional<Map<String, Object>> response = rh.handle(request, cm.getTopic());
                //some requests might have their response data merged in
                //directly, otherwise we can merge it in
                if (response.isPresent()) {
                    mergeData(request, response.get());
                }
                String destination;
                if (!request.getTo().isEmpty()) {
                    //either forward this request to the next destination in
                    //the 'to' field...
                    destination = request.getTo().remove(0);
                    //remove it so the receiver doesn't see themselves in the 'to'
                    //field and cause a loop
                } else {
                    //... or respond back to the original sender
                    //NOTE(review): the reply below is sent over requestTransport,
                    //while replies are *received* via responseTransport (see the
                    //constructor); presumably both transports reach the same
                    //broker so the serviceID topic is routable -- confirm.
                    destination = request.getFrom().get(0);
                }
                request.setTimeToNow();
                requestTransport.send(new TopicMessage<>(destination, request));
            } catch (Exception ex) {
                //never let a handler failure kill the transport's dispatch thread
                LOG.warn("problem handling request", ex);
            }
        }
    }
private class MessengerResponseHandler implements Handler<TopicMessage<Request>> {
private final Logger LOG = Utils.logFor(MessengerResponseHandler.class);
@Override
public void handle(TopicMessage<Request> cm) {
LOG.debug("handle: {}", cm);
try {
//this is a response, since only they go directly to a serviceID
responseMapper.resolveResponse(cm.getMessage());
} catch (Exception ex) {
LOG.warn("problem handling response", ex);
}
}
}
private class MessengerEventHandler implements Handler<TopicMessage<Event>> {
private final Logger LOG = Utils.logFor(MessengerEventHandler.class);
@Override
public void handle(TopicMessage<Event> cm) {
LOG.debug("handle: {}", cm);
try {
EventHandler eh = getEventHandler(cm);
eh.handle(cm.getMessage(), cm.getTopic());
} catch (Exception ex) {
LOG.warn("problem handling event", ex);
}
}
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.datapipeline.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Contains the parameters for PutPipelineDefinition.
* </p>
*/
// NOTE(review): generated AWS SDK model code. Changes here are limited to
// comments so the class stays consistent with the code generator's output;
// the null-vs-empty-list distinction in the setters is part of the contract.
public class PutPipelineDefinitionRequest extends AmazonWebServiceRequest
        implements Serializable, Cloneable {
    /**
     * <p>
     * The ID of the pipeline.
     * </p>
     */
    private String pipelineId;
    /**
     * <p>
     * The objects that define the pipeline. These objects overwrite the
     * existing pipeline definition.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<PipelineObject> pipelineObjects;
    /**
     * <p>
     * The parameter objects used with the pipeline.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<ParameterObject> parameterObjects;
    /**
     * <p>
     * The parameter values used with the pipeline.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<ParameterValue> parameterValues;
    /**
     * <p>
     * The ID of the pipeline.
     * </p>
     *
     * @param pipelineId
     *        The ID of the pipeline.
     */
    public void setPipelineId(String pipelineId) {
        this.pipelineId = pipelineId;
    }
    /**
     * <p>
     * The ID of the pipeline.
     * </p>
     *
     * @return The ID of the pipeline.
     */
    public String getPipelineId() {
        return this.pipelineId;
    }
    /**
     * <p>
     * The ID of the pipeline.
     * </p>
     *
     * @param pipelineId
     *        The ID of the pipeline.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public PutPipelineDefinitionRequest withPipelineId(String pipelineId) {
        setPipelineId(pipelineId);
        return this;
    }
    /**
     * <p>
     * The objects that define the pipeline. These objects overwrite the
     * existing pipeline definition.
     * </p>
     *
     * @return The objects that define the pipeline. These objects overwrite the
     *         existing pipeline definition.
     */
    public java.util.List<PipelineObject> getPipelineObjects() {
        // lazily initialize so this getter never returns null
        if (pipelineObjects == null) {
            pipelineObjects = new com.amazonaws.internal.SdkInternalList<PipelineObject>();
        }
        return pipelineObjects;
    }
    /**
     * <p>
     * The objects that define the pipeline. These objects overwrite the
     * existing pipeline definition.
     * </p>
     *
     * @param pipelineObjects
     *        The objects that define the pipeline. These objects overwrite the
     *        existing pipeline definition.
     */
    public void setPipelineObjects(
            java.util.Collection<PipelineObject> pipelineObjects) {
        // null clears the field entirely (distinct from an empty list)
        if (pipelineObjects == null) {
            this.pipelineObjects = null;
            return;
        }
        this.pipelineObjects = new com.amazonaws.internal.SdkInternalList<PipelineObject>(
                pipelineObjects);
    }
    /**
     * <p>
     * The objects that define the pipeline. These objects overwrite the
     * existing pipeline definition.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setPipelineObjects(java.util.Collection)} or
     * {@link #withPipelineObjects(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param pipelineObjects
     *        The objects that define the pipeline. These objects overwrite the
     *        existing pipeline definition.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public PutPipelineDefinitionRequest withPipelineObjects(
            PipelineObject... pipelineObjects) {
        // appends to any existing list; use setPipelineObjects to replace
        if (this.pipelineObjects == null) {
            setPipelineObjects(new com.amazonaws.internal.SdkInternalList<PipelineObject>(
                    pipelineObjects.length));
        }
        for (PipelineObject ele : pipelineObjects) {
            this.pipelineObjects.add(ele);
        }
        return this;
    }
    /**
     * <p>
     * The objects that define the pipeline. These objects overwrite the
     * existing pipeline definition.
     * </p>
     *
     * @param pipelineObjects
     *        The objects that define the pipeline. These objects overwrite the
     *        existing pipeline definition.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public PutPipelineDefinitionRequest withPipelineObjects(
            java.util.Collection<PipelineObject> pipelineObjects) {
        setPipelineObjects(pipelineObjects);
        return this;
    }
    /**
     * <p>
     * The parameter objects used with the pipeline.
     * </p>
     *
     * @return The parameter objects used with the pipeline.
     */
    public java.util.List<ParameterObject> getParameterObjects() {
        // same lazy-init pattern as getPipelineObjects()
        if (parameterObjects == null) {
            parameterObjects = new com.amazonaws.internal.SdkInternalList<ParameterObject>();
        }
        return parameterObjects;
    }
    /**
     * <p>
     * The parameter objects used with the pipeline.
     * </p>
     *
     * @param parameterObjects
     *        The parameter objects used with the pipeline.
     */
    public void setParameterObjects(
            java.util.Collection<ParameterObject> parameterObjects) {
        if (parameterObjects == null) {
            this.parameterObjects = null;
            return;
        }
        this.parameterObjects = new com.amazonaws.internal.SdkInternalList<ParameterObject>(
                parameterObjects);
    }
    /**
     * <p>
     * The parameter objects used with the pipeline.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setParameterObjects(java.util.Collection)} or
     * {@link #withParameterObjects(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param parameterObjects
     *        The parameter objects used with the pipeline.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public PutPipelineDefinitionRequest withParameterObjects(
            ParameterObject... parameterObjects) {
        if (this.parameterObjects == null) {
            setParameterObjects(new com.amazonaws.internal.SdkInternalList<ParameterObject>(
                    parameterObjects.length));
        }
        for (ParameterObject ele : parameterObjects) {
            this.parameterObjects.add(ele);
        }
        return this;
    }
    /**
     * <p>
     * The parameter objects used with the pipeline.
     * </p>
     *
     * @param parameterObjects
     *        The parameter objects used with the pipeline.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public PutPipelineDefinitionRequest withParameterObjects(
            java.util.Collection<ParameterObject> parameterObjects) {
        setParameterObjects(parameterObjects);
        return this;
    }
    /**
     * <p>
     * The parameter values used with the pipeline.
     * </p>
     *
     * @return The parameter values used with the pipeline.
     */
    public java.util.List<ParameterValue> getParameterValues() {
        if (parameterValues == null) {
            parameterValues = new com.amazonaws.internal.SdkInternalList<ParameterValue>();
        }
        return parameterValues;
    }
    /**
     * <p>
     * The parameter values used with the pipeline.
     * </p>
     *
     * @param parameterValues
     *        The parameter values used with the pipeline.
     */
    public void setParameterValues(
            java.util.Collection<ParameterValue> parameterValues) {
        if (parameterValues == null) {
            this.parameterValues = null;
            return;
        }
        this.parameterValues = new com.amazonaws.internal.SdkInternalList<ParameterValue>(
                parameterValues);
    }
    /**
     * <p>
     * The parameter values used with the pipeline.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setParameterValues(java.util.Collection)} or
     * {@link #withParameterValues(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param parameterValues
     *        The parameter values used with the pipeline.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public PutPipelineDefinitionRequest withParameterValues(
            ParameterValue... parameterValues) {
        if (this.parameterValues == null) {
            setParameterValues(new com.amazonaws.internal.SdkInternalList<ParameterValue>(
                    parameterValues.length));
        }
        for (ParameterValue ele : parameterValues) {
            this.parameterValues.add(ele);
        }
        return this;
    }
    /**
     * <p>
     * The parameter values used with the pipeline.
     * </p>
     *
     * @param parameterValues
     *        The parameter values used with the pipeline.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public PutPipelineDefinitionRequest withParameterValues(
            java.util.Collection<ParameterValue> parameterValues) {
        setParameterValues(parameterValues);
        return this;
    }
    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // NOTE(review): the list getters lazily initialize, so their null
        // checks below are always true once called -- kept as generated.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getPipelineId() != null)
            sb.append("PipelineId: " + getPipelineId() + ",");
        if (getPipelineObjects() != null)
            sb.append("PipelineObjects: " + getPipelineObjects() + ",");
        if (getParameterObjects() != null)
            sb.append("ParameterObjects: " + getParameterObjects() + ",");
        if (getParameterValues() != null)
            sb.append("ParameterValues: " + getParameterValues());
        sb.append("}");
        return sb.toString();
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof PutPipelineDefinitionRequest == false)
            return false;
        PutPipelineDefinitionRequest other = (PutPipelineDefinitionRequest) obj;
        // '^' below means "exactly one side is null" => not equal
        if (other.getPipelineId() == null ^ this.getPipelineId() == null)
            return false;
        if (other.getPipelineId() != null
                && other.getPipelineId().equals(this.getPipelineId()) == false)
            return false;
        if (other.getPipelineObjects() == null
                ^ this.getPipelineObjects() == null)
            return false;
        if (other.getPipelineObjects() != null
                && other.getPipelineObjects().equals(this.getPipelineObjects()) == false)
            return false;
        if (other.getParameterObjects() == null
                ^ this.getParameterObjects() == null)
            return false;
        if (other.getParameterObjects() != null
                && other.getParameterObjects().equals(
                        this.getParameterObjects()) == false)
            return false;
        if (other.getParameterValues() == null
                ^ this.getParameterValues() == null)
            return false;
        if (other.getParameterValues() != null
                && other.getParameterValues().equals(this.getParameterValues()) == false)
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode
                + ((getPipelineId() == null) ? 0 : getPipelineId().hashCode());
        hashCode = prime
                * hashCode
                + ((getPipelineObjects() == null) ? 0 : getPipelineObjects()
                        .hashCode());
        hashCode = prime
                * hashCode
                + ((getParameterObjects() == null) ? 0 : getParameterObjects()
                        .hashCode());
        hashCode = prime
                * hashCode
                + ((getParameterValues() == null) ? 0 : getParameterValues()
                        .hashCode());
        return hashCode;
    }
    @Override
    public PutPipelineDefinitionRequest clone() {
        // shallow clone as provided by AmazonWebServiceRequest
        return (PutPipelineDefinitionRequest) super.clone();
    }
}
| |
package org.ahuh.flickr.sorter;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import javax.swing.SwingUtilities;
import org.ahuh.flickr.sorter.bean.CollectionBean;
import org.ahuh.flickr.sorter.bean.PhotoSetBean;
import org.ahuh.flickr.sorter.constants.FlickrSorterConstants;
import org.ahuh.flickr.sorter.exception.AppException;
import org.ahuh.flickr.sorter.gui.GUIApplication;
import org.ahuh.flickr.sorter.service.AuthenticationService;
import org.ahuh.flickr.sorter.service.CollectionService;
import org.ahuh.flickr.sorter.service.PhotoSetService;
import org.ahuh.flickr.sorter.tasks.ReorderCollectionTask;
import org.ahuh.flickr.sorter.tasks.ReorderPhotoSetTask;
import org.apache.log4j.Logger;
import com.flickr4java.flickr.Flickr;
import com.flickr4java.flickr.REST;
public class FlickrSorterLauncher extends GUIApplication {
    /**
     * Logger
     */
    private static final Logger log = Logger.getLogger(FlickrSorterLauncher.class);
    /**
     * Static properties (shared between the GUI thread and the worker thread)
     */
    private static boolean processIsRunning = false;
    private static boolean errorsInTasks = false;
    /**
     * Main execution method
     * @param args command line arguments (unused)
     */
    public static void main(String[] args) {
        try {
            // Prevent several instances of same program.
            // Fix: checkLock() previously swallowed the "already running"
            // signal and main() continued anyway; now we really abort.
            if (!checkLock()) {
                return;
            }
            // Schedule a job for the event-dispatching thread:
            // adding TrayIcon
            SwingUtilities.invokeLater(new Runnable() {
                public void run() {
                    createAndShowGUI();
                    plugGUIToProcess();
                }
            });
        }
        catch (Exception e) {
            String errorMessage = "An error occurred:\n\n" + e.getMessage();
            log.error(errorMessage, e);
            displayErrorPopup(errorMessage);
        }
        finally {
            terminateGUI();
        }
    }
    /**
     * Plug GUI to process: launch the reordering process when the Start
     * button is clicked.
     */
    protected static void plugGUIToProcess() {
        startButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent evt) {
                process();
            }
        });
    }
    /**
     * Process: runs the whole reordering workflow on a background thread so
     * the GUI stays responsive. Only one process may run at a time.
     */
    protected static void process() {
        // Declare new thread
        Thread thread = new Thread(new Runnable()
        {
            // Run in new thread
            public void run() {
                if (processIsRunning) {
                    // Process is already running
                    log.debug("Trying to launch new process : blocked because a process is already running");
                    return;
                }
                // Process is not running : launch it and lock the running flag
                processIsRunning = true;
                startButton.setEnabled(false);
                AuthenticationService authService = null;
                PhotoSetService psService = null;
                CollectionService colService = null;
                try {
                    // Init Flickr API calls
                    Flickr flickr = new Flickr(FlickrSorterConstants.API_KEY, FlickrSorterConstants.SHARED_SECRET, new REST());
                    // Fix: the services used to be instantiated inside the
                    // helper methods and assigned to their *parameters*; Java
                    // passes references by value, so these locals stayed null
                    // and destroy() was never invoked in the finally block.
                    // The services are now created here so cleanup is real.
                    // Authentication
                    authService = new AuthenticationService(flickr);
                    authenticate(authService);
                    // Reorder PhotoSets
                    psService = new PhotoSetService(flickr);
                    List<PhotoSetBean> psBeanList = new ArrayList<PhotoSetBean>();
                    int countReorderPS = reorderPhotoSets(psService, psBeanList);
                    // Reorder Collections (multithreading)
                    colService = new CollectionService(flickr);
                    int countReorderC = reorderCollections(colService);
                    // Reorder Photos (multithreading)
                    int countReorderP = reorderPhotos(psBeanList);
                    if (errorsInTasks) {
                        String errorMessage = "Errors occurred during the process: check logs to see the details";
                        displayErrorPopup(errorMessage);
                    }
                    displayMessagePopup("Number of photo sets reordered: " + countReorderPS + "\n" +
                            "Number of photo sets reordered in collections: " + countReorderC + "\n" +
                            "Number of photos reordered in photo sets: " + countReorderP);
                }
                catch (AppException e) {
                    // Application-level error: the message is user-readable
                    displayErrorPopup(e.getMessage());
                }
                catch (Exception e) {
                    String errorMessage = "An error occurred during the process:\n\n" + e.getMessage();
                    log.error(errorMessage, e);
                    displayErrorPopup(errorMessage);
                }
                finally {
                    // Release services and re-enable the GUI whatever happened
                    if (authService != null) {
                        authService.destroy();
                    }
                    if (psService != null) {
                        psService.destroy();
                    }
                    if (colService != null) {
                        colService.destroy();
                    }
                    errorsInTasks = false;
                    processIsRunning = false;
                    startButton.setEnabled(true);
                }
            }
        });
        // Execute new thread
        thread.start();
    }
    /**
     * Authenticate against the Flickr API, walking the user through the
     * three-phase OAuth flow if no stored authorization exists.
     * @param authService authentication service (created by the caller so it
     *        can also be destroyed there)
     * @throws AppException
     */
    private static void authenticate(AuthenticationService authService) throws AppException {
        if (!authService.checkAuth()) {
            // Handle authentication to Flickr API
            // - Phase 1
            displayMessagePopup("FlickrSorter requires write permissions to your Flickr account.\n"
                    + "You will be redirected to the authorization request page.");
            URL requestTokenURL = authService.authenticatePhase1();
            openWebpage(requestTokenURL);
            // - Phase 2
            String verifyCode = displayInputPopup("Please accept to authorize the application,\n"
                    + "copy the verification code provided by the\n"
                    + "Flickr web site, and paste it here:");
            // - Phase 3
            authService.authenticatePhase3(verifyCode);
            displayMessagePopup("You are authorized to use FlickrSorter with your Flickr account.\n"
                    + "The authorization access has been saved in the configuration file.");
        }
    }
    /**
     * Reorder PhotoSets
     * @param psService photo set service (created by the caller)
     * @param psBeanList output list, filled with the photo sets found
     * @return number of photo sets reordered
     * @throws AppException
     */
    private static int reorderPhotoSets(PhotoSetService psService,
            List<PhotoSetBean> psBeanList) throws AppException {
        psBeanList.addAll(psService.listPhotoSets());
        int countReorderPS = psService.sortAndReorderPhotoSets(psBeanList);
        // Progress bar
        progressBar.setValue(33);
        return countReorderPS;
    }
    /**
     * Reorder Collections (multithreading)
     * @param colService collection service (created by the caller)
     * @return number of photo sets reordered in collections
     * @throws AppException
     */
    private static int reorderCollections(CollectionService colService) throws AppException {
        int countReorderC = 0;
        List<CollectionBean> colBeanList = colService.listCollections();
        int n = colBeanList.size();
        ExecutorService threadPool = Executors.newFixedThreadPool(10);
        try {
            CompletionService<Integer> pool = new ExecutorCompletionService<Integer>(threadPool);
            for (CollectionBean colBean : colBeanList) {
                pool.submit(new ReorderCollectionTask(colBean));
            }
            int i = 0;
            for (int k = 0; k < n; k++) {
                try {
                    Integer result = pool.take().get();
                    if (result != null) {
                        countReorderC += result.intValue();
                    }
                    // Progress bar (33% - 66% range, advanced per success)
                    i++;
                    progressBar.setValue(33 + ((33 * i) / n));
                }
                catch (Exception e) {
                    if (e instanceof InterruptedException) {
                        // restore the interrupt status for the worker thread
                        Thread.currentThread().interrupt();
                    }
                    if (e.getCause() == null || !(e.getCause() instanceof AppException)) {
                        String errorMessage = "An error occurred during collection reordering:\n\n" + e.getMessage();
                        log.error(errorMessage, e);
                    }
                    errorsInTasks = true;
                }
            }
        }
        finally {
            // Fix: shutdown is now guaranteed even if the loop throws
            threadPool.shutdown();
        }
        return countReorderC;
    }
    /**
     * Reorder Photos (multithreading)
     * @param psBeanList photo sets whose photos are reordered
     * @return number of photos reordered in photo sets
     * @throws AppException
     */
    private static int reorderPhotos(List<PhotoSetBean> psBeanList) throws AppException {
        int countReorderP = 0;
        int n = psBeanList.size();
        ExecutorService threadPool = Executors.newFixedThreadPool(10);
        try {
            CompletionService<Integer> pool = new ExecutorCompletionService<Integer>(threadPool);
            for (PhotoSetBean psBean : psBeanList) {
                pool.submit(new ReorderPhotoSetTask(psBean));
            }
            int i = 0;
            for (int k = 0; k < n; k++) {
                try {
                    Integer result = pool.take().get();
                    if (result != null) {
                        countReorderP += result.intValue();
                    }
                    // Progress bar (66% - 99% range, advanced per success)
                    i++;
                    progressBar.setValue(66 + ((33 * i) / n));
                }
                catch (Exception e) {
                    if (e instanceof InterruptedException) {
                        Thread.currentThread().interrupt();
                    }
                    if (e.getCause() == null || !(e.getCause() instanceof AppException)) {
                        String errorMessage = "An error occurred during photo reordering:\n\n" + e.getMessage();
                        log.error(errorMessage, e);
                    }
                    errorsInTasks = true;
                }
            }
        }
        finally {
            threadPool.shutdown();
        }
        return countReorderP;
    }
    /**
     * Prevent several instances of same program
     * @return true if this instance may continue, false if another instance
     *         already holds the program lock
     */
    private static boolean checkLock() {
        try
        {
            @SuppressWarnings("unused")
            ProgramLock lock = new ProgramLock();
            return true;
        }
        catch(RuntimeException e)
        {
            // Another instance holds the lock : abort new program execution
            log.debug("Program is already running : abort new program execution");
            return false;
        }
        catch (Exception e)
        {
            // Lock machinery failed : log and continue (best effort, as before)
            log.error("Error with the lock process", e);
            return true;
        }
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/services/ad_group_bid_modifier_service.proto
package com.google.ads.googleads.v10.services;
/**
* <pre>
* Request message for [AdGroupBidModifierService.MutateAdGroupBidModifiers][google.ads.googleads.v10.services.AdGroupBidModifierService.MutateAdGroupBidModifiers].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest}
*/
public final class MutateAdGroupBidModifiersRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest)
MutateAdGroupBidModifiersRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use MutateAdGroupBidModifiersRequest.newBuilder() to construct.
  private MutateAdGroupBidModifiersRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance: scalar fields take proto3 defaults, the repeated
  // operations field starts as an immutable empty list.
  private MutateAdGroupBidModifiersRequest() {
    customerId_ = "";
    operations_ = java.util.Collections.emptyList();
    responseContentType_ = 0;
  }
  // Called reflectively by GeneratedMessageV3 to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new MutateAdGroupBidModifiersRequest();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Legacy stream-parsing constructor emitted by older protoc versions;
  // reads tag/value pairs until end of input, preserving unknown fields.
  private MutateAdGroupBidModifiersRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // tag 0 signals end of input
            done = true;
            break;
          case 10: {
            // field 1: customer_id (length-delimited string)
            java.lang.String s = input.readStringRequireUtf8();
            customerId_ = s;
            break;
          }
          case 18: {
            // field 2: operations (repeated message); the bit flag tracks
            // whether the mutable ArrayList has been created yet
            if (!((mutable_bitField0_ & 0x00000001) != 0)) {
              operations_ = new java.util.ArrayList<com.google.ads.googleads.v10.services.AdGroupBidModifierOperation>();
              mutable_bitField0_ |= 0x00000001;
            }
            operations_.add(
                input.readMessage(com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.parser(), extensionRegistry));
            break;
          }
          case 24: {
            // field 3: partial_failure (bool)
            partialFailure_ = input.readBool();
            break;
          }
          case 32: {
            // field 4: validate_only (bool)
            validateOnly_ = input.readBool();
            break;
          }
          case 40: {
            // field 5: response_content_type (enum, stored as raw int)
            int rawValue = input.readEnum();
            responseContentType_ = rawValue;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // freeze the repeated field and attach any unknown fields, even when
      // parsing failed partway through
      if (((mutable_bitField0_ & 0x00000001) != 0)) {
        operations_ = java.util.Collections.unmodifiableList(operations_);
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  // Message descriptor, defined in the generated service proto class.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v10.services.AdGroupBidModifierServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupBidModifiersRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v10.services.AdGroupBidModifierServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupBidModifiersRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest.class, com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest.Builder.class);
  }
  public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; decoded lazily and cached below.
  private volatile java.lang.Object customerId_;
  /**
   * <pre>
   * Required. ID of the customer whose ad group bid modifiers are being modified.
   * </pre>
   *
   * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The customerId.
   */
  @java.lang.Override
  public java.lang.String getCustomerId() {
    java.lang.Object ref = customerId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // cache the decoded String so subsequent calls return it directly
      customerId_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. ID of the customer whose ad group bid modifiers are being modified.
   * </pre>
   *
   * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The bytes for customerId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getCustomerIdBytes() {
    java.lang.Object ref = customerId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // cache the encoded ByteString, mirroring getCustomerId()
      customerId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int OPERATIONS_FIELD_NUMBER = 2;
  // Made unmodifiable once parsing completes; see the parsing constructor.
  private java.util.List<com.google.ads.googleads.v10.services.AdGroupBidModifierOperation> operations_;
  /**
   * <pre>
   * Required. The list of operations to perform on individual ad group bid modifiers.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public java.util.List<com.google.ads.googleads.v10.services.AdGroupBidModifierOperation> getOperationsList() {
    return operations_;
  }
  /**
   * <pre>
   * Required. The list of operations to perform on individual ad group bid modifiers.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.ads.googleads.v10.services.AdGroupBidModifierOperationOrBuilder>
      getOperationsOrBuilderList() {
    return operations_;
  }
  /**
   * <pre>
   * Required. The list of operations to perform on individual ad group bid modifiers.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public int getOperationsCount() {
    return operations_.size();
  }
  /**
   * <pre>
   * Required. The list of operations to perform on individual ad group bid modifiers.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v10.services.AdGroupBidModifierOperation getOperations(int index) {
    return operations_.get(index);
  }
  /**
   * <pre>
   * Required. The list of operations to perform on individual ad group bid modifiers.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v10.services.AdGroupBidModifierOperationOrBuilder getOperationsOrBuilder(
      int index) {
    return operations_.get(index);
  }
public static final int PARTIAL_FAILURE_FIELD_NUMBER = 3;
// Proto3 scalar: defaults to false and is only serialized when true.
private boolean partialFailure_;
/**
* <pre>
* If true, successful operations will be carried out and invalid
* operations will return errors. If false, all operations will be carried
* out in one transaction if and only if they are all valid.
* Default is false.
* </pre>
*
* <code>bool partial_failure = 3;</code>
* @return The partialFailure.
*/
@java.lang.Override
public boolean getPartialFailure() {
return partialFailure_;
}
public static final int VALIDATE_ONLY_FIELD_NUMBER = 4;
// Proto3 scalar: defaults to false and is only serialized when true.
private boolean validateOnly_;
/**
* <pre>
* If true, the request is validated but not executed. Only errors are
* returned, not results.
* </pre>
*
* <code>bool validate_only = 4;</code>
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
public static final int RESPONSE_CONTENT_TYPE_FIELD_NUMBER = 5;
// Stored as the raw wire value so unknown enum numbers survive a
// parse/serialize round trip (proto3 open-enum semantics).
private int responseContentType_;
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned post mutation.
* </pre>
*
* <code>.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
* @return The enum numeric value on the wire for responseContentType.
*/
@java.lang.Override public int getResponseContentTypeValue() {
return responseContentType_;
}
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned post mutation.
* </pre>
*
* <code>.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
* @return The responseContentType.
*/
@java.lang.Override public com.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType getResponseContentType() {
@SuppressWarnings("deprecation")
// valueOf(int) is deprecated in newer protobuf in favor of forNumber(),
// but the generator for this version still emits it; null maps to UNRECOGNIZED.
com.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType result = com.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType.valueOf(responseContentType_);
return result == null ? com.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType.UNRECOGNIZED : result;
}
// Tri-state cache for isInitialized(): -1 = not yet computed,
// 0 = known uninitialized, 1 = known initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  final byte cached = memoizedIsInitialized;
  if (cached != -1) {
    // Result was already computed; report it without re-checking.
    return cached == 1;
  }
  // This message declares no required sub-messages, so it is always
  // initialized; remember that for subsequent calls.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
// Serializes set fields in ascending field-number order; proto3 default
// values (empty string, false, enum 0) are skipped entirely on the wire.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
}
for (int i = 0; i < operations_.size(); i++) {
output.writeMessage(2, operations_.get(i));
}
if (partialFailure_ != false) {
output.writeBool(3, partialFailure_);
}
if (validateOnly_ != false) {
output.writeBool(4, validateOnly_);
}
if (responseContentType_ != com.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType.UNSPECIFIED.getNumber()) {
output.writeEnum(5, responseContentType_);
}
// Preserve any fields this binary did not know about when parsing.
unknownFields.writeTo(output);
}
@java.lang.Override
// Computes (and memoizes in memoizedSize) the exact byte length writeTo()
// will produce; must mirror writeTo()'s skip-default logic field for field.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
}
for (int i = 0; i < operations_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, operations_.get(i));
}
if (partialFailure_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, partialFailure_);
}
if (validateOnly_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(4, validateOnly_);
}
if (responseContentType_ != com.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType.UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(5, responseContentType_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
// Field-by-field structural equality, including unknown fields; the enum is
// compared by raw wire value so UNRECOGNIZED numbers still compare correctly.
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest)) {
return super.equals(obj);
}
com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest other = (com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest) obj;
if (!getCustomerId()
.equals(other.getCustomerId())) return false;
if (!getOperationsList()
.equals(other.getOperationsList())) return false;
if (getPartialFailure()
!= other.getPartialFailure()) return false;
if (getValidateOnly()
!= other.getValidateOnly()) return false;
if (responseContentType_ != other.responseContentType_) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
// Memoized hash consistent with equals(); mixes each field number (37x) and
// value (53x) so messages differing in any compared field hash differently.
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
hash = (53 * hash) + getCustomerId().hashCode();
if (getOperationsCount() > 0) {
hash = (37 * hash) + OPERATIONS_FIELD_NUMBER;
hash = (53 * hash) + getOperationsList().hashCode();
}
hash = (37 * hash) + PARTIAL_FAILURE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
getPartialFailure());
hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
getValidateOnly());
hash = (37 * hash) + RESPONSE_CONTENT_TYPE_FIELD_NUMBER;
hash = (53 * hash) + responseContentType_;
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parsing entry points. All overloads delegate to the shared PARSER;
// byte-array/ByteString/ByteBuffer inputs throw InvalidProtocolBufferException
// on malformed data, while stream inputs surface IOException as well.
public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants first read a varint length prefix, enabling several
// messages to be read back-to-back from the same stream.
public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Builders are always derived from the default instance so the singleton's
// state is never mutated.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Skip the mergeFrom copy when this is the (all-defaults) default instance.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Request message for [AdGroupBidModifierService.MutateAdGroupBidModifiers][google.ads.googleads.v10.services.AdGroupBidModifierService.MutateAdGroupBidModifiers].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest)
com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequestOrBuilder {
// Descriptor for reflective access to this message type.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v10.services.AdGroupBidModifierServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupBidModifiersRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v10.services.AdGroupBidModifierServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupBidModifiersRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest.class, com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest.Builder.class);
}
// Construct using com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when the runtime requires it
// (alwaysUseFieldBuilders is true only in debug/nested-builder mode).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getOperationsFieldBuilder();
}
}
@java.lang.Override
// Resets every field to its proto3 default; the repeated field either drops
// its plain list (and its bitField ownership bit) or clears its field builder.
public Builder clear() {
super.clear();
customerId_ = "";
if (operationsBuilder_ == null) {
operations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
operationsBuilder_.clear();
}
partialFailure_ = false;
validateOnly_ = false;
responseContentType_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v10.services.AdGroupBidModifierServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupBidModifiersRequest_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest getDefaultInstanceForType() {
return com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest.getDefaultInstance();
}
@java.lang.Override
// build() differs from buildPartial() only by verifying isInitialized();
// for this message that check always passes (no required sub-messages).
public com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest build() {
com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
// Copies builder state into a new immutable message. The repeated field is
// frozen in place: once wrapped in unmodifiableList the builder clears its
// ownership bit so a later mutation re-copies rather than aliasing the
// list now owned by the built message.
public com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest buildPartial() {
com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest result = new com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest(this);
int from_bitField0_ = bitField0_;
result.customerId_ = customerId_;
if (operationsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
operations_ = java.util.Collections.unmodifiableList(operations_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.operations_ = operations_;
} else {
result.operations_ = operationsBuilder_.build();
}
result.partialFailure_ = partialFailure_;
result.validateOnly_ = validateOnly_;
result.responseContentType_ = responseContentType_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
// The overrides below exist only to narrow the return type from the generic
// GeneratedMessageV3.Builder to this concrete Builder for fluent chaining.
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Messages of a different type are merged reflectively by the base class.
  if (!(other instanceof com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest)) {
    super.mergeFrom(other);
    return this;
  }
  // Same concrete type: use the generated field-by-field merge.
  return mergeFrom((com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest) other);
}
// Merges every non-default field of 'other' into this builder. Scalars are
// overwritten only when set in 'other'; repeated operations are appended.
public Builder mergeFrom(com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest other) {
if (other == com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest.getDefaultInstance()) return this;
if (!other.getCustomerId().isEmpty()) {
customerId_ = other.customerId_;
onChanged();
}
if (operationsBuilder_ == null) {
if (!other.operations_.isEmpty()) {
if (operations_.isEmpty()) {
// Adopt the other message's immutable list wholesale; the cleared
// ownership bit forces a copy before any later mutation.
operations_ = other.operations_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureOperationsIsMutable();
operations_.addAll(other.operations_);
}
onChanged();
}
} else {
if (!other.operations_.isEmpty()) {
if (operationsBuilder_.isEmpty()) {
// An empty field builder is discarded so the incoming list can be
// adopted directly, then (in nested-builder mode) rebuilt around it.
operationsBuilder_.dispose();
operationsBuilder_ = null;
operations_ = other.operations_;
bitField0_ = (bitField0_ & ~0x00000001);
operationsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getOperationsFieldBuilder() : null;
} else {
operationsBuilder_.addAllMessages(other.operations_);
}
}
}
if (other.getPartialFailure() != false) {
setPartialFailure(other.getPartialFailure());
}
if (other.getValidateOnly() != false) {
setValidateOnly(other.getValidateOnly());
}
if (other.responseContentType_ != 0) {
setResponseContentTypeValue(other.getResponseContentTypeValue());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
// No required fields in this message, so any builder state is buildable.
public final boolean isInitialized() {
return true;
}
@java.lang.Override
// Parses a message from the stream and merges it in. On a parse failure the
// partially-read message is still merged (in the finally block) before the
// underlying IOException is rethrown, preserving whatever was readable.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0x00000001 tracks whether the builder owns a mutable copy of operations_.
private int bitField0_;
// Holds either a String or a ByteString; lazily converted and cached in the
// accessors below (standard protobuf lazy string representation).
private java.lang.Object customerId_ = "";
/**
* <pre>
* Required. ID of the customer whose ad group bid modifiers are being modified.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent reads skip the UTF-8 decode.
customerId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. ID of the customer whose ad group bid modifiers are being modified.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for the symmetric fast path.
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. ID of the customer whose ad group bid modifiers are being modified.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
customerId_ = value;
onChanged();
return this;
}
/**
* <pre>
* Required. ID of the customer whose ad group bid modifiers are being modified.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearCustomerId() {
customerId_ = getDefaultInstance().getCustomerId();
onChanged();
return this;
}
/**
* <pre>
* Required. ID of the customer whose ad group bid modifiers are being modified.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject bytes that are not valid UTF-8 before storing them.
checkByteStringIsUtf8(value);
customerId_ = value;
onChanged();
return this;
}
// The repeated 'operations' field runs in one of two modes: a plain list
// (operationsBuilder_ == null) guarded by bitField0_ bit 0x00000001 for
// copy-on-write, or a RepeatedFieldBuilderV3 once nested builders are used.
private java.util.List<com.google.ads.googleads.v10.services.AdGroupBidModifierOperation> operations_ =
java.util.Collections.emptyList();
// Copy-on-write guard: clone the (possibly shared/immutable) list before the
// first mutation and mark ownership via the bit field.
private void ensureOperationsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
operations_ = new java.util.ArrayList<com.google.ads.googleads.v10.services.AdGroupBidModifierOperation>(operations_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v10.services.AdGroupBidModifierOperation, com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.Builder, com.google.ads.googleads.v10.services.AdGroupBidModifierOperationOrBuilder> operationsBuilder_;
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public java.util.List<com.google.ads.googleads.v10.services.AdGroupBidModifierOperation> getOperationsList() {
if (operationsBuilder_ == null) {
return java.util.Collections.unmodifiableList(operations_);
} else {
return operationsBuilder_.getMessageList();
}
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public int getOperationsCount() {
if (operationsBuilder_ == null) {
return operations_.size();
} else {
return operationsBuilder_.getCount();
}
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v10.services.AdGroupBidModifierOperation getOperations(int index) {
if (operationsBuilder_ == null) {
return operations_.get(index);
} else {
return operationsBuilder_.getMessage(index);
}
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setOperations(
int index, com.google.ads.googleads.v10.services.AdGroupBidModifierOperation value) {
if (operationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOperationsIsMutable();
operations_.set(index, value);
onChanged();
} else {
operationsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setOperations(
int index, com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.Builder builderForValue) {
if (operationsBuilder_ == null) {
ensureOperationsIsMutable();
operations_.set(index, builderForValue.build());
onChanged();
} else {
operationsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addOperations(com.google.ads.googleads.v10.services.AdGroupBidModifierOperation value) {
if (operationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOperationsIsMutable();
operations_.add(value);
onChanged();
} else {
operationsBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addOperations(
int index, com.google.ads.googleads.v10.services.AdGroupBidModifierOperation value) {
if (operationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOperationsIsMutable();
operations_.add(index, value);
onChanged();
} else {
operationsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addOperations(
com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.Builder builderForValue) {
if (operationsBuilder_ == null) {
ensureOperationsIsMutable();
operations_.add(builderForValue.build());
onChanged();
} else {
operationsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addOperations(
int index, com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.Builder builderForValue) {
if (operationsBuilder_ == null) {
ensureOperationsIsMutable();
operations_.add(index, builderForValue.build());
onChanged();
} else {
operationsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder addAllOperations(
java.lang.Iterable<? extends com.google.ads.googleads.v10.services.AdGroupBidModifierOperation> values) {
if (operationsBuilder_ == null) {
ensureOperationsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, operations_);
onChanged();
} else {
operationsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder clearOperations() {
if (operationsBuilder_ == null) {
operations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
operationsBuilder_.clear();
}
return this;
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder removeOperations(int index) {
if (operationsBuilder_ == null) {
ensureOperationsIsMutable();
operations_.remove(index);
onChanged();
} else {
operationsBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.Builder getOperationsBuilder(
int index) {
// Accessing a nested builder switches the field into builder mode.
return getOperationsFieldBuilder().getBuilder(index);
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v10.services.AdGroupBidModifierOperationOrBuilder getOperationsOrBuilder(
int index) {
if (operationsBuilder_ == null) {
return operations_.get(index); } else {
return operationsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public java.util.List<? extends com.google.ads.googleads.v10.services.AdGroupBidModifierOperationOrBuilder>
getOperationsOrBuilderList() {
if (operationsBuilder_ != null) {
return operationsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(operations_);
}
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.Builder addOperationsBuilder() {
return getOperationsFieldBuilder().addBuilder(
com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.getDefaultInstance());
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.Builder addOperationsBuilder(
int index) {
return getOperationsFieldBuilder().addBuilder(
index, com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.getDefaultInstance());
}
/**
* <pre>
* Required. The list of operations to perform on individual ad group bid modifiers.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.AdGroupBidModifierOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public java.util.List<com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.Builder>
getOperationsBuilderList() {
return getOperationsFieldBuilder().getBuilderList();
}
// Lazily creates the field builder, handing it the current list; from then
// on operations_ is nulled and the builder is the single source of truth.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v10.services.AdGroupBidModifierOperation, com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.Builder, com.google.ads.googleads.v10.services.AdGroupBidModifierOperationOrBuilder>
getOperationsFieldBuilder() {
if (operationsBuilder_ == null) {
operationsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v10.services.AdGroupBidModifierOperation, com.google.ads.googleads.v10.services.AdGroupBidModifierOperation.Builder, com.google.ads.googleads.v10.services.AdGroupBidModifierOperationOrBuilder>(
operations_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
operations_ = null;
}
return operationsBuilder_;
}
// Proto3 bool; no presence tracking, clear() simply restores false.
private boolean partialFailure_ ;
/**
* <pre>
* If true, successful operations will be carried out and invalid
* operations will return errors. If false, all operations will be carried
* out in one transaction if and only if they are all valid.
* Default is false.
* </pre>
*
* <code>bool partial_failure = 3;</code>
* @return The partialFailure.
*/
@java.lang.Override
public boolean getPartialFailure() {
return partialFailure_;
}
/**
* <pre>
* If true, successful operations will be carried out and invalid
* operations will return errors. If false, all operations will be carried
* out in one transaction if and only if they are all valid.
* Default is false.
* </pre>
*
* <code>bool partial_failure = 3;</code>
* @param value The partialFailure to set.
* @return This builder for chaining.
*/
public Builder setPartialFailure(boolean value) {
partialFailure_ = value;
onChanged();
return this;
}
/**
* <pre>
* If true, successful operations will be carried out and invalid
* operations will return errors. If false, all operations will be carried
* out in one transaction if and only if they are all valid.
* Default is false.
* </pre>
*
* <code>bool partial_failure = 3;</code>
* @return This builder for chaining.
*/
public Builder clearPartialFailure() {
partialFailure_ = false;
onChanged();
return this;
}
// Proto3 bool; no presence tracking, clear() simply restores false.
private boolean validateOnly_ ;
/**
* <pre>
* If true, the request is validated but not executed. Only errors are
* returned, not results.
* </pre>
*
* <code>bool validate_only = 4;</code>
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
/**
* <pre>
* If true, the request is validated but not executed. Only errors are
* returned, not results.
* </pre>
*
* <code>bool validate_only = 4;</code>
* @param value The validateOnly to set.
* @return This builder for chaining.
*/
public Builder setValidateOnly(boolean value) {
validateOnly_ = value;
onChanged();
return this;
}
/**
* <pre>
* If true, the request is validated but not executed. Only errors are
* returned, not results.
* </pre>
*
* <code>bool validate_only = 4;</code>
* @return This builder for chaining.
*/
public Builder clearValidateOnly() {
validateOnly_ = false;
onChanged();
return this;
}
// Wire-format (numeric) value of the response_content_type enum field.
private int responseContentType_ = 0;
/**
 * <pre>
 * The response content type setting. Determines whether the mutable resource
 * or just the resource name should be returned post mutation.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
 * @return The enum numeric value on the wire for responseContentType.
 */
@java.lang.Override public int getResponseContentTypeValue() {
  return responseContentType_;
}
/**
 * <pre>
 * The response content type setting. Determines whether the mutable resource
 * or just the resource name should be returned post mutation.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
 * @param value The enum numeric value on the wire for responseContentType to set.
 * @return This builder for chaining.
 */
public Builder setResponseContentTypeValue(int value) {
  // Raw value is stored without validation; unknown values surface as
  // UNRECOGNIZED from getResponseContentType().
  responseContentType_ = value;
  onChanged();
  return this;
}
/**
 * <pre>
 * The response content type setting. Determines whether the mutable resource
 * or just the resource name should be returned post mutation.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
 * @return The responseContentType, or {@code UNRECOGNIZED} if the stored
 *         wire value does not map to a known enum constant.
 */
@java.lang.Override
public com.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType getResponseContentType() {
  // forNumber(int) is the non-deprecated replacement for valueOf(int), so
  // the @SuppressWarnings("deprecation") workaround is no longer needed.
  com.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType result = com.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType.forNumber(responseContentType_);
  return result == null ? com.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType.UNRECOGNIZED : result;
}
/**
 * <pre>
 * The response content type setting. Determines whether the mutable resource
 * or just the resource name should be returned post mutation.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
 * @param value The responseContentType to set.
 * @return This builder for chaining.
 */
public Builder setResponseContentType(com.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType value) {
  if (value == null) {
    throw new NullPointerException();
  }

  // Only the numeric wire value is stored.
  responseContentType_ = value.getNumber();
  onChanged();
  return this;
}
/**
 * <pre>
 * The response content type setting. Determines whether the mutable resource
 * or just the resource name should be returned post mutation.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
 * @return This builder for chaining.
 */
public Builder clearResponseContentType() {

  // 0 is the proto3 default (UNSPECIFIED).
  responseContentType_ = 0;
  onChanged();
  return this;
}
// Standard generated pass-throughs for unknown-field handling.
@java.lang.Override
public final Builder setUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest)
// Singleton default (all-fields-default) instance, created eagerly at class load.
private static final com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest();
}

public static com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Stateless parser shared by all instances of this message type.
private static final com.google.protobuf.Parser<MutateAdGroupBidModifiersRequest>
    PARSER = new com.google.protobuf.AbstractParser<MutateAdGroupBidModifiersRequest>() {
  @java.lang.Override
  public MutateAdGroupBidModifiersRequest parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new MutateAdGroupBidModifiersRequest(input, extensionRegistry);
  }
};

public static com.google.protobuf.Parser<MutateAdGroupBidModifiersRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<MutateAdGroupBidModifiersRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.ads.googleads.v10.services.MutateAdGroupBidModifiersRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.cordova;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLDecoder;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.HashMap;
import java.util.Iterator;
import java.util.zip.GZIPInputStream;
import java.util.zip.Inflater;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.apache.cordova.api.CallbackContext;
import org.apache.cordova.api.CordovaPlugin;
import org.apache.cordova.api.PluginResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.net.Uri;
import android.os.Build;
import android.util.Log;
import us.costan.chrome.ChromeCookieManager;
import com.squareup.okhttp.OkHttpClient;
public class FileTransfer extends CordovaPlugin {
private static final String LOG_TAG = "FileTransfer";
// Multipart framing pieces: each part is delimited by LINE_START + BOUNDARY
// and lines are terminated with CRLF, per the multipart/form-data format.
private static final String LINE_START = "--";
private static final String LINE_END = "\r\n";
private static final String BOUNDARY =  "+++++";

// Error codes mirrored by the JavaScript-side FileTransferError object.
// NOTE(review): these are mutable public fields; confirm no caller assigns
// them before tightening to `static final`.
public static int FILE_NOT_FOUND_ERR = 1;
public static int INVALID_URL_ERR = 2;
public static int CONNECTION_ERR = 3;
public static int ABORTED_ERR = 4;

// In-flight transfers keyed by the JS-side object id.
// All access is guarded by synchronized (activeRequests).
private static HashMap<String, RequestContext> activeRequests = new HashMap<String, RequestContext>();
private static final int MAX_BUFFER_SIZE = 16 * 1024;

// Shared client used to open every upload/download connection.
private static OkHttpClient httpClient = new OkHttpClient();
/**
 * Mutable bookkeeping for a single in-flight transfer. The streams are
 * published here so abort() can close them from another thread; the
 * {@code aborted} flag suppresses any result delivered after an abort.
 */
private static final class RequestContext {
    String source;
    String target;
    File targetFile;
    CallbackContext callbackContext;
    InputStream currentInputStream;
    OutputStream currentOutputStream;
    boolean aborted;

    RequestContext(String source, String target, CallbackContext callbackContext) {
        this.source = source;
        this.target = target;
        this.callbackContext = callbackContext;
    }

    // Forwards a result to the JS callback unless this transfer was aborted.
    // Synchronized (on this) so delivery cannot race with abort() flipping
    // the flag while it holds the same monitor.
    synchronized void sendPluginResult(PluginResult result) {
        if (!aborted) {
            callbackContext.sendPluginResult(result);
        }
    }
}
/**
 * Adds an interface method to an InputStream to return the number of bytes
 * read from the raw stream. This is used to track total progress against
 * the HTTP Content-Length header value from the server.
 */
private static abstract class TrackingInputStream extends FilterInputStream {
  public TrackingInputStream(final InputStream in) {
    super(in);
  }

  // Total bytes consumed from the underlying (wire-level) stream so far.
  public abstract long getTotalRawBytesRead();
}
/** GZIPInputStream subclass that makes the protected Inflater reachable. */
private static class ExposedGZIPInputStream extends GZIPInputStream {
    public ExposedGZIPInputStream(final InputStream stream) throws IOException {
        super(stream);
    }

    // Expose the inflater so callers can query compressed bytes consumed.
    public Inflater getInflater() {
        return inf;
    }
}
/**
 * Provides raw bytes-read tracking for a GZIP input stream. Reports the
 * total number of compressed bytes read from the input, rather than the
 * number of uncompressed bytes.
 */
private static class TrackingGZIPInputStream extends TrackingInputStream {
    private ExposedGZIPInputStream gzin;

    public TrackingGZIPInputStream(final ExposedGZIPInputStream stream) throws IOException {
        super(stream);
        gzin = stream;
    }

    public long getTotalRawBytesRead() {
        // Compressed (wire) bytes consumed, not inflated output size.
        return gzin.getInflater().getBytesRead();
    }
}
/**
 * Provides simple total-bytes-read tracking for an existing InputStream.
 */
private static class TrackingHTTPInputStream extends TrackingInputStream {
    private long bytesRead = 0;

    public TrackingHTTPInputStream(InputStream stream) {
        super(stream);
    }

    // Accumulates the byte COUNT of one bulk read; -1 (EOF) is not counted.
    private int updateBytesRead(int newBytesRead) {
        if (newBytesRead != -1) {
            bytesRead += newBytesRead;
        }
        return newBytesRead;
    }

    @Override
    public int read() throws IOException {
        // BUG FIX: super.read() returns the byte VALUE (0-255), not a count.
        // The previous code funneled it through updateBytesRead(), adding the
        // byte's value to the counter instead of 1 per byte read.
        int b = super.read();
        if (b != -1) {
            bytesRead++;
        }
        return b;
    }

    @Override
    public int read(byte[] buffer) throws IOException {
        return updateBytesRead(super.read(buffer));
    }

    @Override
    public int read(byte[] bytes, int offset, int count) throws IOException {
        return updateBytesRead(super.read(bytes, offset, count));
    }

    public long getTotalRawBytesRead() {
        return bytesRead;
    }
}
/**
 * Entry point from the Cordova bridge: dispatches "upload", "download" and
 * "abort" actions. Returns false for any unrecognized action.
 */
@Override
public boolean execute(String action, JSONArray args, final CallbackContext callbackContext) throws JSONException {
    if (action.equals("upload") || action.equals("download")) {
        String source = args.getString(0);
        String target = args.getString(1);

        if (action.equals("download")) {
            download(source, target, args, callbackContext);
        } else {
            try {
                // Source paths arrive URI-encoded from the JS side.
                source = URLDecoder.decode(source, "UTF-8");
                upload(source, target, args, callbackContext);
            } catch (UnsupportedEncodingException e) {
                callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.MALFORMED_URL_EXCEPTION, "UTF-8 error."));
            }
        }
        return true;
    }

    if (action.equals("abort")) {
        abort(args.getString(0));
        callbackContext.success();
        return true;
    }

    return false;
}
/**
 * Copies user-supplied headers onto the connection. Each entry may be a
 * single string or a JSON array of strings; the first value replaces any
 * default header, subsequent values are appended.
 */
private static void addHeadersToRequest(URLConnection connection, JSONObject headers) {
    try {
        Iterator<?> keys = headers.keys();
        while (keys.hasNext()) {
            String headerName = keys.next().toString();
            JSONArray values = headers.optJSONArray(headerName);
            if (values == null) {
                // Plain string value: normalize to a one-element array.
                values = new JSONArray();
                values.put(headers.getString(headerName));
            }
            connection.setRequestProperty(headerName, values.getString(0));
            for (int i = 1; i < values.length(); ++i) {
                connection.addRequestProperty(headerName, values.getString(i));
            }
        }
    } catch (JSONException e1) {
        // Malformed header map: send the request without the extra headers.
    }
}
/**
 * Uploads the specified file to the server URL provided using an HTTP multipart request.
 * Runs the actual transfer on a background thread and reports progress /
 * completion through the {@link RequestContext}.
 * @param source Full path of the file on the file system
 * @param target URL of the server to receive the file
 * @param args JSON Array of args
 * @param callbackContext callback id for optional progress reports
 *
 * args[2] fileKey Name of file request parameter
 * args[3] fileName File name to be used on server
 * args[4] mimeType Describes file content type
 * args[5] params key:value pairs of user-defined parameters
 * @return FileUploadResult containing result of upload request
 */
private void upload(final String source, final String target, JSONArray args, CallbackContext callbackContext) throws JSONException {
    Log.d(LOG_TAG, "upload " + source + " to " + target);

    // Setup the options
    final String fileKey = getArgument(args, 2, "file");
    final String fileName = getArgument(args, 3, "image.jpg");
    final String mimeType = getArgument(args, 4, "image/jpeg");
    final JSONObject params = args.optJSONObject(5) == null ? new JSONObject() : args.optJSONObject(5);
    final boolean trustEveryone = args.optBoolean(6);
    // Always use chunked mode unless set to false as per API
    final boolean chunkedMode = args.optBoolean(7) || args.isNull(7);
    // Look for headers on the params map for backwards compatibility with older Cordova versions.
    final JSONObject headers = args.optJSONObject(8) == null ? params.optJSONObject("headers") : args.optJSONObject(8);
    final String objectId = args.getString(9);
    final String httpMethod = getArgument(args, 10, "POST");

    Log.d(LOG_TAG, "fileKey: " + fileKey);
    Log.d(LOG_TAG, "fileName: " + fileName);
    Log.d(LOG_TAG, "mimeType: " + mimeType);
    Log.d(LOG_TAG, "params: " + params);
    Log.d(LOG_TAG, "trustEveryone: " + trustEveryone);
    Log.d(LOG_TAG, "chunkedMode: " + chunkedMode);
    Log.d(LOG_TAG, "headers: " + headers);
    Log.d(LOG_TAG, "objectId: " + objectId);
    Log.d(LOG_TAG, "httpMethod: " + httpMethod);

    final URL url;
    try {
        url = new URL(target);
    } catch (MalformedURLException e) {
        JSONObject error = createFileTransferError(INVALID_URL_ERR, source, target, null, 0);
        Log.e(LOG_TAG, error.toString(), e);
        callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.IO_EXCEPTION, error));
        return;
    }
    final boolean useHttps = url.getProtocol().equals("https");

    final RequestContext context = new RequestContext(source, target, callbackContext);
    synchronized (activeRequests) {
        activeRequests.put(objectId, context);
    }

    cordova.getThreadPool().execute(new Runnable() {
        public void run() {
            // Cheap early-out; a racing abort() is handled again below under
            // the context lock once the streams exist.
            if (context.aborted) {
                return;
            }
            HttpURLConnection conn = null;
            HostnameVerifier oldHostnameVerifier = null;
            SSLSocketFactory oldSocketFactory = null;
            int totalBytes = 0;
            int fixedLength = -1;
            try {
                // Create return object
                FileUploadResult result = new FileUploadResult();
                FileProgressResult progress = new FileProgressResult();

                //------------------ CLIENT REQUEST
                // Open a HTTP connection to the URL based on protocol
                // NOTE(review): OkHttpClient.open(URL) is the legacy OkHttp
                // 1.x API -- confirm against the bundled library version.
                if (useHttps) {
                    // Using standard HTTPS connection. Will not allow self signed certificate
                    if (!trustEveryone) {
                        conn = (HttpsURLConnection) httpClient.open(url);
                    }
                    // Use our HTTPS connection that blindly trusts everyone.
                    // This should only be used in debug environments
                    else {
                        // Setup the HTTPS connection class to trust everyone
                        HttpsURLConnection https = (HttpsURLConnection) httpClient.open(url);
                        oldSocketFactory = trustAllHosts(https);
                        // Save the current hostnameVerifier
                        oldHostnameVerifier = https.getHostnameVerifier();
                        // Setup the connection not to verify hostnames
                        https.setHostnameVerifier(DO_NOT_VERIFY);
                        conn = https;
                    }
                }
                // Return a standard HTTP connection
                else {
                    conn = httpClient.open(url);
                }

                // Allow Inputs
                conn.setDoInput(true);
                // Allow Outputs
                conn.setDoOutput(true);
                // Don't use a cached copy.
                conn.setUseCaches(false);
                // Use a post method.
                conn.setRequestMethod(httpMethod);
                conn.setRequestProperty("Content-Type", "multipart/form-data;boundary=" + BOUNDARY);

                // Set the cookies on the response
                String cookie = ChromeCookieManager.getInstance().getCookie(target);
                if (cookie != null) {
                    conn.setRequestProperty("Cookie", cookie);
                }

                // Handle the other headers
                if (headers != null) {
                    addHeadersToRequest(conn, headers);
                }

                /*
                 * Store the non-file portions of the multipart data as a string, so that we can add it
                 * to the contentSize, since it is part of the body of the HTTP request.
                 */
                StringBuilder beforeData = new StringBuilder();
                try {
                    for (Iterator<?> iter = params.keys(); iter.hasNext();) {
                        Object key = iter.next();
                        if(!String.valueOf(key).equals("headers"))
                        {
                            beforeData.append(LINE_START).append(BOUNDARY).append(LINE_END);
                            beforeData.append("Content-Disposition: form-data; name=\"").append(key.toString()).append('"');
                            beforeData.append(LINE_END).append(LINE_END);
                            beforeData.append(params.getString(key.toString()));
                            beforeData.append(LINE_END);
                        }
                    }
                } catch (JSONException e) {
                    Log.e(LOG_TAG, e.getMessage(), e);
                }

                beforeData.append(LINE_START).append(BOUNDARY).append(LINE_END);
                beforeData.append("Content-Disposition: form-data; name=\"").append(fileKey).append("\";");
                beforeData.append(" filename=\"").append(fileName).append('"').append(LINE_END);
                beforeData.append("Content-Type: ").append(mimeType).append(LINE_END).append(LINE_END);
                byte[] beforeDataBytes = beforeData.toString().getBytes("UTF-8");
                byte[] tailParamsBytes = (LINE_END + LINE_START + BOUNDARY + LINE_START + LINE_END).getBytes("UTF-8");

                // Get a input stream of the file on the phone
                InputStream sourceInputStream = getPathFromUri(source);

                int stringLength = beforeDataBytes.length + tailParamsBytes.length;
                if (sourceInputStream instanceof FileInputStream) {
                    // File-backed source: total size is knowable, so progress
                    // events can report a computable length.
                    fixedLength = (int) ((FileInputStream)sourceInputStream).getChannel().size() + stringLength;
                    progress.setLengthComputable(true);
                    progress.setTotal(fixedLength);
                }
                Log.d(LOG_TAG, "Content Length: " + fixedLength);
                // setFixedLengthStreamingMode causes an OutOfMemoryException on pre-Froyo devices.
                // http://code.google.com/p/android/issues/detail?id=3164
                // It also causes OOM if HTTPS is used, even on newer devices.
                boolean useChunkedMode = chunkedMode && (Build.VERSION.SDK_INT < Build.VERSION_CODES.FROYO || useHttps);
                useChunkedMode = useChunkedMode || (fixedLength == -1);

                if (useChunkedMode) {
                    conn.setChunkedStreamingMode(MAX_BUFFER_SIZE);
                    // Although setChunkedStreamingMode sets this header, setting it explicitly here works
                    // around an OutOfMemoryException when using https.
                    conn.setRequestProperty("Transfer-Encoding", "chunked");
                } else {
                    conn.setFixedLengthStreamingMode(fixedLength);
                }

                conn.connect();

                OutputStream sendStream = null;
                try {
                    sendStream = conn.getOutputStream();
                    synchronized (context) {
                        if (context.aborted) {
                            return;
                        }
                        // Publish the stream so abort() can close it.
                        context.currentOutputStream = sendStream;
                    }
                    //We don't want to change encoding, we just want this to write for all Unicode.
                    sendStream.write(beforeDataBytes);
                    totalBytes += beforeDataBytes.length;

                    // create a buffer of maximum size
                    int bytesAvailable = sourceInputStream.available();
                    int bufferSize = Math.min(bytesAvailable, MAX_BUFFER_SIZE);
                    byte[] buffer = new byte[bufferSize];

                    // read file and write it into form...
                    int bytesRead = sourceInputStream.read(buffer, 0, bufferSize);

                    long prevBytesRead = 0;
                    while (bytesRead > 0) {
                        result.setBytesSent(totalBytes);
                        sendStream.write(buffer, 0, bytesRead);
                        totalBytes += bytesRead;
                        // Log at most once per ~100KB to keep logcat quiet.
                        if (totalBytes > prevBytesRead + 102400) {
                            prevBytesRead = totalBytes;
                            Log.d(LOG_TAG, "Uploaded " + totalBytes + " of " + fixedLength + " bytes");
                        }
                        bytesAvailable = sourceInputStream.available();
                        bufferSize = Math.min(bytesAvailable, MAX_BUFFER_SIZE);
                        bytesRead = sourceInputStream.read(buffer, 0, bufferSize);

                        // Send a progress event.
                        progress.setLoaded(totalBytes);
                        PluginResult progressResult = new PluginResult(PluginResult.Status.OK, progress.toJSONObject());
                        progressResult.setKeepCallback(true);
                        context.sendPluginResult(progressResult);
                    }

                    // send multipart form data necessary after file data...
                    sendStream.write(tailParamsBytes);
                    totalBytes += tailParamsBytes.length;
                    sendStream.flush();
                } finally {
                    safeClose(sourceInputStream);
                    safeClose(sendStream);
                }
                context.currentOutputStream = null;

                Log.d(LOG_TAG, "Sent " + totalBytes + " of " + fixedLength);

                //------------------ read the SERVER RESPONSE
                String responseString;
                int responseCode = conn.getResponseCode();
                Log.d(LOG_TAG, "response code: " + responseCode);
                Log.d(LOG_TAG, "response headers: " + conn.getHeaderFields());
                TrackingInputStream inStream = null;
                try {
                    inStream = getInputStream(conn);
                    synchronized (context) {
                        if (context.aborted) {
                            return;
                        }
                        context.currentInputStream = inStream;
                    }

                    ByteArrayOutputStream out = new ByteArrayOutputStream(Math.max(1024, conn.getContentLength()));
                    byte[] buffer = new byte[1024];
                    int bytesRead = 0;
                    // Accumulate the full response body in memory.
                    while ((bytesRead = inStream.read(buffer)) > 0) {
                        out.write(buffer, 0, bytesRead);
                    }
                    responseString = out.toString("UTF-8");
                } finally {
                    context.currentInputStream = null;
                    safeClose(inStream);
                }

                Log.d(LOG_TAG, "got response from server");
                Log.d(LOG_TAG, responseString.substring(0, Math.min(256, responseString.length())));

                // send request and retrieve response
                result.setResponseCode(responseCode);
                result.setResponse(responseString);

                context.sendPluginResult(new PluginResult(PluginResult.Status.OK, result.toJSONObject()));
            } catch (FileNotFoundException e) {
                JSONObject error = createFileTransferError(FILE_NOT_FOUND_ERR, source, target, conn);
                Log.e(LOG_TAG, error.toString(), e);
                context.sendPluginResult(new PluginResult(PluginResult.Status.IO_EXCEPTION, error));
            } catch (IOException e) {
                JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, conn);
                Log.e(LOG_TAG, error.toString(), e);
                Log.e(LOG_TAG, "Failed after uploading " + totalBytes + " of " + fixedLength + " bytes.");
                context.sendPluginResult(new PluginResult(PluginResult.Status.IO_EXCEPTION, error));
            } catch (JSONException e) {
                Log.e(LOG_TAG, e.getMessage(), e);
                context.sendPluginResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION));
            } catch (Throwable t) {
                // Shouldn't happen, but will
                JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, conn);
                Log.e(LOG_TAG, error.toString(), t);
                context.sendPluginResult(new PluginResult(PluginResult.Status.IO_EXCEPTION, error));
            } finally {
                synchronized (activeRequests) {
                    activeRequests.remove(objectId);
                }

                if (conn != null) {
                    // Revert back to the proper verifier and socket factories
                    if (trustEveryone && useHttps) {
                        HttpsURLConnection https = (HttpsURLConnection) conn;
                        https.setHostnameVerifier(oldHostnameVerifier);
                        https.setSSLSocketFactory(oldSocketFactory);
                    }
                }
            }
        }
    });
}
/** Closes a stream, tolerating null and swallowing close-time IO errors. */
private static void safeClose(Closeable stream) {
    if (stream == null) {
        return;
    }
    try {
        stream.close();
    } catch (IOException ignored) {
        // Best-effort cleanup: a failed close is not actionable here.
    }
}
/**
 * Wraps the connection's input stream in a byte-tracking stream, honoring
 * gzip content-encoding so progress reflects raw (wire) bytes.
 */
private static TrackingInputStream getInputStream(URLConnection conn) throws IOException {
    String encoding = conn.getContentEncoding();
    boolean gzipped = encoding != null && encoding.equalsIgnoreCase("gzip");
    if (gzipped) {
        return new TrackingGZIPInputStream(new ExposedGZIPInputStream(conn.getInputStream()));
    }
    return new TrackingHTTPInputStream(conn.getInputStream());
}
// Hostname verifier that accepts every host without checking it against the
// certificate. SECURITY: only installed when the caller explicitly passes
// trustEveryone=true; never use outside debug/development builds.
private static final HostnameVerifier DO_NOT_VERIFY = new HostnameVerifier() {
    public boolean verify(String hostname, SSLSession session) {
        return true;
    }
};

// Create a trust manager that does not validate certificate chains.
// SECURITY: disables all client/server certificate validation; intended
// solely for development against self-signed certificates.
private static final TrustManager[] trustAllCerts = new TrustManager[] { new X509TrustManager() {
    public java.security.cert.X509Certificate[] getAcceptedIssuers() {
        return new java.security.cert.X509Certificate[] {};
    }

    public void checkClientTrusted(X509Certificate[] chain,
            String authType) throws CertificateException {
        // Intentionally empty: trust every client certificate.
    }

    public void checkServerTrusted(X509Certificate[] chain,
            String authType) throws CertificateException {
        // Intentionally empty: trust every server certificate.
    }
} };
/**
 * This function will install a trust manager that will blindly trust all SSL
 * certificates. The reason this code is being added is to enable developers
 * to do development using self signed SSL certificates on their web server.
 *
 * The standard HttpsURLConnection class will throw an exception on self
 * signed certificates if this code is not run.
 *
 * @param connection the HTTPS connection to reconfigure
 * @return the connection's previous SSLSocketFactory, so the caller can
 *         restore it once the transfer completes
 */
private static SSLSocketFactory trustAllHosts(HttpsURLConnection connection) {
    // Install the all-trusting trust manager
    SSLSocketFactory oldFactory = connection.getSSLSocketFactory();
    try {
        // Install our all trusting manager
        SSLContext sc = SSLContext.getInstance("TLS");
        sc.init(null, trustAllCerts, new java.security.SecureRandom());
        SSLSocketFactory newFactory = sc.getSocketFactory();
        connection.setSSLSocketFactory(newFactory);
    } catch (Exception e) {
        // Best-effort: on failure the connection keeps its original
        // (validating) socket factory.
        Log.e(LOG_TAG, e.getMessage(), e);
    }
    return oldFactory;
}
/**
 * Builds an error object from a live connection, capturing the HTTP status
 * code and the error-stream body when available.
 *
 * @param errorCode  one of the *_ERR constants
 * @param source     transfer source (path or URL)
 * @param target     transfer target (URL or path)
 * @param connection the connection the failure occurred on; may be null
 * @return JSONObject describing the failure
 */
private static JSONObject createFileTransferError(int errorCode, String source, String target, URLConnection connection) {
    int httpStatus = 0;
    String body = null;
    if (connection != null) {
        try {
            if (connection instanceof HttpURLConnection) {
                HttpURLConnection http = (HttpURLConnection) connection;
                httpStatus = http.getResponseCode();
                InputStream err = http.getErrorStream();
                if (err != null) {
                    BufferedReader reader = new BufferedReader(new InputStreamReader(err, "UTF-8"));
                    // BUG FIX: the reader (and underlying error stream) was
                    // never closed; close it in a finally block.
                    try {
                        StringBuilder bodyBuilder = new StringBuilder();
                        String line = reader.readLine();
                        while (line != null) {
                            bodyBuilder.append(line);
                            line = reader.readLine();
                            if (line != null) {
                                bodyBuilder.append('\n');
                            }
                        }
                        body = bodyBuilder.toString();
                    } finally {
                        safeClose(reader);
                    }
                }
            }
        } catch (IOException e) {
            Log.w(LOG_TAG, "Error getting HTTP status code from connection.", e);
        }
    }

    return createFileTransferError(errorCode, source, target, body, httpStatus);
}
/**
 * Builds the JSON error payload delivered to the JavaScript layer.
 *
 * @param errorCode  one of the *_ERR constants
 * @param source     transfer source (path or URL)
 * @param target     transfer target (URL or path)
 * @param body       server error body, or null if none was captured
 * @param httpStatus HTTP status code, or null when not applicable
 * @return JSONObject containing the error
 */
private static JSONObject createFileTransferError(int errorCode, String source, String target, String body, Integer httpStatus) {
    JSONObject error = null;
    try {
        error = new JSONObject();
        error.put("code", errorCode);
        error.put("source", source);
        error.put("target", target);
        if (body != null) {
            error.put("body", body);
        }
        if (httpStatus != null) {
            error.put("http_status", httpStatus);
        }
    } catch (JSONException e) {
        // Keys are constants and values are primitives/strings, so this is
        // effectively unreachable; log it just in case.
        Log.e(LOG_TAG, e.getMessage(), e);
    }
    return error;
}
/**
 * Convenience method to read a parameter from the list of JSON args.
 *
 * @param args the args passed to the Plugin
 * @param position the position to retrieve the arg from
 * @param defaultString the default to be used if the arg does not exist
 *                      or is the literal string "null"
 * @return String with the retrieved value
 */
private static String getArgument(JSONArray args, int position, String defaultString) {
    if (args.length() <= position) {
        return defaultString;
    }
    String arg = args.optString(position);
    // The JS bridge encodes missing values as the string "null".
    if (arg == null || "null".equals(arg)) {
        return defaultString;
    }
    return arg;
}
/**
 * Downloads a file from a given URL and saves it to the specified location.
 * Runs on a background thread; progress and the final result are reported
 * through the {@link RequestContext}.
 *
 * @param source URL of the file to download
 * @param target Full path on the file system where the file is saved
 */
private void download(final String source, final String target, JSONArray args, CallbackContext callbackContext) throws JSONException {
    Log.d(LOG_TAG, "download " + source + " to " +  target);

    // args[2]: trust self-signed certs, args[3]: JS-side transfer id,
    // args[4]: optional request headers.
    final boolean trustEveryone = args.optBoolean(2);
    final String objectId = args.getString(3);
    final JSONObject headers = args.optJSONObject(4);

    final URL url;
    try {
        url = new URL(source);
    } catch (MalformedURLException e) {
        JSONObject error = createFileTransferError(INVALID_URL_ERR, source, target, null, 0);
        Log.e(LOG_TAG, error.toString(), e);
        callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.IO_EXCEPTION, error));
        return;
    }
    final boolean useHttps = url.getProtocol().equals("https");

    // Enforce the Cordova URL whitelist before touching the network.
    if (!Config.isUrlWhiteListed(source)) {
        Log.w(LOG_TAG, "Source URL is not in white list: '" + source + "'");
        JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, null, 401);
        callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.IO_EXCEPTION, error));
        return;
    }

    final RequestContext context = new RequestContext(source, target, callbackContext);
    synchronized (activeRequests) {
        activeRequests.put(objectId, context);
    }

    cordova.getThreadPool().execute(new Runnable() {
        public void run() {
            if (context.aborted) {
                return;
            }
            URLConnection connection = null;
            HostnameVerifier oldHostnameVerifier = null;
            SSLSocketFactory oldSocketFactory = null;
            File file = null;
            PluginResult result = null;

            try {
                file = getFileFromPath(target);
                context.targetFile = file;
                // create needed directories
                file.getParentFile().mkdirs();

                // connect to server
                // Open a HTTP connection to the URL based on protocol
                // NOTE(review): OkHttpClient.open(URL) is the legacy OkHttp
                // 1.x API -- confirm against the bundled library version.
                if (useHttps) {
                    // Using standard HTTPS connection. Will not allow self signed certificate
                    if (!trustEveryone) {
                        connection = (HttpsURLConnection) httpClient.open(url);
                    }
                    // Use our HTTPS connection that blindly trusts everyone.
                    // This should only be used in debug environments
                    else {
                        // Setup the HTTPS connection class to trust everyone
                        HttpsURLConnection https = (HttpsURLConnection) httpClient.open(url);
                        oldSocketFactory = trustAllHosts(https);
                        // Save the current hostnameVerifier
                        oldHostnameVerifier = https.getHostnameVerifier();
                        // Setup the connection not to verify hostnames
                        https.setHostnameVerifier(DO_NOT_VERIFY);
                        connection = https;
                    }
                }
                // Return a standard HTTP connection
                else {
                    connection = httpClient.open(url);
                }

                if (connection instanceof HttpURLConnection) {
                    ((HttpURLConnection)connection).setRequestMethod("GET");
                }

                //Add cookie support
                String cookie = ChromeCookieManager.getInstance().getCookie(source);
                if(cookie != null)
                {
                    connection.setRequestProperty("cookie", cookie);
                }

                // This must be explicitly set for gzip progress tracking to work.
                connection.setRequestProperty("Accept-Encoding", "gzip");

                // Handle the other headers
                if (headers != null) {
                    addHeadersToRequest(connection, headers);
                }

                connection.connect();

                Log.d(LOG_TAG, "Download file:" + url);

                FileProgressResult progress = new FileProgressResult();
                if (connection.getContentEncoding() == null || connection.getContentEncoding().equalsIgnoreCase("gzip")) {
                    // Only trust content-length header if we understand
                    // the encoding -- identity or gzip
                    progress.setLengthComputable(true);
                    progress.setTotal(connection.getContentLength());
                }

                FileOutputStream outputStream = null;
                TrackingInputStream inputStream = null;

                try {
                    inputStream = getInputStream(connection);
                    outputStream = new FileOutputStream(file);
                    synchronized (context) {
                        if (context.aborted) {
                            return;
                        }
                        // Publish the stream so abort() can close it.
                        context.currentInputStream = inputStream;
                    }

                    // write bytes to file
                    byte[] buffer = new byte[MAX_BUFFER_SIZE];
                    int bytesRead = 0;
                    while ((bytesRead = inputStream.read(buffer)) > 0) {
                        outputStream.write(buffer, 0, bytesRead);
                        // Send a progress event.
                        progress.setLoaded(inputStream.getTotalRawBytesRead());
                        PluginResult progressResult = new PluginResult(PluginResult.Status.OK, progress.toJSONObject());
                        progressResult.setKeepCallback(true);
                        context.sendPluginResult(progressResult);
                    }
                } finally {
                    context.currentInputStream = null;
                    safeClose(inputStream);
                    safeClose(outputStream);
                }

                Log.d(LOG_TAG, "Saved file: " + target);

                // create FileEntry object
                JSONObject fileEntry = FileUtils.getEntry(file);

                result = new PluginResult(PluginResult.Status.OK, fileEntry);
            } catch (FileNotFoundException e) {
                JSONObject error = createFileTransferError(FILE_NOT_FOUND_ERR, source, target, connection);
                Log.e(LOG_TAG, error.toString(), e);
                result = new PluginResult(PluginResult.Status.IO_EXCEPTION, error);
            } catch (IOException e) {
                JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, connection);
                Log.e(LOG_TAG, error.toString(), e);
                result = new PluginResult(PluginResult.Status.IO_EXCEPTION, error);
            } catch (JSONException e) {
                Log.e(LOG_TAG, e.getMessage(), e);
                result = new PluginResult(PluginResult.Status.JSON_EXCEPTION);
            } catch (Throwable e) {
                JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, connection);
                Log.e(LOG_TAG, error.toString(), e);
                result = new PluginResult(PluginResult.Status.IO_EXCEPTION, error);
            } finally {
                synchronized (activeRequests) {
                    activeRequests.remove(objectId);
                }

                if (connection != null) {
                    // Revert back to the proper verifier and socket factories
                    if (trustEveryone && useHttps) {
                        HttpsURLConnection https = (HttpsURLConnection) connection;
                        https.setHostnameVerifier(oldHostnameVerifier);
                        https.setSSLSocketFactory(oldSocketFactory);
                    }
                }

                if (result == null) {
                    result = new PluginResult(PluginResult.Status.ERROR, createFileTransferError(CONNECTION_ERR, source, target, connection));
                }
                // Remove incomplete download.
                if (result.getStatus() != PluginResult.Status.OK.ordinal() && file != null) {
                    file.delete();
                }
                context.sendPluginResult(result);
            }
        }
    });
}
/**
 * Get an input stream based on file path or content:// uri.
 *
 * Delegates to FileHelper first (which understands content:// and other
 * URI-style strings); if that yields nothing, falls back to opening the
 * string as a plain filesystem path.
 *
 * @param path a filesystem path or URI string (e.g. content://, file://)
 * @return an input stream for the given path
 * @throws FileNotFoundException if no stream could be opened for the path
 */
private InputStream getPathFromUri(String path) throws FileNotFoundException {
    try {
        InputStream stream = FileHelper.getInputStreamFromUriString(path, cordova);
        if (stream == null) {
            return new FileInputStream(path);
        } else {
            return stream;
        }
    } catch (IOException e) {
        // Keep the failing path in the message and chain the original
        // IOException as the cause instead of silently discarding it.
        FileNotFoundException fnfe = new FileNotFoundException(path);
        fnfe.initCause(e);
        throw fnfe;
    }
}
/**
 * Resolve a path string (optionally carrying a "file://" scheme prefix)
 * to a {@link File} object.
 *
 * @param path file path, with or without a leading "file://"
 * @return the file object for the given path
 * @throws FileNotFoundException if the resolved file has no parent
 *         directory (i.e. the path is not a usable file location)
 */
private File getFileFromPath(String path) throws FileNotFoundException {
    final String scheme = "file://";
    File resolved = path.startsWith(scheme)
            ? new File(path.substring(scheme.length()))
            : new File(path);
    if (resolved.getParent() == null) {
        throw new FileNotFoundException();
    }
    return resolved;
}
/**
 * Abort an ongoing upload or download identified by its object id.
 *
 * Removes the request from the active set, deletes any partially written
 * target file, fires the abort error callback right away, and closes the
 * request's streams on a background thread (closing may block).
 */
private void abort(String objectId) {
    final RequestContext context;
    synchronized (activeRequests) {
        context = activeRequests.remove(objectId);
    }
    if (context == null) {
        return;
    }
    // Discard whatever was partially written before the abort.
    File partialFile = context.targetFile;
    if (partialFile != null) {
        partialFile.delete();
    }
    // Trigger the abort callback immediately to minimize latency between it and abort() being called.
    JSONObject error = createFileTransferError(ABORTED_ERR, context.source, context.target, null, -1);
    synchronized (context) {
        context.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, error));
        context.aborted = true;
    }
    // Closing the streams can block, so execute on a background thread.
    cordova.getThreadPool().execute(new Runnable() {
        public void run() {
            synchronized (context) {
                safeClose(context.currentInputStream);
                safeClose(context.currentOutputStream);
            }
        }
    });
}
}
| |
/*
* $Id: TestTilesPlugin.java 557933 2007-07-20 09:15:51Z apetrelli $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts.tiles2;
import java.lang.reflect.InvocationTargetException;
import javax.servlet.ServletException;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.struts.Globals;
import org.apache.struts.action.PlugIn;
import org.apache.struts.config.ModuleConfig;
import org.apache.struts.config.ModuleConfigFactory;
import org.apache.struts.config.PlugInConfig;
import org.apache.struts.mock.MockActionServlet;
import org.apache.struts.mock.TestMockBase;
import org.apache.struts.util.RequestUtils;
import org.apache.tiles.TilesContainer;
import org.apache.tiles.access.TilesAccess;
import org.apache.tiles.definition.DefinitionsFactory;
import org.apache.tiles.impl.BasicTilesContainer;
import org.apache.tiles.impl.KeyedDefinitionsFactoryTilesContainer;
/**
* Tests the Tiles plugin.
*
* @version $Rev: 557933 $ $Date: 2007-07-20 11:15:51 +0200 (Ven, 20 jul 2007) $
*/
public class TestTilesPlugin extends TestMockBase {
/**
* The first module to configure.
*/
protected ModuleConfig module1;
/**
* The second module to configure.
*/
protected ModuleConfig module2;
/**
* A testing action servlet.
*/
protected MockActionServlet actionServlet;
/**
* The logging object.
*/
private static final Log LOG = LogFactory.getLog(TestTilesPlugin.class);
// ----------------------------------------------------------------- Basics
/**
* Constructor.
*
* @param name The name of the test.
*/
public TestTilesPlugin(String name) {
super(name);
}
/**
* Sample main method.
*
* @param args Arguments.
*/
public static void main(String[] args) {
junit.awtui.TestRunner.main(new String[] { TestTilesPlugin.class
.getName() });
}
/**
* Test suite method.
*
* @return The test.
*/
public static Test suite() {
return (new TestSuite(TestTilesPlugin.class));
}
// ----------------------------------------------------- Instance Variables
// ----------------------------------------------------- Setup and Teardown
/** {@inheritDoc} */
public void setUp() {
super.setUp();
actionServlet = new MockActionServlet(context, config);
}
/** {@inheritDoc} */
public void tearDown() {
super.tearDown();
}
// ------------------------------------------------------- Individual Tests
// ---------------------------------------------------------- absoluteURL()
/**
* Test multi factory creation when moduleAware=true.
*
* @throws ServletException
* If something goes wrong during initialization.
* @throws InvocationTargetException
* Bean properties problems.
* @throws InstantiationException
* Bean properties problems.
* @throws IllegalAccessException
* Bean properties problems.
* @throws ClassNotFoundException
* Bean properties problems.
*/
public void testMultiFactory() throws ClassNotFoundException,
IllegalAccessException, InstantiationException,
InvocationTargetException, ServletException {
// init TilesPlugin
module1 = createModuleConfig("/module1", "tiles-defs.xml", true);
module2 = createModuleConfig("/module2", "tiles-defs.xml", true);
initModulePlugIns(module1);
initModulePlugIns(module2);
// mock request context
request.setAttribute(Globals.MODULE_KEY, module1);
request.setPathElements("/myapp", "/module1/foo.do", null, null);
// Retrieve TilesContainer
TilesContainer container = TilesAccess.getContainer(actionServlet
.getServletContext());
assertSame(container.getClass().getName(),
KeyedDefinitionsFactoryTilesContainer.class.getName());
// Retrieve factory for module1
DefinitionsFactory factory1 = ((KeyedDefinitionsFactoryTilesContainer) container)
.getDefinitionsFactory("/module1");
assertNotNull("factory found", factory1);
// mock request context
request.setAttribute(Globals.MODULE_KEY, module2);
request.setPathElements("/myapp", "/module2/foo.do", null, null);
// Retrieve factory for module2
DefinitionsFactory factory2 = ((KeyedDefinitionsFactoryTilesContainer) container)
.getDefinitionsFactory("/module2");
assertNotNull("factory found", factory2);
// Check that factory are different
// FIXME This assert fails!
assertNotSame("Factory from different modules", factory1, factory2);
}
/**
* Tests if the TilesPlugin does a fail-fast on multiple configuration of
* the same module.
*
* @throws ServletException If something goes wrong during initialization.
* @throws InvocationTargetException Bean properties problems.
* @throws InstantiationException Bean properties problems.
* @throws IllegalAccessException Bean properties problems.
* @throws ClassNotFoundException Bean properties problems.
*/
public void testMultiModuleFailFast() throws ClassNotFoundException,
IllegalAccessException, InstantiationException,
InvocationTargetException, ServletException {
// init TilesPlugin
module1 = createModuleConfig("/module1", "tiles-defs.xml", true);
// The name is "/module1" on purpose
module2 = createModuleConfig("/module1", "tiles-defs.xml", true);
initModulePlugIns(module1);
try {
initModulePlugIns(module2);
fail("An exception should have been thrown");
} catch (ServletException e) {
// It is ok
LOG.debug("Intercepted a ServletException, it is ok", e);
}
}
/**
* Test single factory creation when moduleAware=false.
*
* @throws ServletException If something goes wrong during initialization.
* @throws InvocationTargetException Bean properties problems.
* @throws InstantiationException Bean properties problems.
* @throws IllegalAccessException Bean properties problems.
* @throws ClassNotFoundException Bean properties problems.
*/
public void testSingleSharedFactory() throws ClassNotFoundException,
IllegalAccessException, InstantiationException,
InvocationTargetException, ServletException {
// init TilesPlugin
module1 = createModuleConfig("/module1", "tiles-defs.xml", false);
module2 = createModuleConfig("/module2", "tiles-defs.xml", false);
initModulePlugIns(module1);
try {
initModulePlugIns(module2);
fail("An exception should have been thrown");
} catch (ServletException e) {
// It is ok
LOG.debug("Intercepted a ServletException, it is ok", e);
}
// mock request context
request.setAttribute(Globals.MODULE_KEY, module1);
request.setPathElements("/myapp", "/module1/foo.do", null, null);
// Retrieve TilesContainer
TilesContainer container = TilesAccess.getContainer(actionServlet
.getServletContext());
assertSame(container.getClass().getName(), BasicTilesContainer.class
.getName());
// Retrieve factory for module1
DefinitionsFactory factory1 = ((BasicTilesContainer) container)
.getDefinitionsFactory();
assertNotNull("factory found", factory1);
// mock request context
request.setAttribute(Globals.MODULE_KEY, module2);
request.setPathElements("/myapp", "/module2/foo.do", null, null);
// Retrieve factory for module2
DefinitionsFactory factory2 = ((BasicTilesContainer) container)
.getDefinitionsFactory();
assertNotNull("factory found", factory2);
// Check that factory are different
assertEquals("Same factory", factory1, factory2);
}
/**
* Create a module configuration.
*
* @param moduleName The name of the module.
* @param configFileName The name of the configuration file.
* @param moduleAware <code>true</code> if the configuration must be
* module-aware.
* @return The configuration object.
*/
private ModuleConfig createModuleConfig(String moduleName,
String configFileName, boolean moduleAware) {
ModuleConfig moduleConfig = ModuleConfigFactory.createFactory()
.createModuleConfig(moduleName);
context.setAttribute(Globals.MODULE_KEY + moduleName, moduleConfig);
// Set tiles plugin
PlugInConfig pluginConfig = new PlugInConfig();
pluginConfig.setClassName("org.apache.struts.tiles2.TilesPlugin");
pluginConfig.addProperty("moduleAware",
(moduleAware ? "true" : "false"));
pluginConfig.addProperty("definitions-config",
"/org/apache/struts/tiles2/config/" + configFileName);
moduleConfig.addPlugInConfig(pluginConfig);
return moduleConfig;
}
/**
* Fake call to init module plugins.
*
* @param moduleConfig The configuration of the module.
* @throws ServletException If something goes wrong during initialization.
* @throws InvocationTargetException Bean properties problems.
* @throws InstantiationException Bean properties problems.
* @throws IllegalAccessException Bean properties problems.
* @throws ClassNotFoundException Bean properties problems.
*/
private void initModulePlugIns(ModuleConfig moduleConfig)
throws ClassNotFoundException, IllegalAccessException,
InstantiationException, InvocationTargetException, ServletException {
PlugInConfig[] plugInConfigs = moduleConfig.findPlugInConfigs();
PlugIn[] plugIns = new PlugIn[plugInConfigs.length];
context.setAttribute(Globals.PLUG_INS_KEY + moduleConfig.getPrefix(),
plugIns);
for (int i = 0; i < plugIns.length; i++) {
plugIns[i] = (PlugIn) RequestUtils
.applicationInstance(plugInConfigs[i].getClassName());
BeanUtils.populate(plugIns[i], plugInConfigs[i].getProperties());
// Pass the current plugIn config object to the PlugIn.
// The property is set only if the plugin declares it.
// This plugin config object is needed by Tiles
BeanUtils.copyProperty(plugIns[i], "currentPlugInConfigObject",
plugInConfigs[i]);
plugIns[i].init(actionServlet, moduleConfig);
}
}
}
| |
/*
*
* * Copyright 2010-2016 OrientDB LTD (http://orientdb.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://orientdb.com
*
*/
package com.orientechnologies.orient.core.db;
import com.orientechnologies.orient.core.cache.OLocalRecordCache;
import com.orientechnologies.orient.core.command.OCommandOutputListener;
import com.orientechnologies.orient.core.config.OContextConfiguration;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.tool.ODatabaseImport;
import com.orientechnologies.orient.core.exception.ODatabaseException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.intent.OIntent;
import com.orientechnologies.orient.core.metadata.security.OToken;
import com.orientechnologies.orient.core.storage.ORecordMetadata;
import com.orientechnologies.orient.core.storage.OStorage;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Callable;
@SuppressWarnings("unchecked")
/**
 * Base wrapper that delegates every {@link ODatabaseInternal} operation to an
 * underlying database instance. Subclasses can override individual methods to
 * add behavior; cluster-level accessors first verify the database is open via
 * {@link #checkOpenness()}.
 */
public abstract class ODatabaseWrapperAbstract<DB extends ODatabaseInternal, T>
    implements ODatabaseInternal<T> {
  // The wrapped database all calls are forwarded to.
  protected DB underlying;
  // Outermost wrapper in a chain of wrappers; defaults to this instance.
  protected ODatabaseInternal<?> databaseOwner;

  public ODatabaseWrapperAbstract(final DB iDatabase) {
    underlying = iDatabase;
    databaseOwner = this;
  }

  public <THISDB extends ODatabase> THISDB open(
      final String iUserName, final String iUserPassword) {
    underlying.open(iUserName, iUserPassword);
    return (THISDB) this;
  }

  public <THISDB extends ODatabase> THISDB open(final OToken iToken) {
    underlying.open(iToken);
    return (THISDB) this;
  }

  @Override
  public ODatabase activateOnCurrentThread() {
    return underlying.activateOnCurrentThread();
  }

  @Override
  public boolean isActiveOnCurrentThread() {
    return underlying.isActiveOnCurrentThread();
  }

  public <THISDB extends ODatabase> THISDB create() {
    return (THISDB) underlying.create();
  }

  @Override
  public <THISDB extends ODatabase> THISDB create(String incrementalBackupPath) {
    return (THISDB) underlying.create(incrementalBackupPath);
  }

  public <THISDB extends ODatabase> THISDB create(
      final Map<OGlobalConfiguration, Object> iInitialSettings) {
    underlying.create(iInitialSettings);
    return (THISDB) this;
  }

  public boolean exists() {
    return underlying.exists();
  }

  public void reload() {
    underlying.reload();
  }

  @Override
  public OContextConfiguration getConfiguration() {
    return underlying.getConfiguration();
  }

  /**
   * Executes a backup of the database. During the backup the database will be frozen in read-only
   * mode.
   *
   * @param out OutputStream used to write the backup content. Use a FileOutputStream to make the
   *     backup persistent on disk
   * @param options Backup options as Map&lt;String, Object&gt; object
   * @param callable Callback to execute when the database is locked
   * @param iListener Listener called for backup messages
   * @param compressionLevel ZIP Compression level between 0 (no compression) and 9 (maximum). The
   *     bigger is the compression, the smaller will be the final backup content, but will consume
   *     more CPU and time to execute
   * @param bufferSize Buffer size in bytes, the bigger is the buffer, the more efficient will be
   *     the compression
   * @throws IOException
   */
  @Override
  public List<String> backup(
      OutputStream out,
      Map<String, Object> options,
      Callable<Object> callable,
      final OCommandOutputListener iListener,
      int compressionLevel,
      int bufferSize)
      throws IOException {
    return underlying.backup(out, options, callable, iListener, compressionLevel, bufferSize);
  }

  /**
   * Executes a restore of a database backup. During the restore the database will be frozen in
   * read-only mode.
   *
   * @param in InputStream used to read the backup content. Use a FileInputStream to read a backup
   *     on a disk
   * @param options Backup options as Map&lt;String, Object&gt; object
   * @param callable Callback to execute when the database is locked
   * @param iListener Listener called for backup messages
   * @throws IOException
   * @see ODatabaseImport
   */
  @Override
  public void restore(
      InputStream in,
      Map<String, Object> options,
      Callable<Object> callable,
      final OCommandOutputListener iListener)
      throws IOException {
    underlying.restore(in, options, callable, iListener);
  }

  public void close() {
    underlying.close();
  }

  public void replaceStorage(OStorage iNewStorage) {
    underlying.replaceStorage(iNewStorage);
  }

  public void drop() {
    underlying.drop();
  }

  public STATUS getStatus() {
    return underlying.getStatus();
  }

  public <THISDB extends ODatabase> THISDB setStatus(final STATUS iStatus) {
    underlying.setStatus(iStatus);
    return (THISDB) this;
  }

  public String getName() {
    return underlying.getName();
  }

  public String getURL() {
    return underlying.getURL();
  }

  public OStorage getStorage() {
    return underlying.getStorage();
  }

  public OLocalRecordCache getLocalCache() {
    return underlying.getLocalCache();
  }

  public boolean isClosed() {
    return underlying.isClosed();
  }

  public long countClusterElements(final int iClusterId) {
    checkOpenness();
    return underlying.countClusterElements(iClusterId);
  }

  /** {@inheritDoc} */
  @Override
  public void truncateCluster(String clusterName) {
    checkOpenness();
    underlying.truncateCluster(clusterName);
  }

  public long countClusterElements(final int[] iClusterIds) {
    checkOpenness();
    return underlying.countClusterElements(iClusterIds);
  }

  public long countClusterElements(final String iClusterName) {
    checkOpenness();
    return underlying.countClusterElements(iClusterName);
  }

  @Override
  public long countClusterElements(int iClusterId, boolean countTombstones) {
    checkOpenness();
    return underlying.countClusterElements(iClusterId, countTombstones);
  }

  @Override
  public long countClusterElements(int[] iClusterIds, boolean countTombstones) {
    checkOpenness();
    return underlying.countClusterElements(iClusterIds, countTombstones);
  }

  public int getClusters() {
    checkOpenness();
    return underlying.getClusters();
  }

  public boolean existsCluster(String iClusterName) {
    checkOpenness();
    return underlying.existsCluster(iClusterName);
  }

  public Collection<String> getClusterNames() {
    checkOpenness();
    return underlying.getClusterNames();
  }

  public int getClusterIdByName(final String iClusterName) {
    checkOpenness();
    return underlying.getClusterIdByName(iClusterName);
  }

  public String getClusterNameById(final int iClusterId) {
    checkOpenness();
    return underlying.getClusterNameById(iClusterId);
  }

  public long getClusterRecordSizeById(int iClusterId) {
    return underlying.getClusterRecordSizeById(iClusterId);
  }

  public long getClusterRecordSizeByName(String iClusterName) {
    return underlying.getClusterRecordSizeByName(iClusterName);
  }

  public int addCluster(String iClusterName, int iRequestedId) {
    checkOpenness();
    return underlying.addCluster(iClusterName, iRequestedId);
  }

  public int addCluster(final String iClusterName, final Object... iParameters) {
    checkOpenness();
    return underlying.addCluster(iClusterName, iParameters);
  }

  public boolean dropCluster(final String iClusterName) {
    // Evict cached records of the dropped cluster before delegating.
    getLocalCache().freeCluster(getClusterIdByName(iClusterName));
    return underlying.dropCluster(iClusterName);
  }

  public boolean dropCluster(final int iClusterId) {
    // Evict cached records of the dropped cluster before delegating.
    getLocalCache().freeCluster(iClusterId);
    return underlying.dropCluster(iClusterId);
  }

  public int getDefaultClusterId() {
    checkOpenness();
    return underlying.getDefaultClusterId();
  }

  public boolean declareIntent(final OIntent iIntent) {
    return underlying.declareIntent(iIntent);
  }

  @Override
  public OIntent getActiveIntent() {
    return underlying.getActiveIntent();
  }

  public <DBTYPE extends ODatabase> DBTYPE getUnderlying() {
    return (DBTYPE) underlying;
  }

  public ODatabaseInternal<?> getDatabaseOwner() {
    return databaseOwner;
  }

  public ODatabaseInternal<?> setDatabaseOwner(final ODatabaseInternal<?> iOwner) {
    databaseOwner = iOwner;
    return this;
  }

  /**
   * Two databases are considered equal when they share the same name.
   *
   * @param iOther the object to compare against
   * @return {@code true} if {@code iOther} is an {@link ODatabase} with the same name
   */
  @Override
  public boolean equals(final Object iOther) {
    if (!(iOther instanceof ODatabase)) return false;
    final ODatabase other = (ODatabase) iOther;
    return other.getName().equals(getName());
  }

  /**
   * Kept consistent with {@link #equals(Object)}, which compares databases by name.
   * Fixes the equals-without-hashCode contract violation of the original class.
   */
  @Override
  public int hashCode() {
    final String name = getName();
    return name == null ? 0 : name.hashCode();
  }

  @Override
  public String toString() {
    return underlying.toString();
  }

  public Object setProperty(final String iName, final Object iValue) {
    return underlying.setProperty(iName, iValue);
  }

  public Object getProperty(final String iName) {
    return underlying.getProperty(iName);
  }

  public Iterator<Entry<String, Object>> getProperties() {
    return underlying.getProperties();
  }

  public Object get(final ATTRIBUTES iAttribute) {
    return underlying.get(iAttribute);
  }

  public <THISDB extends ODatabase> THISDB set(final ATTRIBUTES attribute, final Object iValue) {
    return (THISDB) underlying.set(attribute, iValue);
  }

  public void registerListener(final ODatabaseListener iListener) {
    underlying.registerListener(iListener);
  }

  public void unregisterListener(final ODatabaseListener iListener) {
    underlying.unregisterListener(iListener);
  }

  @Override
  public ORecordMetadata getRecordMetadata(ORID rid) {
    return underlying.getRecordMetadata(rid);
  }

  @Override
  public long getSize() {
    return underlying.getSize();
  }

  @Override
  public void freeze(boolean throwException) {
    underlying.freeze(throwException);
  }

  @Override
  public void freeze() {
    underlying.freeze();
  }

  @Override
  public void release() {
    underlying.release();
  }

  // Guard used by cluster-level operations that require an open database.
  protected void checkOpenness() {
    if (isClosed()) throw new ODatabaseException("Database '" + getURL() + "' is closed");
  }
}
| |
/*
* Copyright 2015 The AppAuth for Android Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.openid.appauth.browser;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.pm.ResolveInfo;
import android.net.Uri;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import androidx.browser.customtabs.CustomTabsService;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
 * Utility class to obtain the browser package name to be used for
 * {@link net.openid.appauth.AuthorizationService#performAuthorizationRequest(
 * net.openid.appauth.AuthorizationRequest,
 * android.app.PendingIntent)} calls. Browsers that support
 * [custom tabs](https://developer.chrome.com/multidevice/android/customtabs) are
 * preferred. To mitigate man-in-the-middle attacks by malicious apps pretending to
 * be browsers for the specific URI we query, only apps registered as a handler for
 * _all_ HTTP and HTTPS URIs are considered.
 */
public final class BrowserSelector {
    private static final String SCHEME_HTTP = "http";
    private static final String SCHEME_HTTPS = "https";

    /**
     * The service we expect to find on a web browser that indicates it supports custom tabs.
     */
    @VisibleForTesting
    static final String ACTION_CUSTOM_TABS_CONNECTION =
            CustomTabsService.ACTION_CUSTOM_TABS_CONNECTION;

    /**
     * Intent for querying installed web browsers as seen at
     * https://cs.android.com/android/platform/superproject/+/master:packages/modules/Permission/PermissionController/src/com/android/permissioncontroller/role/model/BrowserRoleBehavior.java
     */
    @VisibleForTesting
    static final Intent BROWSER_INTENT = new Intent()
            .setAction(Intent.ACTION_VIEW)
            .addCategory(Intent.CATEGORY_BROWSABLE)
            .setData(Uri.fromParts("http", "", null));

    /**
     * Retrieves the full list of browsers installed on the device. Each browser that
     * supports custom tabs yields two entries: one with
     * {@link BrowserDescriptor#useCustomTab} set to `true` and one with it
     * set to `false`. The list follows the package manager's order, which
     * indirectly reflects the user's preferences (their default browser, if set,
     * is moved to the head of the list).
     */
    @SuppressLint("PackageManagerGetSignatures")
    @NonNull
    public static List<BrowserDescriptor> getAllBrowsers(Context context) {
        PackageManager packageManager = context.getPackageManager();
        List<BrowserDescriptor> descriptors = new ArrayList<>();
        String defaultBrowserPackage = null;

        int queryFlag = PackageManager.GET_RESOLVED_FILTER;
        if (VERSION.SDK_INT >= VERSION_CODES.M) {
            queryFlag |= PackageManager.MATCH_ALL;
        }
        // queryIntentActivities() does not guarantee the user's preferred browser is at
        // the head of its result, so the default handler is resolved separately and the
        // descriptor list is reordered to restore that property.
        ResolveInfo defaultActivity = packageManager.resolveActivity(BROWSER_INTENT, 0);
        if (defaultActivity != null) {
            defaultBrowserPackage = defaultActivity.activityInfo.packageName;
        }
        List<ResolveInfo> candidates =
                packageManager.queryIntentActivities(BROWSER_INTENT, queryFlag);
        for (ResolveInfo candidate : candidates) {
            // Skip handlers which are not full browsers.
            if (!isFullBrowser(candidate)) {
                continue;
            }
            try {
                int insertIndex = 0;
                PackageInfo packageInfo = packageManager.getPackageInfo(
                        candidate.activityInfo.packageName,
                        PackageManager.GET_SIGNATURES);
                boolean isDefault =
                        candidate.activityInfo.packageName.equals(defaultBrowserPackage);
                if (hasWarmupService(packageManager, candidate.activityInfo.packageName)) {
                    BrowserDescriptor customTabDescriptor =
                            new BrowserDescriptor(packageInfo, true);
                    if (isDefault) {
                        // The default browser's custom-tab entry goes to the head of the list.
                        descriptors.add(insertIndex, customTabDescriptor);
                        insertIndex++;
                    } else {
                        descriptors.add(customTabDescriptor);
                    }
                }
                BrowserDescriptor plainDescriptor =
                        new BrowserDescriptor(packageInfo, false);
                if (isDefault) {
                    // The default browser precedes all others; when it also supports
                    // custom tabs, this non-custom-tab entry becomes the second element.
                    descriptors.add(insertIndex, plainDescriptor);
                } else {
                    descriptors.add(plainDescriptor);
                }
            } catch (NameNotFoundException e) {
                // a descriptor cannot be generated without the package info
            }
        }
        return descriptors;
    }

    /**
     * Searches through all browsers for the best match based on the supplied browser matcher.
     * Custom tab supporting browsers are preferred, if the matcher permits them, and browsers
     * are evaluated in the order returned by the package manager, which should indirectly match
     * the user's preferences.
     *
     * @param context {@link Context} to use for accessing {@link PackageManager}.
     * @return The package name recommended to use for connecting to custom tabs related components.
     */
    @SuppressLint("PackageManagerGetSignatures")
    @Nullable
    public static BrowserDescriptor select(Context context, BrowserMatcher browserMatcher) {
        BrowserDescriptor fallback = null;
        for (BrowserDescriptor candidate : getAllBrowsers(context)) {
            if (!browserMatcher.matches(candidate)) {
                continue;
            }
            if (candidate.useCustomTab) {
                // The first matching custom-tab capable browser wins outright.
                return candidate;
            }
            if (fallback == null) {
                // Remember the first plain match in case no custom-tab capable
                // browser matches.
                fallback = candidate;
            }
        }
        return fallback;
    }

    private static boolean hasWarmupService(PackageManager pm, String packageName) {
        Intent serviceIntent = new Intent()
                .setAction(ACTION_CUSTOM_TABS_CONNECTION)
                .setPackage(packageName);
        return pm.resolveService(serviceIntent, 0) != null;
    }

    private static boolean isFullBrowser(ResolveInfo resolveInfo) {
        // The filter must match ACTION_VIEW and CATEGORY_BROWSABLE, and declare
        // at least one scheme.
        if (resolveInfo.filter == null
                || !resolveInfo.filter.hasAction(Intent.ACTION_VIEW)
                || !resolveInfo.filter.hasCategory(Intent.CATEGORY_BROWSABLE)
                || resolveInfo.filter.schemesIterator() == null) {
            return false;
        }
        // A filter restricted to particular authorities is not a general-purpose browser.
        if (resolveInfo.filter.authoritiesIterator() != null) {
            return false;
        }
        // Both HTTP and HTTPS must be supported.
        boolean handlesHttp = false;
        boolean handlesHttps = false;
        Iterator<String> schemes = resolveInfo.filter.schemesIterator();
        while (schemes.hasNext()) {
            String scheme = schemes.next();
            handlesHttp |= SCHEME_HTTP.equals(scheme);
            handlesHttps |= SCHEME_HTTPS.equals(scheme);
            if (handlesHttp && handlesHttps) {
                return true;
            }
        }
        // at least one of HTTP or HTTPS is not supported
        return false;
    }
}
| |
//Problem: https://www.hackerrank.com/challenges/bomber-man
//Java 8
/*
We can simply keep three sets of bombs
1 second bombs
2 second bombs
3 second bombs
Then iteratively detonate and plant bombs each cycle
Cycle: 2
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
Cycle: 3
OOO.OOO
OO...OO
OOO...O
..OO.OO
...OOOO
...OOOO
Cycle: 4
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
Cycle: 5
.......
...O...
....O..
.......
OO.....
OO.....
Cycle: 6
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
Cycle: 7
OOO.OOO
OO...OO
OOO...O
..OO.OO
...OOOO
...OOOO
Cycle: 8
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
Cycle: 9
.......
...O...
....O..
.......
OO.....
OO.....
Cycle: 10
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
Cycle: 11
OOO.OOO
OO...OO
OOO...O
..OO.OO
...OOOO
...OOOO
Cycle: 12
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
Cycle: 13
.......
...O...
....O..
.......
OO.....
OO.....
Cycle: 14
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
Cycle: 15
OOO.OOO
OO...OO
OOO...O
..OO.OO
...OOOO
...OOOO
Cycle: 16
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
Cycle: 17
.......
...O...
....O..
.......
OO.....
OO.....
Cycle: 18
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
Cycle: 19
OOO.OOO
OO...OO
OOO...O
..OO.OO
...OOOO
...OOOO
Cycle: 20
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
OOOOOOO
Grouping based on pattern
1 | 2 | 3 | 4
5 | 6 | 7 | 8
9 | 10| 11| 12
13| 14| 15| 16
17| 18| 19| 20
We see there are only 4 cycles, and all even cycles are the same grid
so if we have an even cycle we can just print a full grid
If we have an odd cycle then there are two different grids to choose from
we can find which grid the number corresponds to by doing n % 4
Time Complexity: O(m*n) //We must build the result which is a m*n matrix
Space Complexity: O(m*n) //We store every bomb in a map so our Maps cumulative size is O(n*m)
*/
import java.io.*;
import java.util.*;
/**
 * Bomb-grid simulation: reads a row x col grid of bombs ('O') and empty cells
 * ('.') from STDIN together with a cycle count n, simulates the
 * plant/detonate cycles, and prints the final grid to STDOUT.
 * As derived in the analysis comment above, the grid states repeat with
 * period 4, so n is first reduced to a small equivalent number of cycles.
 */
public class Solution {
// Planted bombs keyed by row -> (column -> dummy value 0). The three maps
// track bombs planted in the three interleaved cycle phases; damagedBombs
// collects bombs caught in an explosion so they can afterwards be removed
// from the other maps. All state is static and mutated by the helpers below.
static Map<Integer,Map<Integer,Integer>> threeSecondBombs = new HashMap<>();
static Map<Integer,Map<Integer,Integer>> twoSecondBombs = new HashMap<>();
static Map<Integer,Map<Integer,Integer>> oneSecondBombs = new HashMap<>();
static Map<Integer,Map<Integer,Integer>> damagedBombs = new HashMap<>();
/**
 * Entry point: reads row, col, n and the grid, reduces n using the period-4
 * repetition of the simulation, runs the remaining cycles and prints the grid.
 */
public static void main(String[] args) {
/* Enter your code here. Read input from STDIN. Print output to STDOUT. Your class should be named Solution. */
Scanner input = new Scanner(System.in);
int row = input.nextInt();
int col = input.nextInt();
int n = input.nextInt();
input.nextLine();
if(n % 2 == 0)//If n is even the result is always a full grid of bombs
{
n = 2;
}
else if(n > 3) //We are in a repeated pattern (see analysis above) so we only do either 5 or 7 iterations
{
n = (n % 4)+4;
}
//Initialize variables according to input grid: every 'O' read becomes a 3-second bomb
char[][] grid = new char[row][col];
for(int i = 0; i < row; i++)
{
String readRow = input.nextLine();
for(int j = 0; j < col; j++)
{
if(readRow.charAt(j) == 'O')
{
if(threeSecondBombs.get(i) == null)
{
Map<Integer,Integer> map = new HashMap<Integer, Integer>();
threeSecondBombs.put(i, map);
threeSecondBombs.get(i).put(j,0);
}
else
{
threeSecondBombs.get(i).put(j,0);
}
}
grid[i][j] = readRow.charAt(j);
}
}
int cycle = 2;
//Plant all the 2 second bombs
if(cycle <= n)//2 second cycle
{
plantBombs(twoSecondBombs, grid);
cycle++;
//System.out.println("Plant 2 sec bombs");
//System.out.println("Cycle: 2");
//printGrid(grid);
}
if(cycle <= n)//3 second cycle
{
detonateBombs(threeSecondBombs, grid);
threeSecondBombs = new HashMap<>();
cycle++;
//System.out.println("Detonate 3 sec bombs");
//System.out.println("Cycle: 3");
//printGrid(grid);
}
//All future cycles
//These booleans function as switches where false means "place bomb" and true means "detonate bomb"
boolean one = false;
boolean two = true;
boolean three = false;
while(cycle <= n)
{
//System.out.println("Cycle: "+cycle);
if(cycle % 3 == 1)//One cycle
{
if(!one)
{
plantBombs(oneSecondBombs, grid);
one = !one;
//System.out.println("Plant 1 sec bombs");
}
else
{
detonateBombs(oneSecondBombs, grid);
one = !one;
//System.out.println("Detonate 1 sec bombs");
}
}
else if(cycle % 3 == 2)//Two cycle
{
if(!two)
{
plantBombs(twoSecondBombs, grid);
two = !two;
//System.out.println("Plant 2 sec bombs");
}
else
{
detonateBombs(twoSecondBombs, grid);
two = !two;
//System.out.println("Detonate 2 sec bombs");
}
}
else if(cycle % 3 == 0)//Three cycle
{
if(!three)
{
plantBombs(threeSecondBombs, grid);
three = !three;
//System.out.println("Plant 3 sec bombs");
}
else
{
detonateBombs(threeSecondBombs, grid);
three = !three;
//System.out.println("Detonate 3 sec bombs");
}
}
cycle++;
//printGrid(grid); //Grid after each cycle
}
//Print the output grid
printGrid(grid);
}
//Plants a bomb on all open ('.') tiles: records each position in bombSet and marks the cell 'O'
static void plantBombs(Map<Integer,Map<Integer,Integer>> bombSet, char[][] grid)
{
for(int i = 0; i < grid.length; i++)
{
for(int j = 0; j < grid[0].length; j++)
{
if(grid[i][j] == '.')
{
//System.out.println("Planting 2s Bomb");
if(bombSet.get(i) == null)
{
//System.out.println("No bomb in row "+i);
Map<Integer,Integer> map = new HashMap<Integer, Integer>();
bombSet.put(i, map);
bombSet.get(i).put(j,0);
}
else
{
bombSet.get(i).put(j,0);
}
grid[i][j] = 'O';
}
}
}
}
//Detonates all bombs of the given map: clears each bomb's cross-shaped blast area on
//the grid, then removes every bomb caught in a blast from all three bomb maps.
static void detonateBombs(Map<Integer,Map<Integer,Integer>> bombSet, char[][] grid)
{
for(Map.Entry<Integer, Map<Integer,Integer>> x : bombSet.entrySet())
{
int px = x.getKey();
for(Map.Entry<Integer,Integer> y : x.getValue().entrySet())
{
removeDamage(px,y.getKey(),grid);
}
}
// removeDamage filled damagedBombs; purge those positions from every bomb map
for(Map.Entry<Integer, Map<Integer,Integer>> x : damagedBombs.entrySet())
{
int px = x.getKey();
for(Map.Entry<Integer,Integer> y : x.getValue().entrySet())
{
//System.out.println("Removing Bomb at("+px+","+y.getKey()+")");
if(threeSecondBombs.get(px) != null)
{
threeSecondBombs.get(px).remove(y.getKey());
//System.out.println("Removing 3s Bomb");
}
if(twoSecondBombs.get(px) != null)
{
twoSecondBombs.get(px).remove(y.getKey());
//System.out.println("Removing 2s Bomb");
}
if(oneSecondBombs.get(px) != null)
{
oneSecondBombs.get(px).remove(y.getKey());
//System.out.println("Removing 1s Bomb");
}
}
}
damagedBombs = new HashMap<>();//Reset now that all damage has been applied
}
//Replaces the bomb cell and its four orthogonal neighbours with '.' and records
//each cleared position in damagedBombs (bounds-checked at the grid edges).
static void removeDamage(int x, int y, char[][] grid)
{
grid[x][y] = '.';
removeBomb(x, y);
//Left
if(y-1 >= 0)
{
grid[x][y-1] = '.';
removeBomb(x, y-1);
}
//Right
if(y+1 < grid[0].length)
{
grid[x][y+1] = '.';
removeBomb(x, y+1);
}
//Up
if(x-1 >= 0)
{
grid[x-1][y] = '.';
removeBomb(x-1, y);
}
//Down
if(x+1 < grid.length)
{
grid[x+1][y] = '.';
removeBomb(x+1, y);
}
}
//Records position (x,y) in the map of damaged bombs, creating the row entry on demand
static void removeBomb(int x, int y)
{
if(damagedBombs.get(x) == null)
{
Map<Integer,Integer> map = new HashMap<Integer, Integer>();
damagedBombs.put(x, map);
damagedBombs.get(x).put(y,0);
}
else
{
damagedBombs.get(x).put(y,0);
}
}
//Debug helper: prints every (row,column) position held in the given bomb map
static void printBombSet(Map<Integer,Map<Integer,Integer>> bombSet)
{
for(Map.Entry<Integer, Map<Integer,Integer>> x : bombSet.entrySet())
{
int px = x.getKey();
for(Map.Entry<Integer,Integer> y : x.getValue().entrySet())
{
System.out.println("("+px+","+y.getKey()+")");
}
}
}
//Prints the grid to STDOUT, one row per line
static void printGrid(char[][] grid)
{
for(char[] l : grid)
{
for(char m : l)
{
System.out.print(m);
}
System.out.println("");
}
//System.out.println(""); //Uncomment if you are printing iteratively
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.segment.file;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.Integer.bitCount;
import static java.lang.Integer.numberOfTrailingZeros;
import static java.lang.Long.numberOfLeadingZeros;
import static java.lang.Math.max;
import static java.util.Arrays.fill;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import org.apache.jackrabbit.oak.segment.CacheWeights;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.cache.CacheStats;
import com.google.common.cache.Weigher;
/**
* {@code PriorityCache} implements a partial mapping from keys of type {@code K} to values
* of type {@code V}. Mappings are associates with a cost, which states how expensive it is
* to recreate that mapping. This cache uses the cost such that mappings with a higher cost
* have a lower chance of being evicted than mappings with a lower cost. When an item from
* this cache is successfully looked up its cost is incremented by one, unless it has reached
* its maximum cost of {@link Byte#MAX_VALUE} already.
* <p>
* Additionally this cache tracks a generation for mappings. Mappings of later generations
* always take precedence over mappings of earlier generations. That is, putting a mapping of
* a later generation into the cache can cause any mapping of an earlier generation to be evicted
* regardless of its cost.
* <p>
* This cache uses rehashing to resolve clashes. The number of rehashes is configurable. When
* a clash cannot be resolved by rehashing the given number of times the put operation fails.
* <p>
* This cache is thread safe.
* @param <K> type of the keys
* @param <V> type of the values
*/
public class PriorityCache<K, V> {
// Number of additional hash slots probed to resolve a clash.
private final int rehash;
// Fixed-size, power-of-two hash table. Empty slots hold Entry.NULL, never null.
private final Entry<?,?>[] entries;
// Histograms indexed by (cost - Byte.MIN_VALUE): one bucket per possible byte cost.
private final int[] costs = new int[256];
private final int[] evictions = new int[256];
// Counters backing getStats().
private long hitCount;
private long missCount;
private long loadCount;
private long loadExceptionCount;
private long evictionCount;
// Number of live mappings currently in the table.
private long size;
@Nonnull
private final Weigher<K, V> weigher;
// Estimated cumulative memory weight of all live entries, as per the weigher.
private long weight = 0;
/**
 * Static factory for creating new {@code PriorityCache} instances.
 * @param size size of the cache. Must be a power of 2.
 * @param weigher used to estimate the in-memory weight of cached entries
 * @return a new {@code PriorityCache} instance of the given {@code size}.
 */
public static <K, V> Supplier<PriorityCache<K, V>> factory(final int size, @Nonnull final Weigher<K, V> weigher) {
checkArgument(bitCount(size) == 1);
checkNotNull(weigher);
return new Supplier<PriorityCache<K, V>>() {
@Override
public PriorityCache<K, V> get() {
return new PriorityCache<>(size, weigher);
}
};
}
/**
 * Static factory for creating new {@code PriorityCache} instances.
 * @param size size of the cache. Must be a power of 2.
 * @return a new {@code PriorityCache} instance of the given {@code size}.
 */
public static <K, V> Supplier<PriorityCache<K, V>> factory(final int size) {
checkArgument(bitCount(size) == 1);
return new Supplier<PriorityCache<K, V>>() {
@Override
public PriorityCache<K, V> get() {
return new PriorityCache<>(size);
}
};
}
// Immutable table slot. The NULL sentinel marks an empty slot; its generation of -1
// and minimal cost make it lose against any real entry.
private static class Entry<K, V> {
static final Entry<Void, Void> NULL = new Entry<>(null, null, -1, Byte.MIN_VALUE);
final K key;
final V value;
final int generation;
// Eviction priority: higher cost = less likely to be evicted. Mutated on cache hits.
byte cost;
public Entry(K key, V value, int generation, byte cost) {
this.key = key;
this.value = value;
this.generation = generation;
this.cost = cost;
}
@Override
public String toString() {
return this == NULL
? "NULL"
: "Entry{" + key + "->" + value + " @" + generation + ", $" + cost + "}";
}
}
/**
 * Round {@code size} up to the next power of two or 1 for negative values.
 * @param size
 * @return the next power of two starting from {@code size}.
 */
public static long nextPowerOfTwo(int size) {
return 1L << (64L - numberOfLeadingZeros((long)max(1, size) - 1L));
}
/**
 * Create a new instance of the given {@code size}. {@code rehash} specifies the number
 * of rehashes to resolve a clash.
 * @param size Size of the cache. Must be a power of {@code 2}.
 * @param rehash Number of rehashes. Must be greater or equal to {@code 0} and
 * smaller than {@code 32 - numberOfTrailingZeros(size)}.
 */
PriorityCache(int size, int rehash) {
this(size, rehash, CacheWeights.<K, V> noopWeigher());
}
/**
 * Create a new instance of the given {@code size}. {@code rehash} specifies the number
 * of rehashes to resolve a clash.
 * @param size Size of the cache. Must be a power of {@code 2}.
 * @param rehash Number of rehashes. Must be greater or equal to {@code 0} and
 * smaller than {@code 32 - numberOfTrailingZeros(size)}.
 * @param weigher Needed to provide an estimation of the cache weight in memory
 */
public PriorityCache(int size, int rehash, @Nonnull Weigher<K, V> weigher) {
checkArgument(bitCount(size) == 1);
checkArgument(rehash >= 0);
checkArgument(rehash < 32 - numberOfTrailingZeros(size));
this.rehash = rehash;
entries = new Entry<?,?>[size];
fill(entries, Entry.NULL);
this.weigher = checkNotNull(weigher);
}
/**
 * Create a new instance of the given {@code size}. The number of rehashes is
 * the maximum number allowed by the given {@code size}. ({@code 31 - numberOfTrailingZeros(size)}.
 * @param size Size of the cache. Must be a power of {@code 2}.
 */
public PriorityCache(int size, @Nonnull Weigher<K, V> weigher) {
this(size, 31 - numberOfTrailingZeros(size), weigher);
}
public PriorityCache(int size) {
this(size, 31 - numberOfTrailingZeros(size));
}
// Map a hash code to a table slot for the given probe iteration. Since
// entries.length is a power of two, the mask always yields a valid index.
private int project(int hashCode, int iteration) {
return (hashCode >> iteration) & (entries.length - 1);
}
/**
 * @return the number of mappings in this cache.
 */
public long size() {
return size;
}
/**
 * Add a mapping to the cache.
 * @param key the key of the mapping
 * @param value the value of the mapping
 * @param generation the generation of the mapping
 * @param initialCost the initial cost associated with this mapping
 * @return {@code true} if the mapping has been added, {@code false} otherwise.
 */
public synchronized boolean put(@Nonnull K key, @Nonnull V value, int generation, byte initialCost) {
int hashCode = key.hashCode();
byte cheapest = initialCost;
int index = -1;
boolean eviction = false;
// Probe up to rehash+1 slots; remember the cheapest evictable slot seen so far
// and stop early on an empty slot, a same-key match, or an older generation.
for (int k = 0; k <= rehash; k++) {
int i = project(hashCode, k);
Entry<?, ?> entry = entries[i];
if (entry == Entry.NULL) {
// Empty slot -> use this index
index = i;
eviction = false;
break;
} else if (entry.generation <= generation && key.equals(entry.key)) {
// Key exists and generation is greater or equal -> use this index and boost the cost
index = i;
initialCost = entry.cost;
if (initialCost < Byte.MAX_VALUE) {
initialCost++;
}
eviction = false;
break;
} else if (entry.generation < generation) {
// Old generation -> use this index
index = i;
eviction = false;
break;
} else if (entry.cost < cheapest) {
// Candidate slot, keep on searching for even cheaper slots
cheapest = entry.cost;
index = i;
eviction = true;
}
}
if (index >= 0) {
Entry<?, ?> old = entries[index];
Entry<?, ?> newE = new Entry<>(key, value, generation, initialCost);
entries[index] = newE;
loadCount++;
costs[initialCost - Byte.MIN_VALUE]++;
if (old != Entry.NULL) {
// Replacing a live entry: update histograms and the weight estimate
costs[old.cost - Byte.MIN_VALUE]--;
if (eviction) {
evictions[old.cost - Byte.MIN_VALUE]++;
evictionCount++;
}
weight -= weighEntry(old);
} else {
size++;
}
weight += weighEntry(newE);
return true;
} else {
// No free, replaceable or cheaper slot found within the rehash budget
loadExceptionCount++;
return false;
}
}
/**
 * Look up a mapping from this cache by its {@code key} and {@code generation}.
 * A successful lookup boosts the entry's cost by one (up to {@link Byte#MAX_VALUE}).
 * @param key key of the mapping to look up
 * @param generation generation of the mapping to look up
 * @return the mapping for {@code key} and {@code generation} or {@code null} if this
 * cache does not contain such a mapping.
 */
@SuppressWarnings("unchecked")
@CheckForNull
public synchronized V get(@Nonnull K key, int generation) {
int hashCode = key.hashCode();
for (int k = 0; k <= rehash; k++) {
int i = project(hashCode, k);
Entry<?, ?> entry = entries[i];
if (generation == entry.generation && key.equals(entry.key)) {
if (entry.cost < Byte.MAX_VALUE) {
// Move the entry to the next cost bucket in the histogram
costs[entry.cost - Byte.MIN_VALUE]--;
entry.cost++;
costs[entry.cost - Byte.MIN_VALUE]++;
}
hitCount++;
return (V) entry.value;
}
}
missCount++;
return null;
}
/**
 * Purge all keys from this cache whose entry's generation matches the
 * passed {@code purge} predicate.
 * @param purge
 */
public synchronized void purgeGenerations(@Nonnull Predicate<Integer> purge) {
for (int i = 0; i < entries.length; i++) {
Entry<?, ?> entry = entries[i];
if (entry != Entry.NULL && purge.apply(entry.generation)) {
entries[i] = Entry.NULL;
size--;
weight -= weighEntry(entry);
}
}
}
@SuppressWarnings("unchecked")
private int weighEntry(Entry<?, ?> entry) {
return weigher.weigh((K) entry.key, (V) entry.value);
}
@Override
public synchronized String toString() {
return "PriorityCache" +
"{ costs=" + toString(costs) +
", evictions=" + toString(evictions) + " }";
}
// Render a histogram as "[index->count,...]", skipping empty buckets.
private static String toString(int[] ints) {
StringBuilder b = new StringBuilder("[");
String sep = "";
for (int i = 0; i < ints.length; i++) {
if (ints[i] > 0) {
b.append(sep).append(i).append("->").append(ints[i]);
sep = ",";
}
}
return b.append(']').toString();
}
/**
 * @return access statistics for this cache
 */
@Nonnull
public CacheStats getStats() {
return new CacheStats(hitCount, missCount, loadCount, loadExceptionCount, 0, evictionCount);
}
public long estimateCurrentWeight() {
return weight;
}
}
| |
package act.job;
/*-
* #%L
* ACT Framework
* %%
* Copyright (C) 2014 - 2017 ActFramework
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static act.app.event.SysEventId.START;
import static act.app.event.SysEventId.STOP;
import static act.job.JobManager.sysEventJobId;
import act.app.App;
import act.app.event.SysEventId;
import act.conf.AppConfig;
import act.event.SysEventListenerBase;
import fc.cron.CronExpression;
import org.joda.time.DateTime;
import org.joda.time.Seconds;
import org.osgl.$;
import org.osgl.exception.NotAppliedException;
import org.osgl.logging.LogManager;
import org.osgl.logging.Logger;
import org.osgl.util.E;
import org.osgl.util.S;
import org.rythmengine.utils.Time;
import java.util.EventObject;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A `JobTrigger` triggers a {@link Job} to be executed
*/
public abstract class JobTrigger {

    protected static final Logger LOGGER = LogManager.get(JobTrigger.class);

    // Tri-state flag: TRUE = trigger fires exactly once, FALSE = repeating trigger,
    // null = unknown (used by triggers associated to another job).
    protected Boolean oneTime;

    private JobTrigger(Boolean oneTime) {
        this.oneTime = oneTime;
    }

    @Override
    public String toString() {
        return getClass().getSimpleName();
    }

    protected static boolean isTraceEnabled() {
        return LOGGER.isTraceEnabled();
    }

    protected static void trace(String msg, Object... args) {
        LOGGER.trace(msg, args);
    }

    /**
     * Attach this trigger to the given job and schedule it with the manager.
     */
    final void register(Job job, JobManager manager) {
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("trigger on [%s]: %s", this, job);
        }
        if (null != oneTime && !oneTime) {
            job.setNonOneTime();
        }
        job.trigger(this);
        schedule(manager, job);
    }

    void scheduleFollowingCalls(JobManager manager, Job job) {}

    void schedule(JobManager manager, Job job) {}

    void traceSchedule(Job job) {
        if (isTraceEnabled()) {
            trace("trigger[%s] schedule job: %s", this, job);
        }
    }

    /**
     * Resolve an annotation value that may reference a configuration entry,
     * either by starting with the given prefix (e.g. {@code "cron.job1"}) or
     * by using {@code "${key}"} notation. Returns the value unchanged when no
     * indirection applies; may return a blank string when the referenced
     * configuration entry is missing (callers must check).
     */
    private static String resolveConf(AppConfig config, String value, String prefix) {
        String v = value;
        if (v.startsWith(prefix)) {
            v = (String) config.get(v);
        } else if (v.startsWith("${") && v.endsWith("}")) {
            v = v.substring(2, v.length() - 1);
            v = (String) config.get(v);
        }
        return v;
    }

    static JobTrigger of(AppConfig config, Cron anno) {
        String v = resolveConf(config, anno.value(), "cron.");
        if (S.blank(v)) {
            throw E.invalidConfiguration("Cannot find configuration for cron: %s", anno.value());
        }
        return cron(v);
    }

    static JobTrigger of(AppConfig config, OnAppStart anno) {
        int delayInSeconds = anno.delayInSeconds();
        if (delayInSeconds > 0) {
            return delayAfter(START, delayInSeconds);
        }
        if (anno.async()) {
            return alongWith(START);
        } else {
            return after(START);
        }
    }

    static JobTrigger of(AppConfig config, OnAppStop anno) {
        if (anno.async()) {
            return alongWith(STOP);
        } else {
            return before(STOP);
        }
    }

    static JobTrigger of(AppConfig config, FixedDelay anno) {
        String delay = resolveConf(config, anno.value(), "delay.");
        if (S.blank(delay)) {
            throw E.invalidConfiguration("Cannot find configuration for delay: %s", anno.value());
        }
        return fixedDelay(delay, anno.startImmediately());
    }

    static JobTrigger of(AppConfig config, Every anno) {
        String duration = resolveConf(config, anno.value(), "every.");
        if (S.blank(duration)) {
            throw E.invalidConfiguration("Cannot find configuration for duration: %s", anno.value());
        }
        return every(duration, anno.startImmediately());
    }

    static JobTrigger of(AppConfig config, AlongWith anno) {
        String id = anno.value();
        E.illegalArgumentIf(S.blank(id), "associate job ID cannot be empty");
        int delayInSeconds = anno.delayInSeconds();
        if (delayInSeconds > 0) {
            return new _DelayAfter(id, delayInSeconds);
        } else {
            return new _AlongWith(id);
        }
    }

    static JobTrigger of(AppConfig config, InvokeAfter anno) {
        String id = anno.value();
        E.illegalArgumentIf(S.blank(id), "associate job ID cannot be empty");
        return new _After(id);
    }

    static JobTrigger of(AppConfig config, InvokeBefore anno) {
        String id = anno.value();
        E.illegalArgumentIf(S.blank(id), "associate job ID cannot be empty");
        return new _Before(id);
    }

    static JobTrigger cron(String expression) {
        return new _Cron(expression);
    }

    static JobTrigger fixedDelay(String duration, boolean startImmediately) {
        return new _FixedDelay(duration, startImmediately);
    }

    static JobTrigger fixedDelay(long seconds, boolean startImmediately) {
        return new _FixedDelay(seconds, startImmediately);
    }

    static JobTrigger fixedDelay(long interval, TimeUnit timeUnit, boolean startImmediately) {
        return new _FixedDelay(timeUnit.toSeconds(interval), startImmediately);
    }

    static JobTrigger every(String duration, boolean startImmediately) {
        return new _Every(duration, startImmediately);
    }

    static JobTrigger every(long seconds, boolean startImmediately) {
        return new _Every(seconds, TimeUnit.SECONDS, startImmediately);
    }

    static JobTrigger every(long duration, TimeUnit timeUnit, boolean startImmediately) {
        return new _Every(duration, timeUnit, startImmediately);
    }

    static JobTrigger onAppStart(boolean async, int delayInSeconds) {
        if (delayInSeconds > 0) {
            return delayAfter(START, delayInSeconds);
        }
        return async ? alongWith(START) : after(START);
    }

    static JobTrigger onAppStop(boolean async) {
        return async ? alongWith(STOP) : before(STOP);
    }

    static JobTrigger onSysEvent(SysEventId eventId, boolean async) {
        return async ? alongWith(eventId) : after(eventId);
    }

    static JobTrigger delayForSeconds(long seconds, boolean startImmediately) {
        return new _FixedDelay(seconds, startImmediately);
    }

    static JobTrigger alongWith(String jobId) {
        return new _AlongWith(jobId);
    }

    static JobTrigger alongWith(SysEventId sysEvent) {
        return new _AlongWith(sysEventJobId(sysEvent));
    }

    static JobTrigger before(String jobId) {
        return new _Before(jobId);
    }

    static JobTrigger before(SysEventId sysEvent) {
        return before(sysEventJobId(sysEvent));
    }

    static JobTrigger after(String jobId) {
        return new _After(jobId);
    }

    static JobTrigger after(SysEventId sysEvent) {
        return after(sysEventJobId(sysEvent));
    }

    static JobTrigger delayAfter(String jobId, int delayInSeconds) {
        return new _DelayAfter(jobId, delayInSeconds);
    }

    static JobTrigger delayAfter(SysEventId sysEvent, int delayInSeconds) {
        return delayAfter(sysEventJobId(sysEvent), delayInSeconds);
    }

    /**
     * Trigger driven by a cron expression: reschedules the job after each run.
     */
    static class _Cron extends JobTrigger {
        private CronExpression cronExpr;

        _Cron(String expression) {
            super(false);
            cronExpr = new CronExpression(expression);
        }

        @Override
        public String toString() {
            return S.newBuffer("cron :").a(cronExpr).toString();
        }

        @Override
        void schedule(final JobManager manager, final Job job) {
            traceSchedule(job);
            App app = manager.app();
            if (!app.isStarted()) {
                // Defer scheduling until the app has fully started
                app.eventBus().bindAsync(SysEventId.POST_START, new SysEventListenerBase() {
                    @Override
                    public void on(EventObject event) throws Exception {
                        delayedSchedule(manager, job);
                    }
                });
            } else {
                delayedSchedule(manager, job);
            }
        }

        private void delayedSchedule(JobManager manager, Job job) {
            DateTime now = DateTime.now();
            // add one second to prevent the next time being the current time (now)
            DateTime next = cronExpr.nextTimeAfter(now.plusSeconds(1));
            Seconds seconds = Seconds.secondsBetween(now, next);
            ScheduledFuture future = manager.executor().schedule(job, seconds.getSeconds(), TimeUnit.SECONDS);
            manager.futureScheduled(job.id(), future);
        }

        @Override
        void scheduleFollowingCalls(JobManager manager, Job job) {
            // Cron jobs compute the next fire time after every execution
            schedule(manager, job);
        }
    }

    /**
     * Base class for repeating triggers with a fixed period expressed in seconds.
     */
    private abstract static class _Periodical extends JobTrigger {
        protected long seconds;
        protected boolean startImmediately;

        _Periodical(String duration, boolean startImmediately) {
            super(false);
            E.illegalArgumentIf(S.blank(duration), "delay duration shall not be empty");
            seconds = Time.parseDuration(duration);
            E.illegalArgumentIf(seconds < 1, "delay duration shall not be zero or negative number");
            this.startImmediately = startImmediately;
        }

        _Periodical(long seconds, boolean startImmediately) {
            super(false);
            E.illegalArgumentIf(seconds < 1, "delay duration cannot be zero or negative");
            this.seconds = seconds;
            this.startImmediately = startImmediately;
        }

        @Override
        final void schedule(final JobManager manager, final Job job) {
            traceSchedule(job);
            App app = manager.app();
            if (!app.isStarted()) {
                // Defer until app start, then optionally run once and schedule the periodic task
                app.eventBus().bindAsync(SysEventId.POST_START, new SysEventListenerBase() {
                    @Override
                    public void on(EventObject event) {
                        runAndSchedule(manager, job);
                    }
                });
            } else {
                runAndSchedule(manager, job);
            }
        }

        protected abstract void delayedSchedule(JobManager manager, Job job);

        protected void runAndSchedule(JobManager manager, Job job) {
            if (startImmediately) {
                manager.now(job);
            }
            delayedSchedule(manager, job);
        }
    }

    /** Repeating trigger with a fixed delay between the end of one run and the start of the next. */
    private static class _FixedDelay extends _Periodical {
        _FixedDelay(String duration, boolean startImmediately) {
            super(duration, startImmediately);
        }

        _FixedDelay(long seconds, boolean startImmediately) {
            super(seconds, startImmediately);
        }

        @Override
        public String toString() {
            return S.concat("fixed delay of ", S.string(seconds), " seconds");
        }

        @Override
        protected void delayedSchedule(JobManager manager, Job job) {
            ScheduledThreadPoolExecutor executor = manager.executor();
            ScheduledFuture future = executor.scheduleWithFixedDelay(job, seconds, seconds, TimeUnit.SECONDS);
            manager.futureScheduled(job.id(), future);
        }
    }

    /** Repeating trigger at a fixed rate, independent of how long each run takes. */
    private static class _Every extends _Periodical {
        _Every(String duration, boolean startImmediately) {
            super(duration, startImmediately);
        }

        _Every(long duration, TimeUnit timeUnit, boolean startImmediately) {
            super(timeUnit.toSeconds(duration), startImmediately);
        }

        @Override
        public String toString() {
            return S.concat("every ", S.string(seconds), " seconds");
        }

        @Override
        protected void delayedSchedule(JobManager manager, Job job) {
            ScheduledThreadPoolExecutor executor = manager.executor();
            ScheduledFuture future = executor.scheduleAtFixedRate(job, seconds, seconds, TimeUnit.SECONDS);
            manager.futureScheduled(job.id(), future);
        }
    }

    /**
     * Base class for triggers that fire relative to another (target) job:
     * along with it, before it, or after it.
     */
    private abstract static class _AssociatedTo extends JobTrigger {
        String targetId;

        _AssociatedTo(String targetId) {
            super(null);
            E.illegalArgumentIf(S.blank(targetId), "associate job ID expected");
            this.targetId = targetId;
        }

        @Override
        void schedule(JobManager manager, Job job) {
            traceSchedule(job);
            Job associateTarget = manager.jobById(targetId, false);
            if (null == associateTarget) {
                LOGGER.warn("Failed to register job because target job not found: %s. Will try again after app started", targetId);
                scheduleDelayedRegister(manager, job);
            } else {
                associate(job, associateTarget);
            }
        }

        // Retry the association just before the app START event, when the
        // target job may have become available.
        private void scheduleDelayedRegister(final JobManager manager, final Job job) {
            final String id = delayedRegisterJobId(job);
            before(START).register(new Job(id, manager, new $.F0<Void>() {
                @Override
                public Void apply() throws NotAppliedException, $.Break {
                    Job associateTo = manager.jobById(targetId);
                    if (null == associateTo) {
                        LOGGER.warn("Cannot find associated job: %s", id);
                    } else {
                        associate(job, associateTo);
                    }
                    return null;
                }
            }), manager);
        }

        private String delayedRegisterJobId(Job job) {
            return S.concat("delayed_association_register-", job.id(), "-to-", targetId);
        }

        abstract void associate(Job theJob, Job toJob);
    }

    /** Runs the job in parallel with the target job. */
    private static class _AlongWith extends _AssociatedTo {
        _AlongWith(String targetId) {
            super(targetId);
        }

        @Override
        public String toString() {
            return S.concat("along with ", targetId);
        }

        @Override
        void associate(Job theJob, Job toJob) {
            toJob.addParallelJob(theJob);
        }
    }

    /** Runs the job before the target job. */
    private static class _Before extends _AssociatedTo {
        _Before(String targetId) {
            super(targetId);
        }

        @Override
        public String toString() {
            return S.concat("before ", targetId);
        }

        @Override
        void associate(Job theJob, Job toJob) {
            toJob.addPrecedenceJob(theJob);
        }
    }

    /** Runs the job after the target job. */
    private static class _After extends _AssociatedTo {
        _After(String targetId) {
            super(targetId);
        }

        @Override
        public String toString() {
            return S.concat("after ", targetId);
        }

        @Override
        void associate(Job theJob, Job toJob) {
            toJob.addFollowingJob(theJob);
        }
    }

    /** Runs the job a fixed number of seconds after the target job starts. */
    private static class _DelayAfter extends _AssociatedTo {
        private static final AtomicInteger seq = new AtomicInteger();
        private int delayInSeconds;

        _DelayAfter(String targetId, int delayInSeconds) {
            super(targetId);
            this.delayInSeconds = delayInSeconds;
        }

        @Override
        public String toString() {
            // Fixed: the original passed a "%ss" printf placeholder to S.concat,
            // which does no formatting, yielding e.g. "delay %ss after 5myJob".
            return S.concat("delay ", S.string(delayInSeconds), "s after ", targetId);
        }

        @Override
        void associate(final Job theJob, final Job toJob) {
            toJob.addPrecedenceJob(new Job(toJob.id() + "-delay-" + delayInSeconds + "-" + seq.getAndIncrement(), toJob.manager()) {
                @Override
                public void run() {
                    toJob.manager().delay(theJob, delayInSeconds, TimeUnit.SECONDS);
                }
            });
        }
    }
}
| |
/*
* Copyright 2006-2010 Virtual Laboratory for e-Science (www.vl-e.nl)
* Copyright 2012-2013 Netherlands eScience Center.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For the full license, see: LICENSE.txt (located in the root folder of this distribution).
* ---
*/
// source:
package nl.esciencecenter.vlet.vrs.vfs;
import nl.esciencecenter.ptk.data.BooleanHolder;
import nl.esciencecenter.ptk.util.StringUtil;
import nl.esciencecenter.ptk.util.logging.ClassLogger;
import nl.esciencecenter.vbrowser.vrs.data.Attribute;
import nl.esciencecenter.vbrowser.vrs.exceptions.VrsException;
import nl.esciencecenter.vbrowser.vrs.vrl.VRL;
import nl.esciencecenter.vlet.vrs.VNode;
import nl.esciencecenter.vlet.vrs.VRS;
import nl.esciencecenter.vlet.vrs.VRSContext;
import nl.esciencecenter.vlet.vrs.VResourceSystem;
/**
* The Virtual File System Global Class.
* <p>
* Holds the VFS Global methods and constants.
*
* @author P.T. de Boer
*/
public class VFS extends VRS
{
// ========================================================================
// Class Fields
// ========================================================================
/** Child resource types that a default file system supports: directories and files. */
static String defaultChildTypes[] =
{
DIR_TYPE,
FILE_TYPE
};
// ========================================================================
// Class Methods
// ========================================================================
/**
* Translate Unix style file mode to VRS ACL list.
*
* @see VACL
*/
/**
 * Translate Unix style file mode to VRS ACL list.
 * Produces one row per entity (user, group, world), each with five attributes:
 * entity name, readable, writable, executable/accessible, and a misc flag
 * (setuid/setgid/sticky). The three permission attributes are marked editable.
 *
 * @see VACL
 */
public static Attribute[][] convertFileMode2ACL(int mode, boolean isDir)
{
    // Global.debugPrintf(VFS.class,"converting file mode: %o\n",mode);
    final int numEntries = 3; // user, group, other
    final int numAttrs = 5;   // name, readable, writable, exe/passable, <misc>
    // Directories use "accessible" for the execute bit, files use "executable"
    String execName = isDir ? VACL.PERM_ACCESSIBLE : VACL.PERM_EXECUTABLE;
    // Per-entity data tables: entity value, misc flag choices, and the octal
    // permission masks for read / write / execute and the misc bit.
    String[] entityValues = { VACL.USER_ENTITY, VACL.GROUP_ENTITY, VACL.WORLD_ENTITY };
    String[][] miscValues = { { "-", VACL.SETUID }, { "-", VACL.SETGID }, { "-", VACL.STICKY } };
    int[] readMasks  = { 00400, 00040, 00004 };
    int[] writeMasks = { 00200, 00020, 00002 };
    int[] execMasks  = { 00100, 00010, 00001 };
    int[] miscMasks  = { 04000, 02000, 01000 };
    Attribute[][] acl = new Attribute[numEntries][numAttrs];
    for (int i = 0; i < numEntries; i++)
    {
        // USER_ENTITY is both used for name as for value
        acl[i][0] = new Attribute(VACL.USER_ENTITY, entityValues[i]);
        acl[i][1] = new Attribute(VACL.PERM_READABLE, (mode & readMasks[i]) > 0);
        acl[i][2] = new Attribute(VACL.PERM_WRITABLE, (mode & writeMasks[i]) > 0);
        acl[i][3] = new Attribute(execName, (mode & execMasks[i]) > 0);
        acl[i][4] = new Attribute("Misc", miscValues[i], ((mode & miscMasks[i]) > 0) ? 1 : 0);
        // enable editing of the three permission attributes (skip misc for now)
        for (int j = 1; j < 4; j++)
            acl[i][j].setEditable(true);
    }
    return acl;
}
/**
 * Translate a VFS ACL attribute matrix (as produced by
 * {@link #convertFileMode2ACL(int, boolean)}) back to Unix file mode bits.
 * <p>
 * Expects 3 rows (user, group, world); each row starts with a
 * {@code USER_ENTITY} attribute naming the entity, followed by the
 * permission flags and the "Misc" (setuid/setgid/sticky) enumeration.
 *
 * @param acl   the ACL attribute matrix to convert
 * @param isDir directory flag; currently unused but kept for interface
 *              symmetry with {@link #convertFileMode2ACL(int, boolean)}
 * @return the Unix mode bits, or -1 when the ACL is malformed
 * @see VACL
 */
public static int convertACL2FileMode(Attribute[][] acl, boolean isDir)
{
    int nrEntities = 3; // user, group, world
    int nrAttrs = 5; // name + readable + writable + exec + misc
    int attrOffset = 1; // column 0 is the entity name, skip it below
    int rang = 0400;
    int mode = 0;
    if ((acl == null) || (acl[0] == null) || (acl[0].length < 1))
        return -1;
    for (int i = 0; i < nrEntities; i++)
    {
        if (i >= acl.length)
        {
            errorPrintf("Error converting ACL to unix file mode: acl.length<3. length=%d\n", acl.length);
            return -1;
        }
        String attrName = acl[i][0].getName();
        if (attrName.compareTo(VACL.USER_ENTITY) != 0)
        {
            errorPrintf("Error converting ACL to unix file mode: first attribute MUST be user type:%s\n", attrName);
            return -1;
        }
        // The entity value selects the octal "rank" the rwx values scale by.
        String entity = acl[i][0].getStringValue();
        if (entity.compareTo(VACL.USER_ENTITY) == 0)
            rang = 0100;
        else if (entity.compareTo(VACL.GROUP_ENTITY) == 0)
            rang = 0010;
        else if (entity.compareTo(VACL.WORLD_ENTITY) == 0)
            rang = 0001;
        else
        {
            errorPrintf("Error converting ACL to unix file mode: can't recognise user type:%s\n", entity);
            return -1;
        }
        for (int j = attrOffset; j < nrAttrs; j++)
        {
            if (j >= acl[i].length)
            {
                // fix: report the row that is actually short (was acl[0].length)
                // and the real required attribute count (loop needs nrAttrs).
                errorPrintf("Error converting ACL to unix file mode: acl[" + i
                        + "] attribute length<" + nrAttrs + ". length=%d\n", acl[i].length);
                return -1;
            }
            Attribute attr = acl[i][j];
            String perm = attr.getName();
            int val = 0;
            boolean misc = false;
            if (perm.compareTo(VACL.PERM_READABLE) == 0)
                val = 4;
            else if (perm.compareTo(VACL.PERM_WRITABLE) == 0)
                val = 2;
            else if (perm.compareTo(VACL.PERM_ACCESSIBLE) == 0)
                val = 1; // for directories
            else if (perm.compareTo(VACL.PERM_EXECUTABLE) == 0)
                val = 1; // for files
            else if (perm.compareTo("Misc") == 0)
            {
                misc = true;
                String str = attr.getStringValue();
                // NOTE(review): a cleared bit is represented as "-" by
                // convertFileMode2ACL and falls through to the warning
                // below — confirm whether "-" should be silently accepted.
                if (StringUtil.equals(str, VACL.SETUID))
                    mode = mode | 04000;
                else if (StringUtil.equals(str, VACL.SETGID))
                    mode = mode | 02000;
                else if (StringUtil.equals(str, VACL.STICKY))
                    mode = mode | 01000;
                else
                    warnPrintf("Warning converting ACL to unix file mode: can't recognise miscellaneous type:%s\n",
                            str);
            }
            else
            {
                warnPrintf("Warning converting ACL to unix file mode: can't recognise permission type:%s\n", perm);
            }
            // Non-misc flags contribute their value scaled into the entity's octal digit.
            if ((!misc) && (attr.getBooleanValue()))
                mode = mode | (val * rang);
        }
    }
    return mode;
}
/** Emit a printf-style warning via this class's logger. */
private static void warnPrintf(String format, Object... args)
{
    ClassLogger logger = ClassLogger.getLogger(VFS.class);
    logger.warnPrintf(format, args);
}
/** Emit a printf-style error via this class's logger. */
private static void errorPrintf(String format, Object... args)
{
    ClassLogger logger = ClassLogger.getLogger(VFS.class);
    logger.errorPrintf(format, args);
}
/**
 * Parse an "ls -l" style permission string (e.g. "drwxr-xr-x") into Unix
 * mode bits. The first character selects the node type: 'd' = directory,
 * 'l' = link, '-' = regular file; any other prefix is rejected.
 * Lower-case 's'/'t' in an execute column set both the special bit and the
 * execute bit; upper-case 'S'/'T' set only the special bit.
 *
 * @param permStr permission string of at least 10 characters
 * @param isDir   out-parameter, set to true for directories (may be null)
 * @param isLink  out-parameter, set to true for links (may be null)
 * @return the parsed mode bits, or -1 when the string cannot be parsed
 */
public static int parseUnixPermissions(String permStr, BooleanHolder isDir, BooleanHolder isLink)
{
    // Replace null holders with dummies so the assignments below are safe.
    if (isDir == null)
        isDir = new BooleanHolder();
    if (isLink == null)
        isLink = new BooleanHolder();
    if (permStr == null)
        return -1;
    isDir.value = false;
    isLink.value = false;
    if (permStr.startsWith("d"))
        isDir.value = true;
    else if (permStr.startsWith("l"))
        isLink.value = true;
    else if (permStr.startsWith("-"))
    {
        isDir.value = false;
        isLink.value = false;
    }
    else
    {
        // NOTE(review): other ls type characters ('b','c','p','s') are
        // rejected here — confirm that is intended.
        // Global.debugPrintf("VFS","Could not parse Permissions String:%s\n",permStr);
        return -1;
    }
    if (permStr.length() < 10)
    {
        // Global.debugPrintf("VFS","Permissions string too short:%s\n",permStr);
        return -1;
    }
    int mode = 0;
    // NOT permStr=permStr.toLowerCase();  (case is significant: 's' vs 'S')
    // Characters 1-9 are the user/group/world rwx triplets.
    mode = ((permStr.charAt(1) == 'r') ? 0400 : 0) + ((permStr.charAt(2) == 'w') ? 0200 : 0)
            + ((permStr.charAt(3) == 'x') ? 0100 : 0)
            + ((permStr.charAt(4) == 'r') ? 0040 : 0)
            + ((permStr.charAt(5) == 'w') ? 0020 : 0)
            + ((permStr.charAt(6) == 'x') ? 0010 : 0)
            + ((permStr.charAt(7) == 'r') ? 0004 : 0)
            + ((permStr.charAt(8) == 'w') ? 0002 : 0)
            + ((permStr.charAt(9) == 'x') ? 0001 : 0)
            // S[UG]ID and Sticky bits
            // lower = 'x' is set : "--s--s--t"
            + ((permStr.charAt(3) == 's') ? 04100 : 0) + ((permStr.charAt(6) == 's') ? 02010 : 0)
            + ((permStr.charAt(9) == 't') ? 01001 : 0)
            // upper = 'x' is unset: "--S--S--T"
            + ((permStr.charAt(3) == 'S') ? 04000 : 0) + ((permStr.charAt(6) == 'S') ? 02000 : 0)
            + ((permStr.charAt(9) == 'T') ? 01000 : 0);
    // Global.debugPrintf(VFS.class,"Permstring:'%s'(dir=%b) = %o\n",permStr,isDir,mode);
    return mode;
}
// ===
// Static Misc. methods
// ===
/**
 * Render Unix permission bits as an "ls -l" style string, for example
 * "drwxr-xr-x". Convenience overload for non-link nodes.
 *
 * @param mode  Unix permission bits
 * @param isdir true when the node is a directory
 */
public static String modeToString(int mode, boolean isdir)
{
    return modeToString(mode, isdir, false);
}
/**
 * Render Unix permission bits as a 10 character "ls -l" style string, for
 * example "drwxr-xr-x". Position 0 is the node type ('l' for links takes
 * precedence over 'd' for directories), followed by the user, group and
 * world rwx triplets. The setuid/setgid/sticky bits replace the matching
 * execute column with 's'/'t' when execute is set, or 'S'/'T' when not.
 *
 * @param mode   Unix permission bits
 * @param isDir  true when the node is a directory
 * @param isLink true when the node is a (symbolic) link
 */
public static String modeToString(int mode, boolean isDir, boolean isLink)
{
    StringBuilder str = new StringBuilder(10);
    str.append(isLink ? 'l' : (isDir ? 'd' : '-'));
    // rwx triplets for user, group and world, highest bit first.
    int[] masks = { 00400, 00200, 00100, 00040, 00020, 00010, 00004, 00002, 00001 };
    char[] flags = { 'r', 'w', 'x', 'r', 'w', 'x', 'r', 'w', 'x' };
    for (int i = 0; i < masks.length; i++)
        str.append(((mode & masks[i]) != 0) ? flags[i] : '-');
    // setuid/setgid/sticky override the execute column of their triplet:
    if ((mode & 04000) != 0)
        str.setCharAt(3, ((mode & 00100) != 0) ? 's' : 'S');
    if ((mode & 02000) != 0)
        str.setCharAt(6, ((mode & 00010) != 0) ? 's' : 'S');
    if ((mode & 01000) != 0)
        str.setCharAt(9, ((mode & 00001) != 0) ? 't' : 'T');
    return str.toString();
}
// ===
// VFS Helper methods, interface might changed:
// ===
// helper methods:
/**
 * Preferred read buffer size (in bytes) when streaming from a node.
 * Currently returns a fixed default; per-resource stream optimization is
 * still a todo, which is why the node argument is ignored.
 *
 * @param node the node that will be read from (currently unused)
 */
public static int getOptimalReadBufferSizeFor(VNode node) throws VrsException
{
    // todo: Stream optimization !
    return VRS.DEFAULT_STREAM_READ_CHUNK_SIZE;
}
// helper methods
/**
 * Preferred write buffer size (in bytes) when streaming to a node.
 * Currently returns a fixed default; per-resource stream optimization is
 * still a todo, which is why the node argument is ignored.
 *
 * @param node the node that will be written to (currently unused)
 */
public static int getOptimalWriteBufferSizeFor(VNode node) throws VrsException
{
    // todo: Stream optimization !
    return VRS.DEFAULT_STREAM_WRITE_CHUNK_SIZE;
}
/**
 * Static method to open a new FileSystem given the context and the location.
 *
 * @param context  VRS context used to resolve the resource system
 * @param location location whose scheme selects the filesystem implementation
 * @return the opened file system
 * @throws VrsException if no implementation can be found, or the resolved
 *         resource system is not a file system
 */
public static VFileSystem openFileSystem(VRSContext context, VRL location) throws VrsException
{
    VResourceSystem rs = openResourceSystem(context, location);
    if (rs == null)
        // fix: message previously read "Could find ..." (missing "not")
        throw new nl.esciencecenter.vlet.exception.VrsServiceTypeMismatchException("Could not find filesystem implementation for:"
                + location);
    if (!(rs instanceof VFileSystem))
        throw new nl.esciencecenter.vlet.exception.VrsServiceTypeMismatchException("Remote Resource is not a File System:"
                + location);
    return (VFileSystem) rs;
}
/**
 * Create a new VFSClient bound to the given context, used to access the
 * Virtual File System.
 */
public static VFSClient createVFSClient(VRSContext context)
{
    return new VFSClient(context);
}
/**
 * VFile factory method: create a new VFile object for the given location
 * using the specified context. The file does not have to exist on the
 * (remote) filesystem yet; use VFile.exists() or VFile.create() to check
 * or create it afterwards.
 *
 * @param context VRSContext used to resolve the filesystem
 * @param fileVRL new or existing file location
 * @throws VrsException if the remote filesystem couldn't be contacted or
 *         another error occurred
 * @see VFileSystem#newFile(VRL)
 */
public static VFile newVFile(VRSContext context, VRL fileVRL) throws VrsException
{
    VFileSystem fileSystem = openFileSystem(context, fileVRL);
    return fileSystem.newFile(fileVRL);
}
/**
 * VDir factory method: create a new VDir object for the given location
 * using the specified context. The directory does not have to exist on the
 * (remote) filesystem yet — provided the VFileSystem supports creating new
 * VDir objects; use VDir.exists() or VDir.create() to check or create it.
 *
 * @param context VRSContext used to resolve the filesystem
 * @param dirVRL  new or existing directory location
 * @throws VrsException if the remote filesystem couldn't be contacted or
 *         another error occurred
 * @see VFileSystem#newDir(VRL)
 */
public static VDir newVDir(VRSContext context, VRL dirVRL) throws VrsException
{
    VFileSystem fileSystem = openFileSystem(context, dirVRL);
    return fileSystem.newDir(dirVRL);
}
}
| |
package test.com.github.spartatech.testutils.temporal;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import org.junit.Assert;
import org.junit.ComparisonFailure;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.spartatech.testutils.exception.FieldNotFoundException;
import com.github.spartatech.testutils.temporal.DateAssertUtils;
/**
*
* Unit tests for Date Assert Utils.
*
* @author Daniel Conde Diehl
*
* History:
* Dec 29, 2016 - Daniel Conde Diehl
*
*/
public class TestDateAssertUtils extends DateAssertUtils {
    private static final Logger LOGGER = LoggerFactory.getLogger(TestDateAssertUtils.class);
    // Caller-supplied message used to verify assertion-message propagation.
    private static final String MESSAGE = "test message";

    /** Same instant compared with no explicit field list: must pass. */
    @Test
    public void testAssertDateAssertNoFieldsSameDate() throws Exception {
        final Calendar cal = Calendar.getInstance();
        final Date expected = cal.getTime();
        final Date actual = cal.getTime();
        DateAssertUtils.assertDate(expected, actual);
    }

    /** Instants ~10ms apart must fail an exact (no field list) comparison. */
    @Test(expected=AssertionError.class)
    public void testAssertDateAssertNoFieldsDifferentDate() throws Exception {
        final Date expected = new Date();
        // Sleep so the two Dates differ at millisecond precision.
        Thread.sleep(10);
        final Date actual = new Date();
        DateAssertUtils.assertDate(expected, actual);
    }

    /** Exact-comparison failure must carry the caller-supplied message verbatim. */
    @Test(expected=AssertionError.class)
    public void testAssertDateAssertNoFieldsDifferentDateWithMessage() throws Exception {
        final Date expected = new Date();
        Thread.sleep(10);
        final Date actual = new Date();
        try {
            DateAssertUtils.assertDate(MESSAGE, expected, actual);
        } catch (AssertionError e) {
            if (!e.getMessage().equals(MESSAGE)) {
                Assert.fail("Invalid Message");
            }
            // Rethrow so the @Test(expected=...) contract is still satisfied.
            throw e;
        }
    }

    /**
     * Dates two hours apart still match when only DATE/MONTH/YEAR are
     * compared. The shift direction is chosen so the day never rolls over.
     */
    @Test
    public void testAssertDateAssertFieldsMatching() throws Exception {
        final Calendar cal = Calendar.getInstance();
        final Date expected = cal.getTime();
        int hourDiff = -2;
        if (cal.get(Calendar.HOUR_OF_DAY) < 2) {
            hourDiff = 2;
        }
        cal.add(Calendar.HOUR_OF_DAY, hourDiff);
        final Date actual = cal.getTime();
        LOGGER.info("Fields Matching Unit Test. Expected:{}, actual: {}", expected, actual);
        DateAssertUtils.assertDate(expected, actual, Calendar.DATE, Calendar.MONTH, Calendar.YEAR);
    }

    /**
     * Adding HOUR_OF_DAY to the compared fields makes the two-hour shift a
     * mismatch; the failure must name the field and report the expected and
     * actual hour values.
     */
    @Test(expected=ComparisonFailure.class)
    public void testAssertDateAssertFieldsNoMatching() throws Exception {
        final Calendar cal = Calendar.getInstance();
        final Date expected = cal.getTime();
        int hourDiff = -2;
        if (cal.get(Calendar.HOUR_OF_DAY) < 2) {
            hourDiff = 2;
        }
        cal.add(Calendar.HOUR_OF_DAY, hourDiff);
        final Date actual = cal.getTime();
        try {
            DateAssertUtils.assertDate(expected, actual, Calendar.DATE, Calendar.MONTH, Calendar.YEAR, Calendar.HOUR_OF_DAY);
        } catch (ComparisonFailure e) {
            if (!e.getMessage().startsWith("Field HOUR_OF_DAY mismatch")) {
                Assert.fail("Invalid Message - " + e.getMessage());
            }
            // cal now holds the *actual* hour; undo the shift to get the expected one.
            if (!e.getExpected().equals(String.valueOf(cal.get(Calendar.HOUR_OF_DAY)+(hourDiff *-1)))) {
                Assert.fail("Invalid expected "+ String.valueOf(cal.get(Calendar.HOUR_OF_DAY)+(hourDiff *-1)) + " But was: "+ e.getExpected());
            }
            if (!e.getActual().equals(String.valueOf(cal.get(Calendar.HOUR_OF_DAY)))) {
                Assert.fail("Invalid actual "+ String.valueOf(cal.get(Calendar.HOUR_OF_DAY)) + " But was: "+ e.getActual());
            }
            throw e;
        }
    }

    /** Same as the field-mismatch case, but the failure must start with the custom message. */
    @Test(expected=ComparisonFailure.class)
    public void testAssertDateAssertFieldsNoMatchingWithMessage() throws Exception {
        final Calendar cal = Calendar.getInstance();
        final Date expected = cal.getTime();
        int hourDiff = -2;
        if (cal.get(Calendar.HOUR_OF_DAY) < 2) {
            hourDiff = 2;
        }
        cal.add(Calendar.HOUR_OF_DAY, hourDiff);
        final Date actual = cal.getTime();
        try {
            DateAssertUtils.assertDate(MESSAGE, expected, actual, Calendar.DATE, Calendar.MONTH, Calendar.YEAR, Calendar.HOUR_OF_DAY);
        } catch (ComparisonFailure e) {
            if (!e.getMessage().startsWith(MESSAGE)) {
                Assert.fail("Invalid Message - " + e.getMessage());
            }
            if (!e.getExpected().equals(String.valueOf(cal.get(Calendar.HOUR_OF_DAY)+(hourDiff *-1)))) {
                Assert.fail("Invalid expected "+ String.valueOf(cal.get(Calendar.HOUR_OF_DAY)+(hourDiff *-1)) + " But was: "+ e.getExpected());
            }
            if (!e.getActual().equals(String.valueOf(cal.get(Calendar.HOUR_OF_DAY)))) {
                Assert.fail("Invalid actual "+ String.valueOf(cal.get(Calendar.HOUR_OF_DAY)) + " But was: "+ e.getActual());
            }
            throw e;
        }
    }

    /** Two "now" instants compared with a date-only pattern: must match. */
    @Test
    public void testAssertDateByFormatMatching() throws Exception {
        final Date expected = new Date();
        final Date actual = new Date();
        final String format = "yyyy-MM-dd";
        DateAssertUtils.assertDateByFormat(expected, actual, format);
    }

    /** Two minutes apart with a seconds-precision pattern: must fail with formatted values. */
    @Test(expected=ComparisonFailure.class)
    public void testAssertDateByFormatNotMatching() throws Exception {
        final Calendar cal = Calendar.getInstance();
        final Date expected = cal.getTime();
        cal.add(Calendar.MINUTE, -2);
        final Date actual = cal.getTime();
        final String format = "yyyy-MM-dd HH:mm:ss";
        final SimpleDateFormat sdf = new SimpleDateFormat(format);
        try {
            DateAssertUtils.assertDateByFormat(expected, actual, format);
        } catch (ComparisonFailure e) {
            if (!e.getMessage().startsWith("Date mismatch")) {
                Assert.fail("Invalid Message - " + e.getMessage());
            }
            if (!e.getExpected().equals(sdf.format(expected))) {
                Assert.fail("Invalid expected "+ sdf.format(expected) + " But was: "+ e.getExpected());
            }
            if (!e.getActual().equals(sdf.format(actual))) {
                Assert.fail("Invalid actual "+ sdf.format(actual) + " But was: "+ e.getActual());
            }
            throw e;
        }
    }

    /** As above, but the failure message must start with the custom message. */
    @Test(expected=ComparisonFailure.class)
    public void testAssertDateByFormatNotMatchingWithMessage() throws Exception {
        final Calendar cal = Calendar.getInstance();
        final Date expected = cal.getTime();
        cal.add(Calendar.MINUTE, -2);
        final Date actual = cal.getTime();
        final String format = "yyyy-MM-dd HH:mm:ss";
        final SimpleDateFormat sdf = new SimpleDateFormat(format);
        try {
            DateAssertUtils.assertDateByFormat(MESSAGE, expected, actual, format);
        } catch (ComparisonFailure e) {
            if (!e.getMessage().startsWith(MESSAGE)) {
                Assert.fail("Invalid Message - " + e.getMessage());
            }
            if (!e.getExpected().equals(sdf.format(expected))) {
                Assert.fail("Invalid expected "+ sdf.format(expected) + " But was: "+ e.getExpected());
            }
            if (!e.getActual().equals(sdf.format(actual))) {
                Assert.fail("Invalid actual "+ sdf.format(actual) + " But was: "+ e.getActual());
            }
            throw e;
        }
    }

    /** An unknown Calendar field constant (666) must raise FieldNotFoundException. */
    @Test(expected=FieldNotFoundException.class)
    public void testAssertDateInvalidField() throws FieldNotFoundException {
        final Calendar cal = Calendar.getInstance();
        final Date expected = cal.getTime();
        final Date actual = cal.getTime();
        DateAssertUtils.assertDate(expected, actual, 666);
    }
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.09.07 at 08:01:35 PM IST
//
package com.mozu.qbintegration.model.qbmodel.allgen;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for CustomerQueryRqType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="CustomerQueryRqType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <group ref="{}CustomerListQuery"/>
* </sequence>
* <attribute name="requestID" type="{}STRTYPE" />
* <attribute name="metaData" default="NoMetaData">
* <simpleType>
* <restriction base="{}STRTYPE">
* <enumeration value="NoMetaData"/>
* <enumeration value="MetaDataOnly"/>
* <enumeration value="MetaDataAndResponseData"/>
* </restriction>
* </simpleType>
* </attribute>
* <attribute name="iterator">
* <simpleType>
* <restriction base="{}STRTYPE">
* <enumeration value="Start"/>
* <enumeration value="Continue"/>
* <enumeration value="Stop"/>
* </restriction>
* </simpleType>
* </attribute>
* <attribute name="iteratorID" type="{}STRTYPE" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "CustomerQueryRqType", propOrder = {
    "listID",
    "fullName",
    "maxReturned",
    "activeStatus",
    "fromModifiedDate",
    "toModifiedDate",
    "nameFilter",
    "nameRangeFilter",
    "totalBalanceFilter",
    "currencyFilter",
    "classFilter",
    "includeRetElement",
    "ownerID"
})
public class CustomerQueryRqType {

    @XmlElement(name = "ListID")
    protected List<String> listID;
    @XmlElement(name = "FullName")
    protected List<String> fullName;
    @XmlElement(name = "MaxReturned")
    protected BigInteger maxReturned;
    @XmlElement(name = "ActiveStatus", defaultValue = "ActiveOnly")
    protected String activeStatus;
    @XmlElement(name = "FromModifiedDate")
    protected String fromModifiedDate;
    @XmlElement(name = "ToModifiedDate")
    protected String toModifiedDate;
    @XmlElement(name = "NameFilter")
    protected NameFilter nameFilter;
    @XmlElement(name = "NameRangeFilter")
    protected NameRangeFilter nameRangeFilter;
    @XmlElement(name = "TotalBalanceFilter")
    protected TotalBalanceFilter totalBalanceFilter;
    @XmlElement(name = "CurrencyFilter")
    protected CurrencyFilter currencyFilter;
    @XmlElement(name = "ClassFilter")
    protected ClassFilter classFilter;
    @XmlElement(name = "IncludeRetElement")
    protected List<String> includeRetElement;
    @XmlElement(name = "OwnerID")
    protected List<String> ownerID;
    @XmlAttribute(name = "requestID")
    protected String requestID;
    @XmlAttribute(name = "metaData")
    protected String metaData;
    @XmlAttribute(name = "iterator")
    protected String iterator;
    @XmlAttribute(name = "iteratorID")
    protected String iteratorID;

    /**
     * Live, lazily-created list of ListID values. Modifications to the
     * returned list are reflected in this JAXB object, which is why there
     * is no setter for this property.
     */
    public List<String> getListID() {
        if (this.listID == null) {
            this.listID = new ArrayList<String>();
        }
        return this.listID;
    }

    /**
     * Live, lazily-created list of FullName values; mutate the returned
     * list directly (no setter by JAXB convention).
     */
    public List<String> getFullName() {
        if (this.fullName == null) {
            this.fullName = new ArrayList<String>();
        }
        return this.fullName;
    }

    /** @return the maxReturned property, possibly null */
    public BigInteger getMaxReturned() {
        return maxReturned;
    }

    /** @param value the maxReturned property */
    public void setMaxReturned(BigInteger value) {
        this.maxReturned = value;
    }

    /** @return the activeStatus property, possibly null */
    public String getActiveStatus() {
        return activeStatus;
    }

    /** @param value the activeStatus property */
    public void setActiveStatus(String value) {
        this.activeStatus = value;
    }

    /** @return the fromModifiedDate property, possibly null */
    public String getFromModifiedDate() {
        return fromModifiedDate;
    }

    /** @param value the fromModifiedDate property */
    public void setFromModifiedDate(String value) {
        this.fromModifiedDate = value;
    }

    /** @return the toModifiedDate property, possibly null */
    public String getToModifiedDate() {
        return toModifiedDate;
    }

    /** @param value the toModifiedDate property */
    public void setToModifiedDate(String value) {
        this.toModifiedDate = value;
    }

    /** @return the nameFilter property, possibly null */
    public NameFilter getNameFilter() {
        return nameFilter;
    }

    /** @param value the nameFilter property */
    public void setNameFilter(NameFilter value) {
        this.nameFilter = value;
    }

    /** @return the nameRangeFilter property, possibly null */
    public NameRangeFilter getNameRangeFilter() {
        return nameRangeFilter;
    }

    /** @param value the nameRangeFilter property */
    public void setNameRangeFilter(NameRangeFilter value) {
        this.nameRangeFilter = value;
    }

    /** @return the totalBalanceFilter property, possibly null */
    public TotalBalanceFilter getTotalBalanceFilter() {
        return totalBalanceFilter;
    }

    /** @param value the totalBalanceFilter property */
    public void setTotalBalanceFilter(TotalBalanceFilter value) {
        this.totalBalanceFilter = value;
    }

    /** @return the currencyFilter property, possibly null */
    public CurrencyFilter getCurrencyFilter() {
        return currencyFilter;
    }

    /** @param value the currencyFilter property */
    public void setCurrencyFilter(CurrencyFilter value) {
        this.currencyFilter = value;
    }

    /** @return the classFilter property, possibly null */
    public ClassFilter getClassFilter() {
        return classFilter;
    }

    /** @param value the classFilter property */
    public void setClassFilter(ClassFilter value) {
        this.classFilter = value;
    }

    /**
     * Live, lazily-created list of IncludeRetElement values; mutate the
     * returned list directly (no setter by JAXB convention).
     */
    public List<String> getIncludeRetElement() {
        if (this.includeRetElement == null) {
            this.includeRetElement = new ArrayList<String>();
        }
        return this.includeRetElement;
    }

    /**
     * Live, lazily-created list of OwnerID values; mutate the returned
     * list directly (no setter by JAXB convention).
     */
    public List<String> getOwnerID() {
        if (this.ownerID == null) {
            this.ownerID = new ArrayList<String>();
        }
        return this.ownerID;
    }

    /** @return the requestID attribute, possibly null */
    public String getRequestID() {
        return requestID;
    }

    /** @param value the requestID attribute */
    public void setRequestID(String value) {
        this.requestID = value;
    }

    /** @return the metaData attribute, defaulting to "NoMetaData" when unset */
    public String getMetaData() {
        return (metaData == null) ? "NoMetaData" : metaData;
    }

    /** @param value the metaData attribute */
    public void setMetaData(String value) {
        this.metaData = value;
    }

    /** @return the iterator attribute (Start/Continue/Stop), possibly null */
    public String getIterator() {
        return iterator;
    }

    /** @param value the iterator attribute */
    public void setIterator(String value) {
        this.iterator = value;
    }

    /** @return the iteratorID attribute, possibly null */
    public String getIteratorID() {
        return iteratorID;
    }

    /** @param value the iteratorID attribute */
    public void setIteratorID(String value) {
        this.iteratorID = value;
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.waf.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* In a <a>GetSampledRequests</a> request, the <code>StartTime</code> and <code>EndTime</code> objects specify the time
* range for which you want AWS WAF to return a sample of web requests.
* </p>
* <p>
* In a <a>GetSampledRequests</a> response, the <code>StartTime</code> and <code>EndTime</code> objects specify the time
* range for which AWS WAF actually returned a sample of web requests. AWS WAF gets the specified number of requests
* from among the first 5,000 requests that your AWS resource receives during the specified time period. If your
* resource receives more than 5,000 requests during that period, AWS WAF stops sampling after the 5,000th request. In
* that case, <code>EndTime</code> is the time that AWS WAF received the 5,000th request.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/waf-regional-2016-11-28/TimeWindow" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class TimeWindow implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The beginning of the time range from which you want <code>GetSampledRequests</code> to return a sample of the
* requests that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
* </p>
*/
private java.util.Date startTime;
/**
* <p>
* The end of the time range from which you want <code>GetSampledRequests</code> to return a sample of the requests
* that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
* </p>
*/
private java.util.Date endTime;
/**
* <p>
* The beginning of the time range from which you want <code>GetSampledRequests</code> to return a sample of the
* requests that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
* </p>
*
* @param startTime
* The beginning of the time range from which you want <code>GetSampledRequests</code> to return a sample of
* the requests that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
*/
public void setStartTime(java.util.Date startTime) {
this.startTime = startTime;
}
/**
* <p>
* The beginning of the time range from which you want <code>GetSampledRequests</code> to return a sample of the
* requests that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
* </p>
*
* @return The beginning of the time range from which you want <code>GetSampledRequests</code> to return a sample of
* the requests that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
*/
public java.util.Date getStartTime() {
return this.startTime;
}
/**
* <p>
* The beginning of the time range from which you want <code>GetSampledRequests</code> to return a sample of the
* requests that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
* </p>
*
* @param startTime
* The beginning of the time range from which you want <code>GetSampledRequests</code> to return a sample of
* the requests that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public TimeWindow withStartTime(java.util.Date startTime) {
setStartTime(startTime);
return this;
}
/**
* <p>
* The end of the time range from which you want <code>GetSampledRequests</code> to return a sample of the requests
* that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
* </p>
*
* @param endTime
* The end of the time range from which you want <code>GetSampledRequests</code> to return a sample of the
* requests that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
*/
public void setEndTime(java.util.Date endTime) {
this.endTime = endTime;
}
/**
* <p>
* The end of the time range from which you want <code>GetSampledRequests</code> to return a sample of the requests
* that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
* </p>
*
* @return The end of the time range from which you want <code>GetSampledRequests</code> to return a sample of the
* requests that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
*/
public java.util.Date getEndTime() {
return this.endTime;
}
/**
* <p>
* The end of the time range from which you want <code>GetSampledRequests</code> to return a sample of the requests
* that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
* </p>
*
* @param endTime
* The end of the time range from which you want <code>GetSampledRequests</code> to return a sample of the
* requests that your AWS resource received. Specify the date and time in the following format:
* <code>"2016-09-27T14:50Z"</code>. You can specify any time range in the previous three hours.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public TimeWindow withEndTime(java.util.Date endTime) {
setEndTime(endTime);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getStartTime() != null)
sb.append("StartTime: ").append(getStartTime()).append(",");
if (getEndTime() != null)
sb.append("EndTime: ").append(getEndTime());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof TimeWindow == false)
return false;
TimeWindow other = (TimeWindow) obj;
if (other.getStartTime() == null ^ this.getStartTime() == null)
return false;
if (other.getStartTime() != null && other.getStartTime().equals(this.getStartTime()) == false)
return false;
if (other.getEndTime() == null ^ this.getEndTime() == null)
return false;
if (other.getEndTime() != null && other.getEndTime().equals(this.getEndTime()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getStartTime() == null) ? 0 : getStartTime().hashCode());
hashCode = prime * hashCode + ((getEndTime() == null) ? 0 : getEndTime().hashCode());
return hashCode;
}
    /**
     * Returns a shallow copy of this object via {@link Object#clone()}.
     * CloneNotSupportedException is rethrown as an IllegalStateException
     * because this type is expected to implement Cloneable.
     */
    @Override
    public TimeWindow clone() {
        try {
            return (TimeWindow) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
    /**
     * SDK-internal hook: delegates wire marshalling of this object's fields
     * to the generated {@code TimeWindowMarshaller} singleton.
     */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.waf.model.waf_regional.transform.TimeWindowMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.ant;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.taskdefs.Redirector;
import org.apache.tools.ant.types.RedirectorElement;
/**
* Abstract base class to add output redirection support for Catalina
* Ant tasks. These tasks require Ant 1.5 or later.
* <br>
* <strong>WARNING:</strong> due to depends chain, Ant could call a Task
* more than once and this can affect the output redirection when configured.
* If you are collecting the output in a property, it will collect the output
* of only the first run, since Ant properties are immutable and once created
* they cannot be changed.
* <br>
* If you are collecting output in a file the file will be overwritten with the
* output of the last run, unless you set append="true", in which case each run
* will append it's output to the file.
*
*
* @author Gabriele Garuglieri
* @since 5.5
*/
public abstract class BaseRedirectorHelperTask extends Task {

    // ------------------------------------------------------------- Properties

    /** Redirector helper that performs the actual stream redirection. */
    protected Redirector redirector = new Redirector(this);

    /** Redirector element configured for this task, if any. */
    protected RedirectorElement redirectorElement = null;

    /** The stream for info output. */
    protected OutputStream redirectOutStream = null;

    /** The stream for error output. */
    protected OutputStream redirectErrStream = null;

    /** The print stream for info output. */
    PrintStream redirectOutPrintStream = null;

    /** The print stream for error output. */
    PrintStream redirectErrPrintStream = null;

    /**
     * Whether to fail (with a BuildException) if ManagerServlet returns an
     * error. The default behavior is to do so.
     * <br>
     * This flag does not control parameter checking: if the task is called
     * with wrong or invalid parameters, it will throw a BuildException
     * regardless of this setting.
     */
    protected boolean failOnError = true;

    /**
     * <code>true</code> when output redirection is requested for this task.
     * The default is to log on the Ant log.
     */
    protected boolean redirectOutput = false;

    /**
     * Set to <code>true</code> when the configuration of the Redirector is
     * complete, so that a depends chain re-running the task does not
     * configure it again.
     */
    protected boolean redirectorConfigured = false;

    /**
     * Flag which indicates that, if redirected, output should also always be
     * sent to the log. Default is that output is sent only to the redirected
     * streams.
     */
    protected boolean alwaysLog = false;

    /**
     * Whether to fail (with a BuildException) if ManagerServlet returns an
     * error. The default behavior is to do so.
     *
     * @param fail the new value of the flag
     */
    public void setFailonerror(boolean fail) {
        failOnError = fail;
    }

    /**
     * @return the value of the failOnError property
     */
    public boolean isFailOnError() {
        return failOnError;
    }

    /**
     * File the output of the task is redirected to.
     *
     * @param out name of the output file
     */
    public void setOutput(File out) {
        redirector.setOutput(out);
        redirectOutput = true;
    }

    /**
     * File the error output of the task is redirected to.
     *
     * @param error name of the error file
     */
    public void setError(File error) {
        redirector.setError(error);
        redirectOutput = true;
    }

    /**
     * Controls whether error output is logged. This is only useful when
     * output is being redirected and error output is desired in the Ant log.
     *
     * @param logError if true the standard error is sent to the Ant log
     *                 system and not sent to the output stream
     */
    public void setLogError(boolean logError) {
        redirector.setLogError(logError);
        redirectOutput = true;
    }

    /**
     * Property name whose value should be set to the output of the task.
     *
     * @param outputProperty property name
     */
    public void setOutputproperty(String outputProperty) {
        redirector.setOutputProperty(outputProperty);
        redirectOutput = true;
    }

    /**
     * Property name whose value should be set to the error output of the
     * task.
     *
     * @param errorProperty property name
     */
    public void setErrorProperty(String errorProperty) {
        redirector.setErrorProperty(errorProperty);
        redirectOutput = true;
    }

    /**
     * If true, append output to an existing file.
     *
     * @param append if true, append output to an existing file
     */
    public void setAppend(boolean append) {
        redirector.setAppend(append);
        redirectOutput = true;
    }

    /**
     * If true, (error and non-error) output will be redirected as specified
     * while being sent to Ant's logging mechanism as if no redirection had
     * taken place. Defaults to false.
     * <br>
     * Handled internally by this class; with Ant 1.6.3 it can be handled by
     * the <code>Redirector</code> itself.
     *
     * @param alwaysLog <code>boolean</code>
     */
    public void setAlwaysLog(boolean alwaysLog) {
        this.alwaysLog = alwaysLog;
        redirectOutput = true;
    }

    /**
     * Whether output and error files should be created even when empty.
     * Defaults to true.
     *
     * @param createEmptyFiles <code>boolean</code>
     */
    public void setCreateEmptyFiles(boolean createEmptyFiles) {
        redirector.setCreateEmptyFiles(createEmptyFiles);
        redirectOutput = true;
    }

    /**
     * Add a <code>RedirectorElement</code> to this task.
     *
     * @param redirectorElement <code>RedirectorElement</code>
     */
    public void addConfiguredRedirector(RedirectorElement redirectorElement) {
        if (this.redirectorElement != null) {
            throw new BuildException("Cannot have > 1 nested <redirector>s");
        }
        this.redirectorElement = redirectorElement;
    }

    /**
     * Set up properties on the Redirector from the RedirectorElement, if
     * present.
     */
    private void configureRedirector() {
        if (redirectorElement != null) {
            redirectorElement.configure(redirector);
            redirectOutput = true;
        }
        /*
         * Due to the depends chain, Ant could call the Task more than once;
         * this flag prevents us from uselessly configuring the Redirector
         * more than once.
         */
        redirectorConfigured = true;
    }

    /**
     * Set up properties on the Redirector and create the output streams.
     */
    protected void openRedirector() {
        if (!redirectorConfigured) {
            configureRedirector();
        }
        if (redirectOutput) {
            redirector.createStreams();
            redirectOutStream = redirector.getOutputStream();
            redirectOutPrintStream = new PrintStream(redirectOutStream);
            redirectErrStream = redirector.getErrorStream();
            redirectErrPrintStream = new PrintStream(redirectErrStream);
        }
    }

    /**
     * Ask the redirector to close all the streams. It is necessary to call
     * this method before leaving the Task to have the streams flush their
     * contents. If you are collecting output in a property, it will be
     * created only if this method is called; otherwise you'll find it unset.
     */
    protected void closeRedirector() {
        try {
            if (redirectOutput && redirectOutPrintStream != null) {
                redirector.complete();
            }
        } catch (IOException ioe) {
            log("Error closing redirector: "
                + ioe.getMessage(), Project.MSG_ERR);
        }
        /*
         * Due to the depends chain, Ant could call the Task more than once;
         * nulling the streams prevents us from attempting to reuse the
         * previously closed ones.
         */
        redirectOutStream = null;
        redirectOutPrintStream = null;
        redirectErrStream = null;
        redirectErrPrintStream = null;
    }

    /**
     * Handles output with the INFO priority.
     *
     * @param output The output to log. Should not be <code>null</code>.
     */
    @Override
    protected void handleOutput(String output) {
        if (redirectOutput) {
            // Lazily open the redirector on first use.
            if (redirectOutPrintStream == null) {
                openRedirector();
            }
            redirectOutPrintStream.println(output);
            if (alwaysLog) {
                log(output, Project.MSG_INFO);
            }
        } else {
            log(output, Project.MSG_INFO);
        }
    }

    /**
     * Handles output with the INFO priority and flushes the stream.
     *
     * @param output The output to log. Should not be <code>null</code>.
     */
    @Override
    protected void handleFlush(String output) {
        handleOutput(output);
        // The print stream exists only when output is actually redirected;
        // guard against an NPE when redirection is not configured.
        if (redirectOutPrintStream != null) {
            redirectOutPrintStream.flush();
        }
    }

    /**
     * Handles error output with the ERR priority.
     *
     * @param output The error output to log. Should not be <code>null</code>.
     */
    @Override
    protected void handleErrorOutput(String output) {
        if (redirectOutput) {
            // Lazily open the redirector on first use.
            if (redirectErrPrintStream == null) {
                openRedirector();
            }
            redirectErrPrintStream.println(output);
            if (alwaysLog) {
                log(output, Project.MSG_ERR);
            }
        } else {
            log(output, Project.MSG_ERR);
        }
    }

    /**
     * Handles error output with the ERR priority and flushes the stream.
     *
     * @param output The error output to log. Should not be <code>null</code>.
     */
    @Override
    protected void handleErrorFlush(String output) {
        handleErrorOutput(output);
        // Same NPE guard as handleFlush(): the stream is null when no
        // redirection is configured.
        if (redirectErrPrintStream != null) {
            redirectErrPrintStream.flush();
        }
    }

    /**
     * Handles output with ERR priority to the error stream and all other
     * priorities to the output stream.
     *
     * @param output   The output to log. Should not be <code>null</code>.
     * @param priority the Ant log priority of this line of output
     */
    protected void handleOutput(String output, int priority) {
        if (priority == Project.MSG_ERR) {
            handleErrorOutput(output);
        } else {
            handleOutput(output);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.orm.dao;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.persist.UnitOfWork;
import junit.framework.Assert;
import org.apache.ambari.server.controller.RootServiceResponseFactory;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.OrmTestHelper;
import org.apache.ambari.server.orm.entities.AlertCurrentEntity;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
import org.apache.ambari.server.orm.entities.AlertGroupEntity;
import org.apache.ambari.server.orm.entities.AlertHistoryEntity;
import org.apache.ambari.server.orm.entities.AlertNoticeEntity;
import org.apache.ambari.server.orm.entities.ClusterEntity;
import org.apache.ambari.server.state.AlertState;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.MaintenanceState;
import org.apache.ambari.server.state.NotificationState;
import org.apache.ambari.server.state.alert.Scope;
import org.apache.ambari.server.state.alert.SourceType;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;
import java.util.UUID;
import org.apache.ambari.server.H2DatabaseCleaner;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
* Tests {@link AlertDefinitionDAO} for interacting with
* {@link AlertDefinitionEntity}.
*/
public class AlertDefinitionDAOTest {
static Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
Injector injector;
Long clusterId;
AlertDefinitionDAO dao;
AlertsDAO alertsDao;
AlertDispatchDAO dispatchDao;
OrmTestHelper helper;
/**
*
*/
@Before
public void setup() throws Exception {
// LoggerFactory.getLogger("eclipselink").
injector = Guice.createInjector(new InMemoryDefaultTestModule());
injector.getInstance(GuiceJpaInitializer.class);
injector.getInstance(UnitOfWork.class).begin();
dispatchDao = injector.getInstance(AlertDispatchDAO.class);
dao = injector.getInstance(AlertDefinitionDAO.class);
alertsDao = injector.getInstance(AlertsDAO.class);
helper = injector.getInstance(OrmTestHelper.class);
clusterId = helper.createCluster();
// create required default groups
helper.createDefaultAlertGroups(clusterId);
// create 8 HDFS alerts
int i = 0;
for (; i < 8; i++) {
AlertDefinitionEntity definition = new AlertDefinitionEntity();
definition.setDefinitionName("Alert Definition " + i);
definition.setServiceName("HDFS");
definition.setComponentName(null);
definition.setClusterId(clusterId);
definition.setHash(UUID.randomUUID().toString());
definition.setScheduleInterval(60);
definition.setScope(Scope.SERVICE);
definition.setSource("{\"type\" : \"SCRIPT\"}");
definition.setSourceType(SourceType.SCRIPT);
dao.create(definition);
}
// create 2 HDFS with components
for (; i < 10; i++) {
AlertDefinitionEntity definition = new AlertDefinitionEntity();
definition.setDefinitionName("Alert Definition " + i);
definition.setServiceName("HDFS");
if (i == 9) {
definition.setComponentName("NAMENODE");
} else {
definition.setComponentName("DATANODE");
}
definition.setClusterId(clusterId);
definition.setHash(UUID.randomUUID().toString());
definition.setScheduleInterval(60);
definition.setScope(Scope.SERVICE);
definition.setSource("{\"type\" : \"SCRIPT\"}");
definition.setSourceType(SourceType.SCRIPT);
dao.create(definition);
}
// create 2 host scoped
for (; i < 12; i++) {
AlertDefinitionEntity definition = new AlertDefinitionEntity();
definition.setDefinitionName("Alert Definition " + i);
definition.setServiceName("OOZIE");
definition.setComponentName("OOZIE_SERVER");
definition.setClusterId(clusterId);
definition.setHash(UUID.randomUUID().toString());
definition.setScheduleInterval(60);
definition.setScope(Scope.HOST);
definition.setSource("{\"type\" : \"SCRIPT\"}");
definition.setSourceType(SourceType.SCRIPT);
dao.create(definition);
}
// create 3 agent alerts
for (; i < 15; i++) {
AlertDefinitionEntity definition = new AlertDefinitionEntity();
definition.setDefinitionName("Alert Definition " + i);
definition.setServiceName(RootServiceResponseFactory.Services.AMBARI.name());
definition.setComponentName(RootServiceResponseFactory.Components.AMBARI_AGENT.name());
definition.setClusterId(clusterId);
definition.setHash(UUID.randomUUID().toString());
definition.setScheduleInterval(60);
definition.setScope(Scope.HOST);
definition.setSource("{\"type\" : \"SCRIPT\"}");
definition.setSourceType(SourceType.SCRIPT);
dao.create(definition);
}
}
@After
public void teardown() throws Exception {
injector.getInstance(UnitOfWork.class).end();
H2DatabaseCleaner.clearDatabaseAndStopPersistenceService(injector);
injector = null;
}
/**
*
*/
@Test
public void testFindByName() {
List<AlertDefinitionEntity> definitions = dao.findAll();
assertNotNull(definitions);
AlertDefinitionEntity definition = definitions.get(2);
AlertDefinitionEntity retrieved = dao.findByName(
definition.getClusterId(), definition.getDefinitionName());
assertEquals(definition, retrieved);
}
/**
*
*/
@Test
public void testFindAll() {
List<AlertDefinitionEntity> definitions = dao.findAll();
assertNotNull(definitions);
assertEquals(15, definitions.size());
}
/**
*
*/
@Test
public void testFindAllEnabled() {
List<AlertDefinitionEntity> definitions = dao.findAll();
assertNotNull(definitions);
assertEquals(15, definitions.size());
List<AlertDefinitionEntity> enabledDefinitions = dao.findAllEnabled(clusterId);
assertNotNull(enabledDefinitions);
assertEquals(definitions.size(), enabledDefinitions.size());
enabledDefinitions.get(0).setEnabled(false);
dao.merge(enabledDefinitions.get(0));
enabledDefinitions = dao.findAllEnabled(clusterId);
assertNotNull(enabledDefinitions);
assertEquals(definitions.size() - 1, enabledDefinitions.size());
}
/**
*
*/
@Test
public void testFindById() {
List<AlertDefinitionEntity> definitions = dao.findAll();
assertNotNull(definitions);
AlertDefinitionEntity definition = definitions.get(2);
AlertDefinitionEntity retrieved = dao.findById(definition.getDefinitionId());
assertEquals(definition, retrieved);
}
/**
*
*/
@Test
public void testFindByIds() {
List<AlertDefinitionEntity> definitions = dao.findAll();
List<Long> ids = new ArrayList<Long>();
ids.add(definitions.get(0).getDefinitionId());
ids.add(definitions.get(1).getDefinitionId());
ids.add(99999L);
definitions = dao.findByIds(ids);
assertEquals(2, definitions.size());
}
/**
*
*/
@Test
public void testFindByService() {
List<AlertDefinitionEntity> definitions = dao.findByService(clusterId,
"HDFS");
assertNotNull(definitions);
assertEquals(10, definitions.size());
definitions = dao.findByService(clusterId, "YARN");
assertNotNull(definitions);
assertEquals(0, definitions.size());
}
/**
*
*/
@Test
public void testFindByServiceComponent() {
List<AlertDefinitionEntity> definitions = dao.findByServiceComponent(
clusterId, "OOZIE", "OOZIE_SERVER");
assertNotNull(definitions);
assertEquals(2, definitions.size());
}
/**
*
*/
@Test
public void testFindAgentScoped() {
List<AlertDefinitionEntity> definitions = dao.findAgentScoped(clusterId);
assertNotNull(definitions);
assertEquals(3, definitions.size());
}
@Test
public void testRefresh() {
}
@Test
public void testCreate() {
}
@Test
public void testMerge() {
}
@Test
public void testRemove() throws Exception {
AlertDefinitionEntity definition = helper.createAlertDefinition(clusterId);
definition = dao.findById(definition.getDefinitionId());
assertNotNull(definition);
dao.remove(definition);
definition = dao.findById(definition.getDefinitionId());
assertNull(definition);
}
/**
* @throws Exception
*/
@Test
public void testCascadeDelete() throws Exception {
AlertDefinitionEntity definition = helper.createAlertDefinition(clusterId);
AlertGroupEntity group = helper.createAlertGroup(clusterId, null);
group.addAlertDefinition(definition);
dispatchDao.merge(group);
AlertHistoryEntity history = new AlertHistoryEntity();
history.setServiceName(definition.getServiceName());
history.setClusterId(clusterId);
history.setAlertDefinition(definition);
history.setAlertLabel("Label");
history.setAlertState(AlertState.OK);
history.setAlertText("Alert Text");
history.setAlertTimestamp(calendar.getTimeInMillis());
alertsDao.create(history);
AlertCurrentEntity current = new AlertCurrentEntity();
current.setAlertHistory(history);
current.setLatestTimestamp(new Date().getTime());
current.setOriginalTimestamp(new Date().getTime() - 10800000);
current.setMaintenanceState(MaintenanceState.OFF);
alertsDao.create(current);
AlertNoticeEntity notice = new AlertNoticeEntity();
notice.setAlertHistory(history);
notice.setAlertTarget(helper.createAlertTarget());
notice.setNotifyState(NotificationState.PENDING);
notice.setUuid(UUID.randomUUID().toString());
dispatchDao.create(notice);
group = dispatchDao.findGroupById(group.getGroupId());
assertNotNull(group);
assertNotNull(group.getAlertDefinitions());
assertEquals(1, group.getAlertDefinitions().size());
history = alertsDao.findById(history.getAlertId());
assertNotNull(history);
current = alertsDao.findCurrentById(current.getAlertId());
assertNotNull(current);
assertNotNull(current.getAlertHistory());
notice = dispatchDao.findNoticeById(notice.getNotificationId());
assertNotNull(notice);
assertNotNull(notice.getAlertHistory());
assertNotNull(notice.getAlertTarget());
// delete the definition
definition = dao.findById(definition.getDefinitionId());
dao.refresh(definition);
dao.remove(definition);
notice = dispatchDao.findNoticeById(notice.getNotificationId());
assertNull(notice);
current = alertsDao.findCurrentById(current.getAlertId());
assertNull(current);
history = alertsDao.findById(history.getAlertId());
assertNull(history);
group = dispatchDao.findGroupById(group.getGroupId());
assertNotNull(group);
assertNotNull(group.getAlertDefinitions());
assertEquals(0, group.getAlertDefinitions().size());
}
/**
* @throws Exception
*/
@Test
public void testCascadeDeleteForCluster() throws Exception {
AlertDefinitionEntity definition = helper.createAlertDefinition(clusterId);
definition = dao.findById(definition.getDefinitionId());
dao.refresh(definition);
ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
ClusterEntity clusterEntity = clusterDAO.findById(clusterId);
clusterDAO.refresh(clusterEntity);
Clusters clusters = injector.getInstance(Clusters.class);
Cluster cluster = clusters.getClusterById(clusterId);
cluster.delete();
assertNull(clusterDAO.findById(clusterId));
assertNull(dao.findById(definition.getDefinitionId()));
assertEquals(0, dispatchDao.findAllGroups(clusterId).size());
}
@Test
public void testNestedClusterEntity() throws Exception {
AlertDefinitionEntity definition = new AlertDefinitionEntity();
definition.setDefinitionName("nested-cluster-entity-test");
definition.setServiceName("HDFS");
definition.setComponentName(null);
definition.setClusterId(clusterId);
definition.setHash(UUID.randomUUID().toString());
definition.setScheduleInterval(60);
definition.setScope(Scope.SERVICE);
definition.setSource("{\"type\" : \"SCRIPT\"}");
definition.setSourceType(SourceType.SCRIPT);
dao.create(definition);
definition = dao.findById(definition.getDefinitionId());
assertNotNull(definition.getCluster());
assertEquals(clusterId, definition.getCluster().getClusterId());
}
@Test
public void testBatchDeleteOfNoticeEntities() throws Exception {
AlertDefinitionEntity definition = helper.createAlertDefinition(clusterId);
AlertGroupEntity group = helper.createAlertGroup(clusterId, null);
group.addAlertDefinition(definition);
dispatchDao.merge(group);
// Add 1000+ notice entities
for (int i = 0; i < 1500; i++) {
AlertHistoryEntity history = new AlertHistoryEntity();
history.setServiceName(definition.getServiceName());
history.setClusterId(clusterId);
history.setAlertDefinition(definition);
history.setAlertLabel("Label");
history.setAlertState(AlertState.OK);
history.setAlertText("Alert Text");
history.setAlertTimestamp(calendar.getTimeInMillis());
alertsDao.create(history);
AlertCurrentEntity current = new AlertCurrentEntity();
current.setAlertHistory(history);
current.setLatestTimestamp(new Date().getTime());
current.setOriginalTimestamp(new Date().getTime() - 10800000);
current.setMaintenanceState(MaintenanceState.OFF);
alertsDao.create(current);
AlertNoticeEntity notice = new AlertNoticeEntity();
notice.setAlertHistory(history);
notice.setAlertTarget(helper.createAlertTarget());
notice.setNotifyState(NotificationState.PENDING);
notice.setUuid(UUID.randomUUID().toString());
dispatchDao.create(notice);
}
group = dispatchDao.findGroupById(group.getGroupId());
assertNotNull(group);
assertNotNull(group.getAlertDefinitions());
assertEquals(1, group.getAlertDefinitions().size());
List<AlertHistoryEntity> historyEntities = alertsDao.findAll();
assertEquals(1500, historyEntities.size());
List<AlertCurrentEntity> currentEntities = alertsDao.findCurrentByDefinitionId(definition.getDefinitionId());
assertNotNull(currentEntities);
assertEquals(1500, currentEntities.size());
List<AlertNoticeEntity> noticeEntities = dispatchDao.findAllNotices();
Assert.assertEquals(1500, noticeEntities.size());
// delete the definition
definition = dao.findById(definition.getDefinitionId());
dao.refresh(definition);
dao.remove(definition);
List<AlertNoticeEntity> notices = dispatchDao.findAllNotices();
assertTrue(notices.isEmpty());
currentEntities = alertsDao.findCurrentByDefinitionId(definition.getDefinitionId());
assertTrue(currentEntities == null || currentEntities.isEmpty());
historyEntities = alertsDao.findAll();
assertTrue(historyEntities == null || historyEntities.isEmpty());
group = dispatchDao.findGroupById(group.getGroupId());
assertNotNull(group);
assertNotNull(group.getAlertDefinitions());
assertEquals(0, group.getAlertDefinitions().size());
}
}
| |
package org.zstack.storage.primary;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.core.asyncbatch.While;
import org.zstack.core.cascade.CascadeConstant;
import org.zstack.core.cascade.CascadeFacade;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.cloudbus.CloudBusCallBack;
import org.zstack.core.cloudbus.CloudBusListCallBack;
import org.zstack.core.cloudbus.EventFacade;
import org.zstack.core.db.*;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.core.job.JobQueueFacade;
import org.zstack.core.thread.ChainTask;
import org.zstack.core.thread.SyncTaskChain;
import org.zstack.core.thread.ThreadFacade;
import org.zstack.core.trash.StorageTrash;
import org.zstack.core.trash.TrashType;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.core.workflow.ShareFlow;
import org.zstack.header.core.Completion;
import org.zstack.header.core.NoErrorCompletion;
import org.zstack.header.core.NopeCompletion;
import org.zstack.header.core.ReturnValueCompletion;
import org.zstack.header.core.trash.CleanTrashResult;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.header.host.DetachIsoOnPrimaryStorageMsg;
import org.zstack.header.message.APIDeleteMessage;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.message.MessageReply;
import org.zstack.header.storage.backup.StorageTrashSpec;
import org.zstack.header.storage.primary.*;
import org.zstack.header.storage.primary.PrimaryStorageCanonicalEvent.PrimaryStorageDeletedData;
import org.zstack.header.storage.primary.PrimaryStorageCanonicalEvent.PrimaryStorageStatusChangedData;
import org.zstack.header.storage.snapshot.*;
import org.zstack.header.vm.StopVmInstanceMsg;
import org.zstack.header.vm.VmAttachVolumeValidatorMethod;
import org.zstack.header.vm.VmInstanceConstant;
import org.zstack.header.volume.VolumeConstant;
import org.zstack.header.volume.VolumeReportPrimaryStorageCapacityUsageMsg;
import org.zstack.header.volume.VolumeReportPrimaryStorageCapacityUsageReply;
import org.zstack.utils.CollectionDSL;
import org.zstack.utils.DebugUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.logging.CLogger;
import javax.persistence.LockModeType;
import javax.persistence.TypedQuery;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.zstack.core.Platform.err;
import static org.zstack.core.Platform.operr;
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE, dependencyCheck = true)
public abstract class PrimaryStorageBase extends AbstractPrimaryStorage {
private final static CLogger logger = Utils.getLogger(PrimaryStorageBase.class);
protected PrimaryStorageVO self;
@Autowired
protected CloudBus bus;
@Autowired
protected DatabaseFacade dbf;
@Autowired
protected JobQueueFacade jobf;
@Autowired
protected PrimaryStorageExtensionPointEmitter extpEmitter;
@Autowired
protected CascadeFacade casf;
@Autowired
protected ErrorFacade errf;
@Autowired
protected ThreadFacade thdf;
@Autowired
protected PrimaryStorageOverProvisioningManager ratioMgr;
@Autowired
protected EventFacade evtf;
@Autowired
protected PrimaryStoragePingTracker tracker;
@Autowired
protected StorageTrash trash;
    /**
     * No-arg constructor; {@link #self} is expected to be assigned separately
     * before the instance is used (TODO confirm which framework path does this).
     */
    public PrimaryStorageBase() {
    }
    /**
     * Value holder for a primary storage's physical capacity report,
     * returned by {@link #syncPhysicalCapacity}.
     */
    public static class PhysicalCapacityUsage {
        // total physical capacity of the storage
        public long totalPhysicalSize;
        // physical capacity still available
        public long availablePhysicalSize;
    }
    /**
     * Parameters passed to {@link #connectHook}. {@code newAdded} indicates
     * whether the storage was just added (first connect) as opposed to a
     * reconnect.
     */
    public static class ConnectParam {
        private boolean newAdded;
        public boolean isNewAdded() {
            return newAdded;
        }
        public void setNewAdded(boolean newAdded) {
            this.newAdded = newAdded;
        }
    }
    // ---- message handlers every concrete primary storage must implement ----

    // volume lifecycle and data-volume transfer
    protected abstract void handle(InstantiateVolumeOnPrimaryStorageMsg msg);
    protected abstract void handle(DeleteVolumeOnPrimaryStorageMsg msg);
    protected abstract void handle(CreateTemplateFromVolumeOnPrimaryStorageMsg msg);
    protected abstract void handle(DownloadDataVolumeToPrimaryStorageMsg msg);
    protected abstract void handle(GetInstallPathForDataVolumeDownloadMsg msg);
    protected abstract void handle(DeleteVolumeBitsOnPrimaryStorageMsg msg);
    protected abstract void handle(DeleteBitsOnPrimaryStorageMsg msg);

    // ISO handling
    protected abstract void handle(DownloadIsoToPrimaryStorageMsg msg);
    protected abstract void handle(DeleteIsoFromPrimaryStorageMsg msg);

    // snapshot operations
    protected abstract void handle(AskVolumeSnapshotCapabilityMsg msg);
    protected abstract void handle(SyncVolumeSizeOnPrimaryStorageMsg msg);
    protected abstract void handle(MergeVolumeSnapshotOnPrimaryStorageMsg msg);
    protected abstract void handle(DeleteSnapshotOnPrimaryStorageMsg msg);
    protected abstract void handle(RevertVolumeFromSnapshotOnPrimaryStorageMsg msg);
    protected abstract void handle(ReInitRootVolumeFromTemplateOnPrimaryStorageMsg msg);
    protected abstract void handle(AskInstallPathForNewSnapshotMsg msg);
    protected abstract void handle(GetPrimaryStorageResourceLocationMsg msg);
    protected abstract void handle(CheckVolumeSnapshotOperationOnPrimaryStorageMsg msg);

    // lifecycle hooks: connect, health ping, capacity synchronization
    protected abstract void connectHook(ConnectParam param, Completion completion);
    protected abstract void pingHook(Completion completion);
    protected abstract void syncPhysicalCapacity(ReturnValueCompletion<PhysicalCapacityUsage> completion);
    /**
     * @param self database row (VO) backing this primary storage instance
     */
    public PrimaryStorageBase(PrimaryStorageVO self) {
        this.self = self;
    }
    /** @return inventory view of {@link #self}, suitable for messages/events */
    protected PrimaryStorageInventory getSelfInventory() {
        return PrimaryStorageInventory.valueOf(self);
    }
protected String getSyncId() {
return String.format("primaryStorage-%s", self.getUuid());
}
    // Trash categories that pertain to primary storage resources.
    protected static List<TrashType> trashLists = CollectionDSL.list(TrashType.MigrateVolume, TrashType.MigrateVolumeSnapshot, TrashType.RevertVolume, TrashType.VolumeSnapshot, TrashType.ReimageVolume);
protected void fireDisconnectedCanonicalEvent(ErrorCode reason) {
PrimaryStorageCanonicalEvent.DisconnectedData data = new PrimaryStorageCanonicalEvent.DisconnectedData();
data.setPrimaryStorageUuid(self.getUuid());
data.setReason(reason);
evtf.fire(PrimaryStorageCanonicalEvent.PRIMARY_STORAGE_DISCONNECTED, data);
}
    // Default attach hook: no backend work needed; subclasses override when
    // attaching to a cluster requires preparation.
    @Override
    public void attachHook(String clusterUuid, Completion completion) {
        completion.success();
    }
    // Default detach hook: succeeds immediately.
    @Override
    public void detachHook(String clusterUuid, Completion completion) {
        completion.success();
    }
    // Invoked when the primary storage record is deleted; default is a no-op.
    @Override
    public void deleteHook() {
    }
    // Invoked on state transitions (Enable/Disable/Maintenance); default no-op.
    @Override
    public void changeStateHook(PrimaryStorageStateEvent evt, PrimaryStorageState nextState) {
    }
@Override
public void handleMessage(Message msg) {
try {
if (msg instanceof APIMessage) {
handleApiMessage((APIMessage) msg);
} else {
handleLocalMessage(msg);
}
} catch (Exception e) {
bus.logExceptionWithMessageDump(msg, e);
bus.replyErrorByMessageType(msg, e);
}
}
// if new kind of storage is added , override it
protected void checkImageIfNeedToDownload(DownloadIsoToPrimaryStorageMsg msg){
logger.debug("check if image exist in disabled primary storage");
if(self.getState() != PrimaryStorageState.Disabled){
return ;
}
if( !Q.New(ImageCacheVO.class)
.eq(ImageCacheVO_.primaryStorageUuid, self.getUuid())
.eq(ImageCacheVO_.imageUuid, msg.getIsoSpec().getInventory().getUuid())
.isExists()){
throw new OperationFailureException(operr(
"cannot attach ISO to a primary storage[uuid:%s] which is disabled",
self.getUuid()));
}
}
private class PrimaryStorageValidater{
private boolean forbidOperationWhenPrimaryStorageDisable = false;
private boolean forbidOperationWhenPrimaryStorageMaintenance = false;
public PrimaryStorageValidater disable(){
this.forbidOperationWhenPrimaryStorageDisable = true;
return this;
}
public PrimaryStorageValidater maintenance(){
this.forbidOperationWhenPrimaryStorageMaintenance = true;
return this;
}
public void validate(){
ErrorCode errorCode = new ErrorCode();
errorCode.setCode(PrimaryStorageErrors.ALLOCATE_ERROR.toString());
errorCode.setDescription("Operation is not permitted");
if (forbidOperationWhenPrimaryStorageDisable && self.getState().equals(PrimaryStorageState.Disabled)) {
String error = "Operation is not permitted when primary storage status is 'Disabled', please check primary storage status";
errorCode.setDetails(error);
}
if (forbidOperationWhenPrimaryStorageMaintenance && self.getState().equals(PrimaryStorageState.Maintenance)) {
String error = "Operation is not permitted when primary storage status is 'Maintenance', please check primary storage status";
errorCode.setDetails(error);
}
if (null != errorCode.getDetails()){
throw new OperationFailureException(errorCode);
}
}
}
private void checkPrimaryStatus(Message msg) {
if (msg instanceof InstantiateVolumeOnPrimaryStorageMsg) {
new PrimaryStorageValidater().disable().maintenance()
.validate();
} else if (msg instanceof DownloadVolumeTemplateToPrimaryStorageMsg) {
new PrimaryStorageValidater().disable().maintenance()
.validate();
} else if (msg instanceof CreateTemplateFromVolumeOnPrimaryStorageMsg) {
new PrimaryStorageValidater().disable().maintenance()
.validate();
} else if (msg instanceof DownloadDataVolumeToPrimaryStorageMsg) {
new PrimaryStorageValidater().disable().maintenance()
.validate();
} else if (msg instanceof DeleteVolumeBitsOnPrimaryStorageMsg) {
new PrimaryStorageValidater().maintenance()
.validate();
} else if (msg instanceof DeleteIsoFromPrimaryStorageMsg) {
new PrimaryStorageValidater().maintenance()
.validate();
} else if (msg instanceof AskVolumeSnapshotCapabilityMsg) {
new PrimaryStorageValidater().disable().maintenance()
.validate();
} else if (msg instanceof MergeVolumeSnapshotOnPrimaryStorageMsg) {
new PrimaryStorageValidater().maintenance()
.validate();
} else if (msg instanceof RevertVolumeFromSnapshotOnPrimaryStorageMsg) {
new PrimaryStorageValidater().maintenance()
.validate();
} else if (msg instanceof ReInitRootVolumeFromTemplateOnPrimaryStorageMsg) {
new PrimaryStorageValidater().disable().maintenance()
.validate();
} else if (msg instanceof CheckVolumeSnapshotOperationOnPrimaryStorageMsg) {
SnapshotBackendOperation operation = ((CheckVolumeSnapshotOperationOnPrimaryStorageMsg) msg).getOperation();
if (operation == SnapshotBackendOperation.FILE_CREATION) {
new PrimaryStorageValidater().disable().maintenance()
.validate();
}
}
}
    // Dispatches internal (non-API) messages after the shared state gate.
    // Some messages go through handleBase() wrappers that run extra checks
    // (backup storage attachment, image cache, connection status) before
    // delegating to the backend-specific handle(). Branch order matters for
    // instanceof dispatch; do not reorder without checking type hierarchies.
    protected void handleLocalMessage(Message msg) {
        checkPrimaryStatus(msg);
        if (msg instanceof PrimaryStorageReportPhysicalCapacityMsg) {
            handle((PrimaryStorageReportPhysicalCapacityMsg) msg);
        } else if (msg instanceof RecalculatePrimaryStorageCapacityMsg) {
            handle((RecalculatePrimaryStorageCapacityMsg) msg);
        } else if (msg instanceof InstantiateVolumeOnPrimaryStorageMsg) {
            handle((InstantiateVolumeOnPrimaryStorageMsg) msg);
        } else if (msg instanceof DeleteVolumeOnPrimaryStorageMsg) {
            handle((DeleteVolumeOnPrimaryStorageMsg) msg);
        } else if (msg instanceof DeleteBitsOnPrimaryStorageMsg) {
            handle((DeleteBitsOnPrimaryStorageMsg) msg);
        } else if (msg instanceof CreateTemplateFromVolumeOnPrimaryStorageMsg) {
            handleBase((CreateTemplateFromVolumeOnPrimaryStorageMsg) msg);
        } else if (msg instanceof PrimaryStorageDeletionMsg) {
            handle((PrimaryStorageDeletionMsg) msg);
        } else if (msg instanceof DetachPrimaryStorageFromClusterMsg) {
            handle((DetachPrimaryStorageFromClusterMsg) msg);
        } else if (msg instanceof DownloadDataVolumeToPrimaryStorageMsg) {
            handleBase((DownloadDataVolumeToPrimaryStorageMsg) msg);
        } else if (msg instanceof DeleteVolumeBitsOnPrimaryStorageMsg) {
            handle((DeleteVolumeBitsOnPrimaryStorageMsg) msg);
        } else if (msg instanceof ConnectPrimaryStorageMsg) {
            handle((ConnectPrimaryStorageMsg) msg);
        } else if (msg instanceof DownloadIsoToPrimaryStorageMsg) {
            handleBase((DownloadIsoToPrimaryStorageMsg) msg);
        } else if (msg instanceof DeleteIsoFromPrimaryStorageMsg) {
            handle((DeleteIsoFromPrimaryStorageMsg) msg);
        } else if (msg instanceof AskVolumeSnapshotCapabilityMsg) {
            handle((AskVolumeSnapshotCapabilityMsg) msg);
        } else if (msg instanceof SyncVolumeSizeOnPrimaryStorageMsg) {
            handle((SyncVolumeSizeOnPrimaryStorageMsg) msg);
        } else if (msg instanceof PingPrimaryStorageMsg) {
            handle((PingPrimaryStorageMsg) msg);
        } else if (msg instanceof ChangePrimaryStorageStatusMsg) {
            handle((ChangePrimaryStorageStatusMsg) msg);
        } else if (msg instanceof ReconnectPrimaryStorageMsg) {
            handle((ReconnectPrimaryStorageMsg) msg);
        } else if (msg instanceof RevertVolumeFromSnapshotOnPrimaryStorageMsg) {
            handle((RevertVolumeFromSnapshotOnPrimaryStorageMsg) msg);
        } else if (msg instanceof ReInitRootVolumeFromTemplateOnPrimaryStorageMsg) {
            handle((ReInitRootVolumeFromTemplateOnPrimaryStorageMsg) msg);
        } else if (msg instanceof MergeVolumeSnapshotOnPrimaryStorageMsg) {
            handle((MergeVolumeSnapshotOnPrimaryStorageMsg) msg);
        } else if (msg instanceof DeleteSnapshotOnPrimaryStorageMsg) {
            handle((DeleteSnapshotOnPrimaryStorageMsg) msg);
        } else if (msg instanceof UpdatePrimaryStorageHostStatusMsg) {
            handle((UpdatePrimaryStorageHostStatusMsg) msg);
        } else if (msg instanceof GetInstallPathForDataVolumeDownloadMsg) {
            handle((GetInstallPathForDataVolumeDownloadMsg) msg);
        } else if (msg instanceof AskInstallPathForNewSnapshotMsg) {
            handle((AskInstallPathForNewSnapshotMsg) msg);
        } else if ((msg instanceof SyncPrimaryStorageCapacityMsg)) {
            handle((SyncPrimaryStorageCapacityMsg) msg);
        } else if ((msg instanceof DetachIsoOnPrimaryStorageMsg)) {
            handle((DetachIsoOnPrimaryStorageMsg) msg);
        } else if (msg instanceof DownloadVolumeTemplateToPrimaryStorageMsg) {
            handle((DownloadVolumeTemplateToPrimaryStorageMsg) msg);
        } else if ((msg instanceof CheckInstallPathInTrashMsg)) {
            handle((CheckInstallPathInTrashMsg) msg);
        } else if ((msg instanceof CleanUpTrashOnPrimaryStroageMsg)) {
            handle((CleanUpTrashOnPrimaryStroageMsg) msg);
        } else if ((msg instanceof GetVolumeSnapshotSizeOnPrimaryStorageMsg)) {
            handle((GetVolumeSnapshotSizeOnPrimaryStorageMsg) msg);
        } else if ((msg instanceof CleanUpTrashOnPrimaryStorageMsg)) {
            handle((CleanUpTrashOnPrimaryStorageMsg) msg);
        } else if ((msg instanceof GetPrimaryStorageResourceLocationMsg)) {
            handle((GetPrimaryStorageResourceLocationMsg) msg);
        } else if (msg instanceof CheckVolumeSnapshotOperationOnPrimaryStorageMsg) {
            handleBase((CheckVolumeSnapshotOperationOnPrimaryStorageMsg) msg);
        } else {
            bus.dealWithUnknownMessage(msg);
        }
    }
    // Cleans up trash for the legacy message type. NOTE(review): the message
    // class name contains a typo ("Stroage"); the correctly-named
    // CleanUpTrashOnPrimaryStorageMsg handler below is near-identical — the two
    // differ only in sync signature and completion ownership.
    // Serialized on the per-storage sync queue via getSyncId().
    protected void handle(final CleanUpTrashOnPrimaryStroageMsg msg) {
        MessageReply reply = new MessageReply();
        thdf.chainSubmit(new ChainTask(msg) {
            private String name = String.format("cleanup-trash-on-%s", self.getUuid());
            @Override
            public String getSyncSignature() {
                return getSyncId();
            }
            @Override
            public void run(SyncTaskChain chain) {
                // null trashId means "clean everything" (see cleanUpTrash)
                cleanUpTrash(msg.getTrashId(), new ReturnValueCompletion<CleanTrashResult>(msg) {
                    @Override
                    public void success(CleanTrashResult returnValue) {
                        bus.reply(msg, reply);
                        chain.next();
                    }
                    @Override
                    public void fail(ErrorCode errorCode) {
                        reply.setError(errorCode);
                        bus.reply(msg, reply);
                        chain.next();
                    }
                });
            }
            @Override
            public String getName() {
                return name;
            }
        });
    }
protected void handle(final CheckInstallPathInTrashMsg msg) {
CheckInstallPathInTrashReply reply = new CheckInstallPathInTrashReply();
Long trashId = trash.getTrashId(self.getUuid(), msg.getInstallPath());
if (trashId != null) {
StorageTrashSpec spec = trash.getTrash(self.getUuid(), trashId);
reply.setTrashId(trashId);
reply.setResourceUuid(spec.getResourceUuid());
}
bus.reply(msg, reply);
}
    // Default implementation: acknowledge without doing any work; storage types
    // that support volume-template download override this.
    protected void handle(DownloadVolumeTemplateToPrimaryStorageMsg msg) {
        MessageReply reply = new MessageReply();
        bus.reply(msg, reply);
    }
    // Default implementation: detaching an ISO needs no backend work here.
    protected void handle(final DetachIsoOnPrimaryStorageMsg msg) {
        MessageReply reply = new MessageReply();
        bus.reply(msg, reply);
    }
    // Delegates to updatePrimaryStorageHostStatus for each ps uuid in the message.
    // NOTE(review): no reply is sent for this message — presumably it is fire-and-forget.
    protected void handle(UpdatePrimaryStorageHostStatusMsg msg){
        updatePrimaryStorageHostStatus(msg.getPrimaryStorageUuids(), msg.getHostUuid(), msg.getStatus(), msg.getReason());
    }
protected void updatePrimaryStorageHostStatus(List<String> psUuids, String hostUuid, PrimaryStorageHostStatus newStatus, ErrorCode reason){
List<PrimaryStorageCanonicalEvent.PrimaryStorageHostStatusChangeData> datas = new ArrayList<>();
new SQLBatch(){
@Override
protected void scripts() {
for(String psUuid : psUuids){
PrimaryStorageHostStatus oldStatus = Q.New(PrimaryStorageHostRefVO.class)
.eq(PrimaryStorageHostRefVO_.hostUuid, hostUuid)
.eq(PrimaryStorageHostRefVO_.primaryStorageUuid, psUuid)
.select(PrimaryStorageHostRefVO_.status)
.findValue();
if (oldStatus == newStatus) {
return;
}
if (oldStatus == null) {
PrimaryStorageHostRefVO ref = new PrimaryStorageHostRefVO();
ref.setHostUuid(hostUuid);
ref.setPrimaryStorageUuid(psUuid);
ref.setStatus(newStatus);
persist(ref);
} else {
sql(PrimaryStorageHostRefVO.class)
.eq(PrimaryStorageHostRefVO_.primaryStorageUuid, psUuid)
.eq(PrimaryStorageHostRefVO_.hostUuid, hostUuid)
.set(PrimaryStorageHostRefVO_.status, newStatus)
.update();
}
logger.debug(String.format("change status between primary storage[uuid:%s]" +
" and host[uuid:%s] from %s to %s in db",
psUuid, hostUuid, oldStatus == null ? "unknown" : oldStatus.toString(), newStatus));
PrimaryStorageCanonicalEvent.PrimaryStorageHostStatusChangeData data =
new PrimaryStorageCanonicalEvent.PrimaryStorageHostStatusChangeData();
data.setHostUuid(hostUuid);
data.setPrimaryStorageUuid(psUuid);
data.setNewStatus(newStatus);
data.setOldStatus(oldStatus);
data.setReason(reason);
datas.add(data);
}
}
}.execute();
datas.forEach(it -> evtf.fire(PrimaryStorageCanonicalEvent.PRIMARY_STORAGE_HOST_STATUS_CHANGED_PATH, it));
}
protected void handle(RecalculatePrimaryStorageCapacityMsg msg) {
RecalculatePrimaryStorageCapacityReply reply = new RecalculatePrimaryStorageCapacityReply();
PrimaryStorageCapacityRecalculator recalculator = new PrimaryStorageCapacityRecalculator();
recalculator.psUuids = Arrays.asList(msg.getPrimaryStorageUuid());
recalculator.recalculate();
bus.reply(msg, reply);
}
    // Re-runs the connect flow; replies with an error when the connect fails.
    protected void handle(ReconnectPrimaryStorageMsg msg) {
        ReconnectPrimaryStorageReply reply = new ReconnectPrimaryStorageReply();
        doConnect(new ConnectParam(), new Completion(msg) {
            @Override
            public void success() {
                bus.reply(msg, reply);
            }
            @Override
            public void fail(ErrorCode errorCode) {
                reply.setError(errorCode);
                bus.reply(msg, reply);
            }
        });
    }
    // Forcibly sets the storage status (Connected/Disconnected/...) and replies.
    private void handle(ChangePrimaryStorageStatusMsg msg) {
        changeStatus(PrimaryStorageStatus.valueOf(msg.getStatus()));
        ChangePrimaryStorageStatusReply reply = new ChangePrimaryStorageStatusReply();
        bus.reply(msg, reply);
    }
    // Health check: delegates to the backend pingHook. A successful ping while
    // Disconnected kicks off a reconnect; a failed ping flips the status to
    // Disconnected (firing the canonical event only on an actual transition).
    private void handle(final PingPrimaryStorageMsg msg) {
        final PingPrimaryStorageReply reply = new PingPrimaryStorageReply();
        pingHook(new Completion(msg) {
            @Override
            public void success() {
                if (self.getStatus() == PrimaryStorageStatus.Disconnected) {
                    // ping succeeded but we are marked Disconnected: try reconnecting
                    doConnect(new ConnectParam(), new NopeCompletion());
                }
                reply.setConnected(true);
                bus.reply(msg, reply);
            }
            @Override
            public void fail(ErrorCode errorCode) {
                // changeStatus returns true only when the status actually changed
                if (changeStatus(PrimaryStorageStatus.Disconnected)) {
                    fireDisconnectedCanonicalEvent(errorCode);
                }
                reply.setConnected(false);
                reply.setError(errorCode);
                bus.reply(msg, reply);
            }
        });
    }
    // Pre-checks for ISO download: the source backup storage must be attached to
    // this zone, and a Disabled storage must already have the image cached.
    private void handleBase(DownloadIsoToPrimaryStorageMsg msg) {
        checkIfBackupStorageAttachedToMyZone(msg.getIsoSpec().getSelectedBackupStorage().getBackupStorageUuid());
        checkImageIfNeedToDownload(msg);
        handle(msg);
    }
    // Runs the backend connectHook on the per-storage sync queue.
    // Status transitions: -> Connecting, then Connected on success or
    // Disconnected on failure. On success a capacity recalculation is kicked
    // off asynchronously; in both outcomes the storage is (re)tracked so the
    // ping loop keeps monitoring it.
    private void doConnect(ConnectParam param, final Completion completion) {
        thdf.chainSubmit(new ChainTask(completion) {
            @Override
            public String getSyncSignature() {
                return getSyncId();
            }
            @Override
            public void run(SyncTaskChain chain) {
                changeStatus(PrimaryStorageStatus.Connecting);
                connectHook(param, new Completion(chain, completion) {
                    @Override
                    public void success() {
                        // reload: connectHook may have modified the VO
                        self = dbf.reload(self);
                        changeStatus(PrimaryStorageStatus.Connected);
                        logger.debug(String.format("successfully connected primary storage[uuid:%s]", self.getUuid()));
                        RecalculatePrimaryStorageCapacityMsg rmsg = new RecalculatePrimaryStorageCapacityMsg();
                        rmsg.setPrimaryStorageUuid(self.getUuid());
                        bus.makeLocalServiceId(rmsg, PrimaryStorageConstant.SERVICE_ID);
                        bus.send(rmsg);
                        tracker.track(self.getUuid());
                        completion.success();
                        chain.next();
                    }
                    @Override
                    public void fail(ErrorCode errorCode) {
                        tracker.track(self.getUuid());
                        self = dbf.reload(self);
                        // fire the canonical event only on a real transition and
                        // only when the failure is not itself a DISCONNECTED error
                        if (changeStatus(PrimaryStorageStatus.Disconnected) && !errorCode.isError(PrimaryStorageErrors.DISCONNECTED)) {
                            fireDisconnectedCanonicalEvent(errorCode);
                        }
                        logger.debug(String.format("failed to connect primary storage[uuid:%s], %s", self.getUuid(), errorCode));
                        completion.fail(errorCode);
                        chain.next();
                    }
                });
            }
            @Override
            public String getName() {
                return String.format("reconnect-primary-storage-%s", self.getUuid());
            }
        });
    }
    // Connects the storage. For a newly-added storage a connect failure is an
    // error (the add should fail); for an existing one it merely reports
    // connected=false so the caller can retry later.
    private void handle(final ConnectPrimaryStorageMsg msg) {
        final ConnectPrimaryStorageReply reply = new ConnectPrimaryStorageReply();
        ConnectParam param = new ConnectParam();
        param.newAdded = msg.isNewAdded();
        doConnect(param, new Completion(msg) {
            @Override
            public void success() {
                reply.setConnected(true);
                bus.reply(msg, reply);
            }
            @Override
            public void fail(ErrorCode errorCode) {
                if (msg.isNewAdded()) {
                    reply.setError(errorCode);
                } else {
                    reply.setConnected(false);
                }
                bus.reply(msg, reply);
            }
        });
    }
private void handleBase(CheckVolumeSnapshotOperationOnPrimaryStorageMsg msg) {
if (self.getStatus() != PrimaryStorageStatus.Connected) {
CheckVolumeSnapshotOperationOnPrimaryStorageReply reply = new CheckVolumeSnapshotOperationOnPrimaryStorageReply();
reply.setError(err(PrimaryStorageErrors.DISCONNECTED, "primary storage[uuid:%s] is not Connected", self.getUuid()));
bus.reply(msg, reply);
return;
}
handle(msg);
}
    // Pre-check for data volume download: the source backup storage must be
    // attached to this storage's zone.
    private void handleBase(DownloadDataVolumeToPrimaryStorageMsg msg) {
        checkIfBackupStorageAttachedToMyZone(msg.getBackupStorageRef().getBackupStorageUuid());
        handle(msg);
    }
@Transactional(readOnly = true)
private void checkIfBackupStorageAttachedToMyZone(String bsUuid) {
String sql = "select bs.uuid" +
" from BackupStorageVO bs, BackupStorageZoneRefVO ref" +
" where bs.uuid = ref.backupStorageUuid" +
" and ref.zoneUuid = :zoneUuid" +
" and bs.uuid = :bsUuid";
TypedQuery<String> q = dbf.getEntityManager().createQuery(sql, String.class);
q.setParameter("zoneUuid", self.getZoneUuid());
q.setParameter("bsUuid", bsUuid);
if (q.getResultList().isEmpty()) {
throw new OperationFailureException(operr("backup storage[uuid:%s] is not attached to zone[uuid:%s] the primary storage[uuid:%s] belongs to",
bsUuid, self.getZoneUuid(), self.getUuid()));
}
}
    // Pre-checks for template creation: destination backup storage must be in
    // this zone, and the storage must be neither Disabled nor in Maintenance.
    private void handleBase(CreateTemplateFromVolumeOnPrimaryStorageMsg msg) {
        checkIfBackupStorageAttachedToMyZone(msg.getBackupStorageUuid());
        new PrimaryStorageValidater().disable().maintenance()
                .validate();
        handle(msg);
    }
    // Detaches this storage from a cluster on the per-storage sync queue,
    // wrapping the backend detachHook with before/after/fail extension points.
    private void handle(final DetachPrimaryStorageFromClusterMsg msg) {
        final DetachPrimaryStorageFromClusterReply reply = new DetachPrimaryStorageFromClusterReply();
        thdf.chainSubmit(new ChainTask(msg) {
            @Override
            public String getSyncSignature() {
                return getSyncId();
            }
            @Override
            public void run(final SyncTaskChain chain) {
                extpEmitter.beforeDetach(self, msg.getClusterUuid());
                detachHook(msg.getClusterUuid(), new Completion(msg, chain) {
                    @Override
                    public void success() {
                        // reload: detachHook may have modified the VO
                        self = dbf.reload(self);
                        extpEmitter.afterDetach(self, msg.getClusterUuid());
                        logger.debug(String.format("successfully detached primary storage[name: %s, uuid:%s]",
                                self.getName(), self.getUuid()));
                        bus.reply(msg, reply);
                        chain.next();
                    }
                    @Override
                    public void fail(ErrorCode errorCode) {
                        extpEmitter.failToDetach(self, msg.getClusterUuid());
                        logger.warn(errorCode.toString());
                        reply.setError(err(PrimaryStorageErrors.DETACH_ERROR, errorCode, errorCode.getDetails()));
                        bus.reply(msg, reply);
                        chain.next();
                    }
                });
            }
            @Override
            public String getName() {
                return String.format("detach-primary-storage-%s-from-%s", self.getUuid(), msg.getClusterUuid());
            }
        });
    }
private void handle(PrimaryStorageDeletionMsg msg) {
PrimaryStorageInventory inv = PrimaryStorageInventory.valueOf(self);
extpEmitter.beforeDelete(inv);
deleteHook();
extpEmitter.afterDelete(inv);
tracker.untrack(self.getUuid());
PrimaryStorageDeletionReply reply = new PrimaryStorageDeletionReply();
bus.reply(msg, reply);
}
    // Persists reported physical capacity under a pessimistic write lock so
    // concurrent capacity reports do not clobber each other.
    @Transactional
    private void updateCapacity(long total, long avail) {
        PrimaryStorageCapacityVO cvo = dbf.getEntityManager().find(PrimaryStorageCapacityVO.class,
                self.getUuid(), LockModeType.PESSIMISTIC_WRITE);
        DebugUtils.Assert(cvo != null, String.format("how can there is no PrimaryStorageCapacityVO[uuid:%s]", self.getUuid()));
        cvo.setTotalPhysicalCapacity(total);
        cvo.setAvailablePhysicalCapacity(avail);
        dbf.getEntityManager().merge(cvo);
    }
    // Agent-reported physical capacity: persist and acknowledge.
    private void handle(PrimaryStorageReportPhysicalCapacityMsg msg) {
        updateCapacity(msg.getTotalCapacity(), msg.getAvailableCapacity());
        bus.reply(msg, new MessageReply());
    }
    // Dispatches API messages to their handlers; unknown API messages get the
    // standard unknown-message treatment.
    protected void handleApiMessage(APIMessage msg) {
        if (msg instanceof APIDeletePrimaryStorageMsg) {
            handle((APIDeletePrimaryStorageMsg) msg);
        } else if (msg instanceof APIChangePrimaryStorageStateMsg) {
            handle((APIChangePrimaryStorageStateMsg) msg);
        } else if (msg instanceof APIAttachPrimaryStorageToClusterMsg) {
            handle((APIAttachPrimaryStorageToClusterMsg) msg);
        } else if (msg instanceof APIDetachPrimaryStorageFromClusterMsg) {
            handle((APIDetachPrimaryStorageFromClusterMsg) msg);
        } else if (msg instanceof APIReconnectPrimaryStorageMsg) {
            handle((APIReconnectPrimaryStorageMsg) msg);
        } else if (msg instanceof APIUpdatePrimaryStorageMsg) {
            handle((APIUpdatePrimaryStorageMsg) msg);
        } else if (msg instanceof APISyncPrimaryStorageCapacityMsg) {
            handle((APISyncPrimaryStorageCapacityMsg) msg);
        } else if (msg instanceof APICleanUpImageCacheOnPrimaryStorageMsg) {
            handle((APICleanUpImageCacheOnPrimaryStorageMsg) msg);
        } else if (msg instanceof APICleanUpTrashOnPrimaryStorageMsg) {
            handle((APICleanUpTrashOnPrimaryStorageMsg) msg);
        } else if (msg instanceof APIGetTrashOnPrimaryStorageMsg) {
            handle((APIGetTrashOnPrimaryStorageMsg) msg);
        } else {
            bus.dealWithUnknownMessage(msg);
        }
    }
    // Image-cache cleanup is optional; storage types that support it override
    // this. The base implementation rejects the request.
    protected void handle(APICleanUpImageCacheOnPrimaryStorageMsg msg) {
        throw new OperationFailureException(operr("operation not supported"));
    }
private void handle(final APIGetTrashOnPrimaryStorageMsg msg) {
APIGetTrashOnPrimaryStorageReply reply = new APIGetTrashOnPrimaryStorageReply();
List<TrashType> lists = msg.getTrashType() != null ? CollectionDSL.list(TrashType.valueOf(msg.getTrashType())) : trashLists;
Map<String, StorageTrashSpec> trashs = trash.getTrashList(self.getUuid(), lists);
if (msg.getResourceUuid() == null) {
reply.getStorageTrashSpecs().addAll(trash.getTrashList(self.getUuid(), trashLists).values());
} else {
trashs.values().forEach(t -> {
if (msg.getResourceUuid().equals(t.getResourceUuid()) && msg.getResourceType().equals(t.getResourceType())) {
reply.getStorageTrashSpecs().add(t);
}
});
}
bus.reply(msg, reply);
}
    // Accumulates reclaimed bytes into the result; synchronized because the
    // cleanup loop below completes entries concurrently.
    protected synchronized void updateTrashSize(CleanTrashResult result, Long size) {
        result.setSize(result.getSize() + size);
    }
    // Deletes a single trash entry's bits from the storage, returns the freed
    // space to the capacity pool, and removes the trash record. Succeeds with
    // an empty result when the trash id is unknown; fails when the install path
    // is still referenced by a live resource.
    private void cleanTrash(Long trashId, final ReturnValueCompletion<CleanTrashResult> completion) {
        CleanTrashResult result = new CleanTrashResult();
        StorageTrashSpec spec = trash.getTrash(self.getUuid(), trashId);
        if (spec == null) {
            completion.success(result);
            return;
        }
        if (!trash.makeSureInstallPathNotUsed(spec)) {
            completion.fail(operr("%s is still in using by %s, cannot remove it from trash...", spec.getInstallPath(), spec.getResourceType()));
            return;
        }
        DeleteVolumeBitsOnPrimaryStorageMsg msg = new DeleteVolumeBitsOnPrimaryStorageMsg();
        msg.setPrimaryStorageUuid(self.getUuid());
        msg.setInstallPath(spec.getInstallPath());
        msg.setHypervisorType(spec.getHypervisorType());
        msg.setFolder(spec.isFolder());
        msg.setBitsUuid(spec.getResourceUuid());
        msg.setBitsType(spec.getResourceType());
        bus.makeTargetServiceIdByResourceUuid(msg, PrimaryStorageConstant.SERVICE_ID, self.getUuid());
        bus.send(msg, new CloudBusCallBack(msg) {
            @Override
            public void run(MessageReply reply) {
                if (reply.isSuccess()) {
                    logger.info(String.format("Deleted volume %s in Trash.", spec.getInstallPath()));
                    // return the reclaimed space to the capacity pool
                    IncreasePrimaryStorageCapacityMsg imsg = new IncreasePrimaryStorageCapacityMsg();
                    imsg.setPrimaryStorageUuid(self.getUuid());
                    imsg.setDiskSize(spec.getSize());
                    bus.makeTargetServiceIdByResourceUuid(imsg, PrimaryStorageConstant.SERVICE_ID, self.getUuid());
                    bus.send(imsg);
                    trash.removeFromDb(trashId);
                    logger.info(String.format("Returned space[size:%s] to PS %s after volume migration", spec.getSize(), self.getUuid()));
                    result.setSize(spec.getSize());
                    result.setResourceUuids(CollectionDSL.list(spec.getResourceUuid()));
                    completion.success(result);
                } else {
                    logger.warn(String.format("Failed to delete volume %s in Trash, because: %s", spec.getInstallPath(), reply.getError().getDetails()));
                    completion.fail(reply.getError());
                }
            }
        });
    }
    // Cleans up trash (one entry, or all when trashId is null) on a dedicated
    // per-storage cleanup queue (sync signature = task name, unlike the legacy
    // "Stroage"-typo variant above which serializes on getSyncId()).
    protected void handle(final CleanUpTrashOnPrimaryStorageMsg msg) {
        MessageReply reply = new MessageReply();
        thdf.chainSubmit(new ChainTask(msg) {
            private String name = String.format("cleanup-trash-on-%s", self.getUuid());
            @Override
            public String getSyncSignature() {
                return name;
            }
            @Override
            public void run(SyncTaskChain chain) {
                cleanUpTrash(msg.getTrashId(), new ReturnValueCompletion<CleanTrashResult>(chain) {
                    @Override
                    public void success(CleanTrashResult result) {
                        bus.reply(msg, reply);
                        chain.next();
                    }
                    @Override
                    public void fail(ErrorCode errorCode) {
                        reply.setError(errorCode);
                        bus.reply(msg, reply);
                        chain.next();
                    }
                });
            }
            @Override
            public String getName() {
                return name;
            }
        });
    }
    // Cleans a single trash entry when trashId is non-null, otherwise sweeps
    // every entry of the known trash types. Individual failures are collected;
    // the overall operation fails with the first error only after all entries
    // have been attempted.
    private void cleanUpTrash(Long trashId, final ReturnValueCompletion<CleanTrashResult> completion) {
        if (trashId != null) {
            cleanTrash(trashId, completion);
            return;
        }
        CleanTrashResult result = new CleanTrashResult();
        Map<String, StorageTrashSpec> trashs = trash.getTrashList(self.getUuid(), trashLists);
        if (trashs.isEmpty()) {
            completion.success(result);
            return;
        }
        List<ErrorCode> errs = new ArrayList<>();
        new While<>(trashs.entrySet()).all((trash, coml) -> {
            cleanTrash(trash.getValue().getTrashId(), new ReturnValueCompletion<CleanTrashResult>(coml) {
                @Override
                public void success(CleanTrashResult res) {
                    // per-entry results carry exactly one resource uuid
                    result.getResourceUuids().add(res.getResourceUuids().get(0));
                    updateTrashSize(result, res.getSize());
                    coml.done();
                }
                @Override
                public void fail(ErrorCode errorCode) {
                    errs.add(errorCode);
                    coml.done();
                }
            });
        }).run(new NoErrorCompletion(completion) {
            @Override
            public void done() {
                if (errs.isEmpty()) {
                    completion.success(result);
                } else {
                    completion.fail(errs.get(0));
                }
            }
        });
    }
    // API entry for trash cleanup: same queue and semantics as the local
    // CleanUpTrashOnPrimaryStorageMsg handler, but publishes an event instead
    // of replying.
    protected void handle(final APICleanUpTrashOnPrimaryStorageMsg msg) {
        APICleanUpTrashOnPrimaryStorageEvent evt = new APICleanUpTrashOnPrimaryStorageEvent(msg.getId());
        thdf.chainSubmit(new ChainTask(msg) {
            private String name = String.format("cleanup-trash-on-%s", self.getUuid());
            @Override
            public String getSyncSignature() {
                return name;
            }
            @Override
            public void run(SyncTaskChain chain) {
                cleanUpTrash(msg.getTrashId(), new ReturnValueCompletion<CleanTrashResult>(chain) {
                    @Override
                    public void success(CleanTrashResult result) {
                        evt.setResult(result);
                        bus.publish(evt);
                        chain.next();
                    }
                    @Override
                    public void fail(ErrorCode errorCode) {
                        evt.setError(errorCode);
                        bus.publish(evt);
                        chain.next();
                    }
                });
            }
            @Override
            public String getName() {
                return name;
            }
        });
    }
    // API entry for capacity sync: forwards to the local
    // SyncPrimaryStorageCapacityMsg handler and relays success/error as an event.
    private void handle(final APISyncPrimaryStorageCapacityMsg msg) {
        final APISyncPrimaryStorageCapacityEvent evt = new APISyncPrimaryStorageCapacityEvent(msg.getId());
        SyncPrimaryStorageCapacityMsg smsg = new SyncPrimaryStorageCapacityMsg();
        smsg.setPrimaryStorageUuid(msg.getPrimaryStorageUuid());
        bus.makeTargetServiceIdByResourceUuid(smsg, PrimaryStorageConstant.SERVICE_ID, smsg.getPrimaryStorageUuid());
        bus.send(smsg, new CloudBusCallBack(msg) {
            @Override
            public void run(MessageReply reply) {
                if (!reply.isSuccess()) {
                    // bus-level failure (timeout, no service, ...)
                    evt.setSuccess(false);
                    evt.setError(reply.getError());
                    bus.publish(evt);
                    return;
                }
                SyncPrimaryStorageCapacityReply reply1 = reply.castReply();
                if (!reply1.isSuccess()) {
                    evt.setSuccess(false);
                    evt.setError(reply1.getError());
                } else {
                    evt.setInventory(reply1.getInventory());
                }
                bus.publish(evt);
            }
        });
    }
    // Full capacity resync in three flows:
    //  1) ask the volume manager for capacity used by volumes (then apply the
    //     over-provisioning ratio),
    //  2) ask the snapshot manager for physical size used by snapshots,
    //  3) ask the backend for physical total/available sizes;
    // on success all figures are written back in one capacity update and the
    // refreshed inventory is returned.
    private void handle(final SyncPrimaryStorageCapacityMsg msg) {
        SyncPrimaryStorageCapacityReply reply = new SyncPrimaryStorageCapacityReply();
        FlowChain chain = FlowChainBuilder.newShareFlowChain();
        chain.setName(String.format("sync-capacity-of-primary-storage-%s", self.getUuid()));
        chain.then(new ShareFlow() {
            Long volumeUsage;
            Long snapshotUsage;
            Long totalPhysicalSize;
            Long availablePhysicalSize;
            @Override
            public void setup() {
                flow(new NoRollbackFlow() {
                    String __name__ = "sync-capacity-used-by-volumes";
                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        VolumeReportPrimaryStorageCapacityUsageMsg msg = new VolumeReportPrimaryStorageCapacityUsageMsg();
                        msg.setPrimaryStorageUuid(self.getUuid());
                        bus.makeLocalServiceId(msg, VolumeConstant.SERVICE_ID);
                        bus.send(msg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                if (!reply.isSuccess()) {
                                    trigger.fail(reply.getError());
                                    return;
                                }
                                VolumeReportPrimaryStorageCapacityUsageReply r = reply.castReply();
                                volumeUsage = r.getUsedCapacity();
                                // volume usage is logical; apply over-provisioning ratio
                                volumeUsage = ratioMgr.calculateByRatio(self.getUuid(), volumeUsage);
                                trigger.next();
                            }
                        });
                    }
                });
                flow(new NoRollbackFlow() {
                    String __name__ = "sync-capacity-used-by-volume-snapshots";
                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        VolumeSnapshotReportPrimaryStorageCapacityUsageMsg msg = new VolumeSnapshotReportPrimaryStorageCapacityUsageMsg();
                        msg.setPrimaryStorageUuid(self.getUuid());
                        bus.makeLocalServiceId(msg, VolumeSnapshotConstant.SERVICE_ID);
                        bus.send(msg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                if (!reply.isSuccess()) {
                                    trigger.fail(reply.getError());
                                    return;
                                }
                                // note: snapshot size is physical size,
                                // don't calculate over-provisioning here
                                VolumeSnapshotReportPrimaryStorageCapacityUsageReply r = reply.castReply();
                                snapshotUsage = r.getUsedSize();
                                trigger.next();
                            }
                        });
                    }
                });
                flow(new NoRollbackFlow() {
                    String __name__ = "sync-physical-capacity";
                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        syncPhysicalCapacity(new ReturnValueCompletion<PhysicalCapacityUsage>(trigger) {
                            @Override
                            public void success(PhysicalCapacityUsage returnValue) {
                                totalPhysicalSize = returnValue.totalPhysicalSize;
                                availablePhysicalSize = returnValue.availablePhysicalSize;
                                // clamp: backends may report negative free space
                                availablePhysicalSize = availablePhysicalSize < 0 ? 0 : availablePhysicalSize;
                                trigger.next();
                            }
                            @Override
                            public void fail(ErrorCode errorCode) {
                                trigger.fail(errorCode);
                            }
                        });
                    }
                });
                done(new FlowDoneHandler(msg) {
                    @Override
                    public void handle(Map data) {
                        writeToDb();
                        self = dbf.reload(self);
                        reply.setInventory(getSelfInventory());
                        bus.reply(msg, reply);
                    }
                    private void writeToDb() {
                        PrimaryStorageCapacityUpdater updater = new PrimaryStorageCapacityUpdater(self.getUuid());
                        updater.run(new PrimaryStorageCapacityUpdaterRunnable() {
                            @Override
                            public PrimaryStorageCapacityVO call(PrimaryStorageCapacityVO cap) {
                                // logical available = total - volumes - snapshots
                                long avail = cap.getTotalCapacity() - volumeUsage - snapshotUsage;
                                cap.setAvailableCapacity(avail);
                                cap.setAvailablePhysicalCapacity(availablePhysicalSize);
                                cap.setTotalPhysicalCapacity(totalPhysicalSize);
                                return cap;
                            }
                        });
                    }
                });
                error(new FlowErrorHandler(msg) {
                    @Override
                    public void handle(ErrorCode errCode, Map data) {
                        reply.setSuccess(false);
                        reply.setError(errCode);
                        bus.reply(msg, reply);
                    }
                });
            }
        }).start();
    }
protected void updatePrimaryStorage(APIUpdatePrimaryStorageMsg msg, ReturnValueCompletion<PrimaryStorageVO> completion) {
boolean update = false;
if (msg.getName() != null) {
self.setName(msg.getName());
update = true;
}
if (msg.getDescription() != null) {
self.setDescription(msg.getDescription());
update = true;
}
completion.success(update? self : null);
}
    // Updates the storage on the per-storage sync queue; persists only when
    // updatePrimaryStorage reports a change (non-null VO).
    private void handle(APIUpdatePrimaryStorageMsg msg) {
        APIUpdatePrimaryStorageEvent evt = new APIUpdatePrimaryStorageEvent(msg.getId());
        thdf.chainSubmit(new ChainTask(msg) {
            @Override
            public String getSyncSignature() {
                return getSyncId();
            }
            @Override
            public void run(SyncTaskChain chain) {
                updatePrimaryStorage(msg, new ReturnValueCompletion<PrimaryStorageVO>(msg) {
                    @Override
                    public void success(PrimaryStorageVO vo) {
                        if (vo != null){
                            self = dbf.updateAndRefresh(vo);
                        }
                        evt.setInventory(getSelfInventory());
                        bus.publish(evt);
                        chain.next();
                    }
                    @Override
                    public void fail(ErrorCode errorCode) {
                        evt.setError(errorCode);
                        bus.publish(evt);
                        chain.next();
                    }
                });
            }
            @Override
            public String getName() {
                return String.format("update-primary-storage-%s", self.getUuid());
            }
        });
    }
    // Transitions the storage status, persisting the VO and firing the canonical
    // status-changed event. Reloads first so the comparison uses the latest
    // persisted status.
    //
    // @return true when the status actually changed; false when it was already
    //         at the requested value (no event fired)
    protected boolean changeStatus(PrimaryStorageStatus status) {
        self = dbf.reload(self);
        if (status == self.getStatus()) {
            return false;
        }
        PrimaryStorageStatus oldStatus = self.getStatus();
        self.setStatus(status);
        self = dbf.updateAndRefresh(self);
        PrimaryStorageStatusChangedData d = new PrimaryStorageStatusChangedData();
        d.setInventory(PrimaryStorageInventory.valueOf(self));
        d.setPrimaryStorageUuid(self.getUuid());
        d.setOldStatus(oldStatus.toString());
        d.setNewStatus(status.toString());
        evtf.fire(PrimaryStorageCanonicalEvent.PRIMARY_STORAGE_STATUS_CHANGED_PATH, d);
        logger.debug(String.format("the primary storage[uuid:%s, name:%s] changed status from %s to %s",
                self.getUuid(), self.getName(), oldStatus, status));
        return true;
    }
    // API entry for reconnect: forwards to the local ReconnectPrimaryStorageMsg
    // handler and publishes the refreshed inventory (or the error) as an event.
    protected void handle(APIReconnectPrimaryStorageMsg msg) {
        final APIReconnectPrimaryStorageEvent evt = new APIReconnectPrimaryStorageEvent(msg.getId());
        ReconnectPrimaryStorageMsg rmsg = new ReconnectPrimaryStorageMsg();
        rmsg.setPrimaryStorageUuid(msg.getPrimaryStorageUuid());
        bus.makeTargetServiceIdByResourceUuid(rmsg, PrimaryStorageConstant.SERVICE_ID, rmsg.getPrimaryStorageUuid());
        bus.send(rmsg, new CloudBusCallBack(msg) {
            @Override
            public void run(MessageReply reply) {
                if (!reply.isSuccess()) {
                    evt.setError(reply.getError());
                } else {
                    self = dbf.reload(self);
                    evt.setInventory(getSelfInventory());
                }
                bus.publish(evt);
            }
        });
    }
    // Don't use a ChainTask for this method: the subsequent
    // DetachPrimaryStorageFromClusterMsg is already serialized on the queue.
    //
    // The cluster ref is removed from the DB *before* the cascade runs so HA
    // does not allocate hosts against a detaching storage; the ref is restored
    // if the cascade fails. NOTE(review): only refs.get(0) is restored on
    // failure — presumably (cluster, ps) pairs are unique so refs has at most
    // one element; verify against the schema.
    protected void handle(final APIDetachPrimaryStorageFromClusterMsg msg) {
        final APIDetachPrimaryStorageFromClusterEvent evt = new APIDetachPrimaryStorageFromClusterEvent(msg.getId());
        try {
            extpEmitter.preDetach(self, msg.getClusterUuid());
        } catch (PrimaryStorageException e) {
            throw new OperationFailureException(err(PrimaryStorageErrors.DETACH_ERROR, e.getMessage()));
        }
        // if not, HA will allocate wrong host, rollback when API fail
        SimpleQuery<PrimaryStorageClusterRefVO> q = dbf.createQuery(PrimaryStorageClusterRefVO.class);
        q.add(PrimaryStorageClusterRefVO_.clusterUuid, Op.EQ, msg.getClusterUuid());
        q.add(PrimaryStorageClusterRefVO_.primaryStorageUuid, Op.EQ, msg.getPrimaryStorageUuid());
        List<PrimaryStorageClusterRefVO> refs = q.list();
        dbf.removeCollection(refs, PrimaryStorageClusterRefVO.class);
        String issuer = PrimaryStorageVO.class.getSimpleName();
        List<PrimaryStorageDetachStruct> ctx = new ArrayList<>();
        PrimaryStorageDetachStruct struct = new PrimaryStorageDetachStruct();
        struct.setClusterUuid(msg.getClusterUuid());
        struct.setPrimaryStorageUuid(msg.getPrimaryStorageUuid());
        ctx.add(struct);
        casf.asyncCascade(PrimaryStorageConstant.PRIMARY_STORAGE_DETACH_CODE, issuer, ctx, new Completion(msg) {
            @Override
            public void success() {
                self = dbf.reload(self);
                evt.setInventory(PrimaryStorageInventory.valueOf(self));
                bus.publish(evt);
            }
            @Override
            public void fail(ErrorCode errorCode) {
                //has removed RefVO before, roll back
                dbf.updateAndRefresh(refs.get(0));
                evt.setError(errorCode);
                bus.publish(evt);
            }
        });
    }
/**
 * Serializes attach requests on this primary storage's sync queue and
 * delegates the actual work to {@link #attachCluster}.
 */
protected void handle(final APIAttachPrimaryStorageToClusterMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public String getName() {
            return String.format("attach-primary-storage-%s-to-cluster-%s", self.getUuid(), msg.getClusterUuid());
        }

        @Override
        public void run(final SyncTaskChain chain) {
            // advance the chain regardless of attach outcome so the queue never stalls
            attachCluster(msg, new NoErrorCompletion(msg, chain) {
                @Override
                public void done() {
                    chain.next();
                }
            });
        }
    });
}
/**
 * Attaches this primary storage to a cluster: runs the extension pre/before
 * hooks, invokes the subclass-specific {@code attachHook}, and persists the
 * PrimaryStorage<->Cluster ref only after the hook succeeds.
 *
 * Publishes an {@code APIAttachPrimaryStorageToClusterEvent} on both paths and
 * always calls {@code completion.done()} so the caller's sync chain advances.
 */
private void attachCluster(final APIAttachPrimaryStorageToClusterMsg msg, final NoErrorCompletion completion) {
    final APIAttachPrimaryStorageToClusterEvent evt = new APIAttachPrimaryStorageToClusterEvent(msg.getId());
    try {
        // extensions may veto the attach by throwing
        extpEmitter.preAttach(self, msg.getClusterUuid());
    } catch (PrimaryStorageException pe) {
        evt.setError(err(PrimaryStorageErrors.ATTACH_ERROR, pe.getMessage()));
        bus.publish(evt);
        completion.done();
        return;
    }

    extpEmitter.beforeAttach(self, msg.getClusterUuid());
    attachHook(msg.getClusterUuid(), new Completion(msg, completion) {
        @Override
        public void success() {
            // persist the ref only after the hook succeeds, so a failed attach
            // leaves no dangling PrimaryStorageClusterRefVO row behind
            PrimaryStorageClusterRefVO ref = new PrimaryStorageClusterRefVO();
            ref.setClusterUuid(msg.getClusterUuid());
            ref.setPrimaryStorageUuid(self.getUuid());
            dbf.persist(ref);

            self = dbf.reload(self);
            extpEmitter.afterAttach(self, msg.getClusterUuid());

            PrimaryStorageInventory pinv = self.toInventory();
            evt.setInventory(pinv);
            logger.debug(String.format("successfully attached primary storage[name:%s, uuid:%s]",
                    pinv.getName(), pinv.getUuid()));
            bus.publish(evt);
            completion.done();
        }

        @Override
        public void fail(ErrorCode errorCode) {
            extpEmitter.failToAttach(self, msg.getClusterUuid());
            evt.setError(err(PrimaryStorageErrors.ATTACH_ERROR, errorCode, errorCode.getDetails()));
            bus.publish(evt);
            completion.done();
        }
    });
}
/**
 * Sends StopVmInstanceMsg for every given VM in a single batch.
 *
 * Best-effort: failures are collected and logged as a single warning rather
 * than propagated, since this runs while forcing the storage into maintenance.
 *
 * @param vmUuids uuids of the VMs to stop; may be empty
 */
private void stopAllVms(List<String> vmUuids) {
    final List<StopVmInstanceMsg> msgs = new ArrayList<StopVmInstanceMsg>();
    for (String vmUuid : vmUuids) {
        StopVmInstanceMsg msg = new StopVmInstanceMsg();
        msg.setVmInstanceUuid(vmUuid);
        bus.makeTargetServiceIdByResourceUuid(msg, VmInstanceConstant.SERVICE_ID, vmUuid);
        msgs.add(msg);
    }

    bus.send(msgs, new CloudBusListCallBack(null) {
        @Override
        public void run(List<MessageReply> replies) {
            StringBuilder sb = new StringBuilder();
            boolean success = true;
            // replies arrive in the same order as msgs; iterate by index instead of
            // the original replies.indexOf(r), which was O(n^2) and could match the
            // wrong message when two replies compare equal
            for (int i = 0; i < replies.size(); i++) {
                MessageReply r = replies.get(i);
                if (!r.isSuccess()) {
                    StopVmInstanceMsg msg = msgs.get(i);
                    String err = String.format("\nfailed to stop vm[uuid:%s] on primary storage[uuid:%s], %s",
                            msg.getVmInstanceUuid(), self.getUuid(), r.getError());
                    sb.append(err);
                    success = false;
                }
            }

            if (!success) {
                logger.warn(sb.toString());
            }
        }
    });
}
/**
 * Handles an API request to change this primary storage's state (e.g. enable,
 * disable, maintain).  Runs on the storage's sync queue so state changes are
 * serialized with other operations on the same storage.  Entering maintenance
 * additionally force-stops every VM that has a volume on this storage.
 */
protected void handle(APIChangePrimaryStorageStateMsg msg) {
    APIChangePrimaryStorageStateEvent evt = new APIChangePrimaryStorageStateEvent(msg.getId());

    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public void run(SyncTaskChain chain) {
            PrimaryStorageState currState = self.getState();
            PrimaryStorageStateEvent event = PrimaryStorageStateEvent.valueOf(msg.getStateEvent());
            PrimaryStorageState nextState = AbstractPrimaryStorage.getNextState(currState, event);

            try {
                // extensions may veto the state change by throwing
                extpEmitter.preChange(self, event);
            } catch (PrimaryStorageException e) {
                evt.setError(err(SysErrors.CHANGE_RESOURCE_STATE_ERROR, e.getMessage()));
                bus.publish(evt);
                // bug fix: the original returned without advancing the chain,
                // leaving this storage's sync queue blocked after a vetoed change
                chain.next();
                return;
            }

            extpEmitter.beforeChange(self, event);

            if (PrimaryStorageStateEvent.maintain == event) {
                logger.warn(String.format("Primary Storage %s will enter maintenance mode, ignore unknown status VMs", msg.getPrimaryStorageUuid()));
                List<String> vmUuids = SQL.New("select vm.uuid from VmInstanceVO vm, VolumeVO vol" +
                        " where vol.primaryStorageUuid =:uuid and vol.vmInstanceUuid = vm.uuid group by vm.uuid", String.class)
                        .param("uuid", self.getUuid()).list();
                if (!vmUuids.isEmpty()) {
                    stopAllVms(vmUuids);
                }
            }

            changeStateHook(event, nextState);
            self.setState(nextState);
            self = dbf.updateAndRefresh(self);
            extpEmitter.afterChange(self, event, currState);

            // fire the canonical event so other services observe the transition
            PrimaryStorageCanonicalEvent.PrimaryStorageStateChangedData data = new PrimaryStorageCanonicalEvent.PrimaryStorageStateChangedData();
            data.setInventory(PrimaryStorageInventory.valueOf(self));
            data.setPrimaryStorageUuid(self.getUuid());
            data.setOldState(currState);
            data.setNewState(nextState);
            evt.setInventory(PrimaryStorageInventory.valueOf(self));
            evtf.fire(PrimaryStorageCanonicalEvent.PRIMARY_STORAGE_STATE_CHANGED_PATH, data);

            bus.publish(evt);
            chain.next();
        }

        @Override
        public String getName() {
            return String.format("change-primary-storage-%s-state", self.getUuid());
        }
    });
}
/**
 * Handles an API request to delete this primary storage.
 *
 * Marks the storage Deleting up-front, then runs a flow chain:
 * in Permissive mode a deletion-check cascade (which dependents may veto)
 * followed by the delete cascade; otherwise a force-delete cascade only;
 * in both modes a final cleanup cascade.  On success the API event and the
 * canonical deleted event are fired; on failure the API event carries a
 * DELETE_RESOURCE_ERROR.
 */
protected void handle(APIDeletePrimaryStorageMsg msg) {
    final APIDeletePrimaryStorageEvent evt = new APIDeletePrimaryStorageEvent(msg.getId());
    final String issuer = PrimaryStorageVO.class.getSimpleName();
    final List<PrimaryStorageInventory> ctx = PrimaryStorageInventory.valueOf(Arrays.asList(self));
    // flip the state first so new operations are rejected while deletion runs
    self.setState(PrimaryStorageState.Deleting);
    self = dbf.updateAndRefresh(self);
    FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
    chain.setName(String.format("delete-primary-storage-%s", msg.getUuid()));
    if (msg.getDeletionMode() == APIDeleteMessage.DeletionMode.Permissive) {
        // permissive deletion asks dependents first; any of them may veto
        chain.then(new NoRollbackFlow() {
            @Override
            public void run(final FlowTrigger trigger, Map data) {
                casf.asyncCascade(CascadeConstant.DELETION_CHECK_CODE, issuer, ctx, new Completion(trigger) {
                    @Override
                    public void success() {
                        trigger.next();
                    }

                    @Override
                    public void fail(ErrorCode errorCode) {
                        trigger.fail(errorCode);
                    }
                });
            }
        }).then(new NoRollbackFlow() {
            @Override
            public void run(final FlowTrigger trigger, Map data) {
                casf.asyncCascade(CascadeConstant.DELETION_DELETE_CODE, issuer, ctx, new Completion(trigger) {
                    @Override
                    public void success() {
                        trigger.next();
                    }

                    @Override
                    public void fail(ErrorCode errorCode) {
                        trigger.fail(errorCode);
                    }
                });
            }
        });
    } else {
        // forced deletion skips the check phase entirely
        chain.then(new NoRollbackFlow() {
            @Override
            public void run(final FlowTrigger trigger, Map data) {
                casf.asyncCascade(CascadeConstant.DELETION_FORCE_DELETE_CODE, issuer, ctx, new Completion(trigger) {
                    @Override
                    public void success() {
                        trigger.next();
                    }

                    @Override
                    public void fail(ErrorCode errorCode) {
                        trigger.fail(errorCode);
                    }
                });
            }
        });
    }

    // Due to issue #1412, deleting PS asynchronously might leave VmInstanceEO in
    // database. Since eoCleanup() could be called before deleting VmInstanceVO.
    chain.then(new NoRollbackFlow() {
        @Override
        public void run(FlowTrigger trigger, Map data) {
            casf.asyncCascadeFull(CascadeConstant.DELETION_CLEANUP_CODE, issuer, ctx, new Completion(trigger) {
                @Override
                public void success() {
                    trigger.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.fail(errorCode);
                }
            });
        }
    });

    chain.done(new FlowDoneHandler(msg) {
        @Override
        public void handle(Map data) {
            bus.publish(evt);
            // announce the deletion on the canonical event path
            PrimaryStorageDeletedData d = new PrimaryStorageDeletedData();
            d.setPrimaryStorageUuid(self.getUuid());
            d.setInventory(PrimaryStorageInventory.valueOf(self));
            evtf.fire(PrimaryStorageCanonicalEvent.PRIMARY_STORAGE_DELETED_PATH, d);
        }
    }).error(new FlowErrorHandler(msg) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            evt.setError(err(SysErrors.DELETE_RESOURCE_ERROR, errCode, errCode.getDetails()));
            bus.publish(evt);
        }
    }).start();
}
/**
 * Reports a volume snapshot's size from the VolumeSnapshotVO database record;
 * no storage backend round-trip happens here.
 */
protected void handle(GetVolumeSnapshotSizeOnPrimaryStorageMsg msg) {
    GetVolumeSnapshotSizeOnPrimaryStorageReply reply = new GetVolumeSnapshotSizeOnPrimaryStorageReply();
    VolumeSnapshotVO snapshotVO = dbf.findByUuid(msg.getSnapshotUuid(), VolumeSnapshotVO.class);
    // NOTE(review): actualSize is reported as the recorded size; presumably subclasses
    // that can measure real usage override this handler - confirm
    reply.setSize(snapshotVO.getSize());
    reply.setActualSize(snapshotVO.getSize());
    bus.reply(msg, reply);
}
/**
 * Returns true when this primary storage is attached to no cluster
 * (i.e. no PrimaryStorageClusterRefVO row references it).
 */
public boolean isUnmounted() {
    return Q.New(PrimaryStorageClusterRefVO.class)
            .eq(PrimaryStorageClusterRefVO_.primaryStorageUuid, this.self.getUuid())
            .count() == 0;
}
/**
 * Validator: blocks attaching a volume whose primary storage is in the
 * Maintenance state.
 *
 * @param vmUuid     the VM the volume would be attached to (unused here)
 * @param volumeUuid the volume being attached
 */
@VmAttachVolumeValidatorMethod
static void vmAttachVolumeValidator(String vmUuid, String volumeUuid) {
    // look up the state of the primary storage hosting the volume
    String sql = "select pri.state from PrimaryStorageVO pri " +
            "where pri.uuid = (select vol.primaryStorageUuid from VolumeVO vol where vol.uuid = :volUuid)";
    PrimaryStorageState state = SQL.New(sql, PrimaryStorageState.class)
            .param("volUuid", volumeUuid)
            .find();
    if (PrimaryStorageState.Maintenance == state) {
        throw new OperationFailureException(
                operr("cannot attach volume[uuid:%s] whose primary storage is Maintenance", volumeUuid));
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.riot;
import static org.apache.jena.riot.RDFLanguages.* ;
import java.io.InputStream ;
import java.io.Reader ;
import java.util.Map ;
import java.util.Set ;
import org.apache.jena.atlas.lib.DS ;
import org.apache.jena.atlas.lib.InternalErrorException ;
import org.apache.jena.atlas.web.ContentType ;
import org.apache.jena.riot.lang.* ;
import org.apache.jena.riot.system.ErrorHandler ;
import org.apache.jena.riot.system.ErrorHandlerFactory ;
import org.apache.jena.riot.system.ParserProfile ;
import org.apache.jena.riot.system.StreamRDF ;
import org.apache.jena.riot.thrift.BinRDF ;
import org.apache.jena.sparql.util.Context ;
/** The registry of languages and parsers.
 * To register a new parser:
 * <ul>
 * <li>Register the language with {@link RDFLanguages}</li>
 * <li>Register the parser factory with {@link RDFParserRegistry}</li>
 * </ul>
 */
public class RDFParserRegistry
{
    /** Map Jena I/O names to language */
    // NOTE(review): this map is never read or written in this class - confirm it is still needed
    private static Map<String, Lang> mapJenaNameToLang = DS.map() ;

    /** map language to a parser factory */
    private static Map<Lang, ReaderRIOTFactory> langToParserFactory = DS.map() ;

    /** Known triples languages */
    private static Set<Lang> langTriples = DS.set() ;

    /** Known quads languages */
    private static Set<Lang> langQuads = DS.set() ;

    /** Generic parser factory. */
    private static ReaderRIOTFactory parserFactory = new ReaderRIOTFactoryImpl() ;

    // Special-purpose factories for languages the generic factory can not handle.
    private static ReaderRIOTFactory parserFactoryJsonLD = new ReaderRIOTFactoryJSONLD() ;
    private static ReaderRIOTFactory parserFactoryThrift = new ReaderRIOTFactoryThrift() ;
    private static ReaderRIOTFactory parserFactoryTriX = new ReaderRIOTFactoryTriX() ;
    private static ReaderRIOTFactory parserFactoryRDFNULL = new ReaderRIOTFactoryRDFNULL() ;

    private static boolean initialized = false ;
    static { init() ; }
    /** Idempotent one-time initialization: registers the standard languages. */
    public static void init()
    {
        if ( initialized ) return ;
        initialized = true ;
        initStandard() ;
    }

    private static void initStandard()
    {
        // Make sure the constants are initialized.
        RDFLanguages.init() ;

        registerLangTriples(RDFXML, parserFactory) ;
        registerLangTriples(NTRIPLES, parserFactory) ;
        registerLangTriples(N3, parserFactory) ;
        registerLangTriples(TURTLE, parserFactory) ;
        registerLangTriples(JSONLD, parserFactoryJsonLD) ;
        registerLangTriples(RDFJSON, parserFactory) ;
        registerLangTriples(CSV, parserFactory) ;
        registerLangTriples(THRIFT, parserFactoryThrift) ;
        registerLangTriples(TRIX, parserFactoryTriX) ;
        registerLangTriples(RDFNULL, parserFactoryRDFNULL) ;

        registerLangQuads(JSONLD, parserFactoryJsonLD) ;
        registerLangQuads(NQUADS, parserFactory) ;
        registerLangQuads(TRIG, parserFactory) ;
        registerLangQuads(THRIFT, parserFactoryThrift) ;
        registerLangQuads(TRIX, parserFactoryTriX) ;
        registerLangQuads(RDFNULL, parserFactoryRDFNULL) ;
    }

    /** Register a language and its parser factory.
     * To create a {@link Lang} object use {@link LangBuilder}.
     */
    private static void registerLang(Lang lang, ReaderRIOTFactory factory)
    {
        RDFLanguages.register(lang) ;
        langToParserFactory.put(lang, factory) ;
    }

    /** Register a language and its parser factory.
     * To create a {@link Lang} object use {@link LangBuilder}.
     */
    public static void registerLangTriples(Lang lang, ReaderRIOTFactory factory)
    {
        langTriples.add(lang) ;
        registerLang(lang, factory) ;
    }

    /** Register a language and its parser factory.
     * To create a {@link Lang} object use {@link LangBuilder}.
     */
    public static void registerLangQuads(Lang lang, ReaderRIOTFactory factory)
    {
        langQuads.add(lang) ;
        registerLang(lang, factory) ;
    }

    /** Remove registration */
    public static void removeRegistration(Lang lang)
    {
        RDFLanguages.unregister(lang) ;
        langToParserFactory.remove(lang) ;
    }

    /** Return the parser factory for the language, or null if not registered */
    public static ReaderRIOTFactory getFactory(Lang language)
    {
        return langToParserFactory.get(language) ;
    }

    /** return true if the language has a registered parser. */
    public static boolean isRegistered(Lang lang) { return langToParserFactory.containsKey(lang) ; }

    /** return true if the language is registered with the triples parser factories */
    public static boolean isTriples(Lang lang) { return langTriples.contains(lang) ; }

    /** return true if the language is registered with the quads parser factories */
    public static boolean isQuads(Lang lang) { return langQuads.contains(lang) ; }

    // Parser factories

    /** Factory for the generic, language-driven reader. */
    private static class ReaderRIOTFactoryImpl implements ReaderRIOTFactory
    {
        @Override
        public ReaderRIOT create(Lang lang) {
            return new ReaderRIOTLang(lang) ;
        }
    }

    /** Reader that delegates to {@link RiotParsers} for the given language. */
    private static class ReaderRIOTLang implements ReaderRIOT
    {
        private final Lang lang ;
        private ErrorHandler errorHandler ;
        private ParserProfile parserProfile = null ;

        ReaderRIOTLang(Lang lang) {
            this.lang = lang ;
            errorHandler = ErrorHandlerFactory.getDefaultErrorHandler() ;
        }

        @Override
        public void read(InputStream in, String baseURI, ContentType ct, StreamRDF output, Context context) {
            @SuppressWarnings("deprecation")
            LangRIOT parser = RiotParsers.createParser(in, lang, baseURI, output) ;
            if ( parserProfile != null )
                parser.setProfile(parserProfile);
            if ( errorHandler != null )
                parser.getProfile().setHandler(errorHandler) ;
            parser.parse() ;
        }

        @Override
        public void read(Reader in, String baseURI, ContentType ct, StreamRDF output, Context context) {
            @SuppressWarnings("deprecation")
            LangRIOT parser = RiotParsers.createParser(in, lang, baseURI, output) ;
            // Consistency fix: apply the configured profile and guard the error
            // handler exactly as the InputStream overload does.  Previously the
            // parser profile was silently ignored and the handler was installed
            // unconditionally, diverging from the InputStream path.
            if ( parserProfile != null )
                parser.setProfile(parserProfile);
            if ( errorHandler != null )
                parser.getProfile().setHandler(errorHandler) ;
            parser.parse() ;
        }

        @Override public ErrorHandler getErrorHandler() { return errorHandler ; }
        @Override public void setErrorHandler(ErrorHandler errorHandler) { this.errorHandler = errorHandler ; }

        @Override public ParserProfile getParserProfile() { return parserProfile ; }
        @Override public void setParserProfile(ParserProfile parserProfile) {
            this.parserProfile = parserProfile ;
            this.errorHandler = parserProfile.getHandler() ;
        }
    }

    /** Factory for JSON-LD only; rejects any other language. */
    private static class ReaderRIOTFactoryJSONLD implements ReaderRIOTFactory {
        @Override
        public ReaderRIOT create(Lang language) {
            if ( !Lang.JSONLD.equals(language) )
                throw new InternalErrorException("Attempt to parse " + language + " as JSON-LD") ;
            return new JsonLDReader() ;
        }
    }

    private static class ReaderRIOTFactoryThrift implements ReaderRIOTFactory {
        @Override
        public ReaderRIOT create(Lang language) {
            return new ReaderRDFThrift() ;
        }
    }

    /** Reader for the binary RDF Thrift encoding; InputStream only. */
    private static class ReaderRDFThrift implements ReaderRIOT {
        @Override
        public void read(InputStream in, String baseURI, ContentType ct, StreamRDF output, Context context) {
            BinRDF.inputStreamToStream(in, output) ;
        }

        @Override
        public void read(Reader reader, String baseURI, ContentType ct, StreamRDF output, Context context) {
            throw new RiotException("RDF Thrift : Reading binary data from a java.io.reader is not supported. Please use an InputStream") ;
        }

        @Override
        public ErrorHandler getErrorHandler() {
            return null ;
        }

        @Override
        public void setErrorHandler(ErrorHandler errorHandler) {}

        @Override
        public ParserProfile getParserProfile() {
            return null ;
        }

        @Override
        public void setParserProfile(ParserProfile profile) {}
    }

    private static class ReaderRIOTFactoryTriX implements ReaderRIOTFactory {
        @Override
        public ReaderRIOT create(Lang language) {
            return new ReaderTriX() ;
        }
    }

    private static class ReaderRIOTFactoryRDFNULL implements ReaderRIOTFactory {
        @Override
        public ReaderRIOT create(Lang language) {
            return new ReaderRDFNULL() ;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.driver;
import org.apache.tinkerpop.gremlin.driver.exception.ConnectionException;
import org.apache.tinkerpop.gremlin.driver.message.RequestMessage;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelPromise;
import io.netty.channel.socket.nio.NioSocketChannel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URI;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
/**
 * A single connection to a Gremlin Server instance.
 *
 * @author Stephen Mallette (http://stephen.genoprime.com)
 */
final class Connection {
    private static final Logger logger = LoggerFactory.getLogger(Connection.class);

    private final Channel channel;
    private final URI uri;
    // requests that have been written but whose results are not fully read yet,
    // keyed by request id
    private final ConcurrentMap<UUID, ResultQueue> pending = new ConcurrentHashMap<>();
    private final Cluster cluster;
    private final Client client;
    private final ConnectionPool pool;

    public static final int MAX_IN_PROCESS = 4;
    public static final int MIN_IN_PROCESS = 1;
    public static final int MAX_WAIT_FOR_CONNECTION = 3000;
    public static final int MAX_CONTENT_LENGTH = 65536;
    public static final int RECONNECT_INITIAL_DELAY = 1000;
    public static final int RECONNECT_INTERVAL = 1000;
    public static final int RESULT_ITERATION_BATCH_SIZE = 64;

    /**
     * When a {@code Connection} is borrowed from the pool, this number is incremented to indicate the number of
     * times it has been taken and is decremented when it is returned. This number is one indication as to how
     * busy a particular {@code Connection} is.
     */
    public final AtomicInteger borrowed = new AtomicInteger(0);
    // resolved once from the pool settings and cached across instances
    private final AtomicReference<Class<Channelizer>> channelizerClass = new AtomicReference<>(null);

    // set to true when a write fails; the pool replaces dead connections
    private volatile boolean isDead = false;
    private final int maxInProcess;

    private final String connectionLabel;

    private final Channelizer channelizer;

    // non-null once closeAsync() has been called; doubles as the "closed" flag
    private final AtomicReference<CompletableFuture<Void>> closeFuture = new AtomicReference<>();
    private final AtomicBoolean shutdownInitiated = new AtomicBoolean(false);

    /**
     * Opens a connection to the given URI, blocking until the socket is
     * connected or failing with a {@link ConnectionException}.
     */
    public Connection(final URI uri, final ConnectionPool pool, final int maxInProcess) throws ConnectionException {
        this.uri = uri;
        this.cluster = pool.getCluster();
        this.client = pool.getClient();
        this.pool = pool;
        this.maxInProcess = maxInProcess;
        connectionLabel = String.format("Connection{host=%s}", pool.host);

        if (cluster.isClosing()) throw new IllegalStateException("Cannot open a connection while the cluster after close() is called");

        final Bootstrap b = this.cluster.getFactory().createBootstrap();
        try {
            // resolve and cache the Channelizer class from the pool configuration
            if (channelizerClass.get() == null) {
                channelizerClass.compareAndSet(null, (Class<Channelizer>) Class.forName(cluster.connectionPoolSettings().channelizer));
            }

            channelizer = channelizerClass.get().newInstance();
            channelizer.init(this);
            b.channel(NioSocketChannel.class).handler(channelizer);

            // blocks until connected; any failure is rethrown as ConnectionException below
            channel = b.connect(uri.getHost(), uri.getPort()).sync().channel();
            channelizer.connected();

            logger.info("Created new connection for {}", uri);
        } catch (Exception ie) {
            logger.debug("Error opening connection on {}", uri);
            throw new ConnectionException(uri, "Could not open connection", ie);
        }
    }

    /**
     * A connection can only have so many things in process happening on it at once, where "in process" refers to
     * the maximum number of in-process requests less the number of pending responses.
     */
    public int availableInProcess() {
        return maxInProcess - pending.size();
    }

    /** True when a write failed and this connection should be discarded by the pool. */
    public boolean isDead() {
        return isDead;
    }

    /** True once {@link #closeAsync()} has been invoked. */
    public boolean isClosed() {
        return closeFuture.get() != null;
    }

    URI getUri() {
        return uri;
    }

    Cluster getCluster() {
        return cluster;
    }

    Client getClient() {
        return client;
    }

    ConcurrentMap<UUID, ResultQueue> getPending() {
        return pending;
    }

    /**
     * Initiates an asynchronous close.  Only the first caller installs the
     * close future; subsequent callers get the same future back.
     */
    public CompletableFuture<Void> closeAsync() {
        final CompletableFuture<Void> future = new CompletableFuture<>();
        if (!closeFuture.compareAndSet(null, future))
            return closeFuture.get();

        // make sure all requests in the queue are fully processed before killing. if they are then shutdown
        // can be immediate. if not this method will signal the readCompleted future defined in the write()
        // operation to check if it can close. in this way the connection no longer receives writes, but
        // can continue to read. If a request never comes back the future won't get fulfilled and the connection
        // will maintain a "pending" request, that won't quite ever go away. The build up of such dead requests
        // on a connection in the connection pool will force the pool to replace the connection for a fresh one
        if (pending.isEmpty()) {
            if (null == channel)
                future.complete(null);
            else
                shutdown(future);
        }

        return future;
    }

    /** Blocking variant of {@link #closeAsync()}. */
    public void close() {
        try {
            closeAsync().get();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Writes a request to the channel.  On write failure the connection is
     * marked dead and the caller's future is failed; on success a ResultQueue
     * is registered under the request id and the future completes with a
     * ResultSet the client can start polling.
     */
    public ChannelPromise write(final RequestMessage requestMessage, final CompletableFuture<ResultSet> future) {
        // once there is a completed write, then create a traverser for the result set and complete
        // the promise so that the client knows that it can start checking for results.
        final Connection thisConnection = this;
        final ChannelPromise promise = channel.newPromise()
                .addListener(f -> {
                    if (!f.isSuccess()) {
                        if (logger.isDebugEnabled())
                            logger.debug(String.format("Write on connection %s failed", thisConnection.getConnectionInfo()), f.cause());
                        thisConnection.isDead = true;
                        thisConnection.returnToPool();
                        future.completeExceptionally(f.cause());
                    } else {
                        final LinkedBlockingQueue<Result> resultLinkedBlockingQueue = new LinkedBlockingQueue<>();
                        final CompletableFuture<Void> readCompleted = new CompletableFuture<>();
                        readCompleted.thenAcceptAsync(v -> {
                            thisConnection.returnToPool();

                            // close was signaled in closeAsync() but there were pending messages at that time. attempt
                            // the shutdown if the returned result cleared up the last pending message
                            if (isClosed() && pending.isEmpty())
                                shutdown(closeFuture.get());
                        }, cluster.executor());
                        final ResultQueue handler = new ResultQueue(resultLinkedBlockingQueue, readCompleted);
                        pending.put(requestMessage.getRequestId(), handler);
                        future.complete(new ResultSet(handler, cluster.executor(), readCompleted));
                    }
                });
        channel.writeAndFlush(requestMessage, promise);
        return promise;
    }

    /** Returns this connection to its pool; failures are logged, never thrown. */
    public void returnToPool() {
        try {
            if (pool != null) pool.returnConnection(this);
        } catch (ConnectionException ce) {
            if (logger.isDebugEnabled())
                logger.debug("Returned {} connection to {} but an error occurred - {}", this.getConnectionInfo(), pool, ce.getMessage());
        }
    }

    private void shutdown(final CompletableFuture<Void> future) {
        // shutdown can be called directly from closeAsync() or after write() and therefore this method should only
        // be called once. once shutdown is initiated, it shouldn't be executed a second time or else it sends more
        // messages at the server and leads to ugly log messages over there.
        if (shutdownInitiated.compareAndSet(false, true)) {
            if (client instanceof Client.SessionedClient) {
                // maybe this should be delegated back to the Client implementation???
                final RequestMessage closeMessage = client.buildMessage(RequestMessage.build(Tokens.OPS_CLOSE));
                final CompletableFuture<ResultSet> closed = new CompletableFuture<>();
                write(closeMessage, closed);

                try {
                    // make sure we get a response here to validate that things closed as expected. on error, we'll let
                    // the server try to clean up on its own. the primary error here should probably be related to
                    // protocol issues which should not be something a user has to fuss with.
                    closed.get();
                } catch (Exception ex) {
                    final String msg = String.format(
                            "Encountered an error trying to close connection on %s - force closing - server will close session on shutdown or timeout.",
                            ((Client.SessionedClient) client).getSessionId());
                    logger.warn(msg, ex);
                }
            }

            channelizer.close(channel);

            // complete the close future from the channel-close outcome
            final ChannelPromise promise = channel.newPromise();
            promise.addListener(f -> {
                if (f.cause() != null)
                    future.completeExceptionally(f.cause());
                else
                    future.complete(null);
            });

            channel.close(promise);
        }
    }

    public String getConnectionInfo() {
        return String.format("Connection{host=%s, isDead=%s, borrowed=%s, pending=%s}",
                pool.host, isDead, borrowed, pending.size());
    }

    @Override
    public String toString() {
        return connectionLabel;
    }
}
| |
package com.iappsam.servlet.forms.iirup;
import java.io.IOException;
import java.sql.Date;
import java.util.ArrayList;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.iappsam.DivisionOffice;
import com.iappsam.Employee;
import com.iappsam.Item;
import com.iappsam.forms.Disposal;
import com.iappsam.forms.IIRUP;
import com.iappsam.forms.IIRUPLine;
import com.iappsam.managers.IIRUPManager;
import com.iappsam.managers.PersonManager;
import com.iappsam.managers.exceptions.TransactionException;
import com.iappsam.managers.sessions.IIRUPManagerSession;
import com.iappsam.managers.sessions.PersonManagerSession;
import com.iappsam.util.ApplicationContext;
/**
* Servlet implementation class IIRUPForm
*/
@WebServlet("/forms/iirup/iirup.do")
public class IIRUPForm extends HttpServlet {
private static final long serialVersionUID = 1L;
// IIRUP
// iirupLine
private ArrayList<String> quantity;
private ArrayList<String> itemIDs;
private ArrayList<String> yearsInService;
private ArrayList<String> depreciation;
private ArrayList<String> disposition;
private ArrayList<String> appraisal;
private ArrayList<String> orNumber;
private String approvedBy;
private String nameOfInspector;
private String nameOfWitness;
private String requestedBy;
/**
 * Creates the servlet.  No per-instance initialization is required; container
 * lifecycle hooks are inherited from {@link HttpServlet}.
 *
 * @see HttpServlet#HttpServlet()
 */
public IIRUPForm() {
    super();
}
/**
 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse
 *      response)
 */
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    // No GET handling is implemented: this form is processed through doPost,
    // and a GET falls through and yields an empty response.
}
/**
* @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse
* response)
*/
/**
 * Extracts the designation portion from a combo-box entry formatted as
 * {@code "Person : Designation"} (a space on each side of the colon).
 *
 * @param input the combined entry; may be null
 * @return the text starting two characters past the first colon, or null when
 *         the input is null, contains no colon, or is too short to hold one
 */
private String getDesignationFromEntry(String input) {
    // bug fix: removed leftover System.out.println debug output
    if (input == null)
        return null;
    int colon = input.indexOf(':');
    // the "+ 2" skips the colon and the following space of the entry format;
    // guard the bound so a trailing colon no longer throws StringIndexOutOfBoundsException
    if (colon < 0 || colon + 2 > input.length())
        return null;
    return input.substring(colon + 2);
}
/**
 * Extracts the person portion from a combo-box entry formatted as
 * {@code "Person : Designation"} — everything before the space that precedes
 * the first colon.
 *
 * @param input the combined entry; may be null
 * @return the text up to (but excluding) the character before the first colon,
 *         or null when the input is null, has no colon, or starts with a colon
 */
private String getPersonFromEntry(String input) {
    // bug fix: removed leftover System.out.println debug output
    if (input == null)
        return null;
    int colon = input.indexOf(':');
    // substring(0, colon - 1) also drops the separator space before the colon;
    // guard colon < 1 so a leading colon no longer throws StringIndexOutOfBoundsException
    if (colon < 1)
        return null;
    return input.substring(0, colon - 1);
}
/**
 * Handles submission of the IIRUP form: resolves the five signatory entries
 * to Employee records, assembles the IIRUP with one line per selected item,
 * persists it, and forwards to the flush view.
 *
 * The five duplicated person-lookup loops of the original are factored into
 * {@link #findEmployeeForEntry}; the unused locals {@code iManage} and
 * {@code accountableOfficerEmployeeID} were removed.
 *
 * NOTE(review): per-request data is written into servlet instance fields
 * (approvedBy, itemIDs, ...). Servlet instances are shared across request
 * threads, so concurrent submissions can clobber each other — these should
 * be locals. Kept as fields here because their declarations (and possible
 * readers) are outside this view.
 */
@SuppressWarnings("unchecked")
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    PersonManager pManager = new PersonManagerSession();
    // Form header data collected by earlier steps of the workflow.
    String accountableOfficer = (String) request.getSession().getAttribute("accountableOfficer");
    Date asOfDate = (Date) request.getSession().getAttribute("asOfDate");
    DivisionOffice station = (DivisionOffice) request.getSession().getAttribute("station");
    approvedBy = request.getParameter("approvedBy");
    nameOfInspector = request.getParameter("nameOfInspector");
    nameOfWitness = request.getParameter("nameOfWitness");
    requestedBy = (String) request.getSession().getAttribute("requestedBy");
    // Parallel lists describing each IIRUP line, matched by index.
    itemIDs = (ArrayList<String>) request.getSession().getAttribute("itemList");
    quantity = (ArrayList<String>) request.getSession().getAttribute("quantity");
    yearsInService = (ArrayList<String>) request.getSession().getAttribute("yearsInService");
    depreciation = (ArrayList<String>) request.getSession().getAttribute("depreciation");
    disposition = (ArrayList<String>) request.getSession().getAttribute("disposition");
    appraisal = (ArrayList<String>) request.getSession().getAttribute("appraisal");
    orNumber = (ArrayList<String>) request.getSession().getAttribute("orNumber");
    try {
        // Each signatory entry is "Person : Designation"; resolve to employees.
        Employee accountableEmployee = findEmployeeForEntry(pManager, accountableOfficer);
        Employee approvedByEmployee = findEmployeeForEntry(pManager, approvedBy);
        Employee requestedByEmployee = findEmployeeForEntry(pManager, requestedBy);
        Employee inspectedByEmployee = findEmployeeForEntry(pManager, nameOfInspector);
        Employee witnessedByEmployee = findEmployeeForEntry(pManager, nameOfWitness);
        IIRUP iirupForm = new IIRUP(asOfDate, accountableEmployee, requestedByEmployee, approvedByEmployee, inspectedByEmployee, witnessedByEmployee);
        // Station label is "division,office" when an office name is present.
        if (station.getOfficeName() != null) {
            iirupForm.setStation(station.getDivisionName() + "," + station.getOfficeName());
        } else {
            iirupForm.setStation(station.getDivisionName());
        }
        // Materialize the selected items from their ids.
        ArrayList<Item> item = new ArrayList<Item>();
        for (int i = 0; i < itemIDs.size(); i++) {
            item.add(ApplicationContext.INSTANCE.getItemManager().getItem(Integer.parseInt(itemIDs.get(i))));
        }
        // Build one IIRUP line per item from the parallel request lists.
        for (int i = 0; i < item.size(); i++) {
            Disposal disposal = new Disposal();
            if (disposition.get(i).equalsIgnoreCase(Disposal.DESTROYED))
                disposal = new Disposal(Disposal.DESTROYED);
            if (disposition.get(i).equalsIgnoreCase(Disposal.SOLD_AT_PRIVATE_SALE))
                disposal = new Disposal(Disposal.SOLD_AT_PRIVATE_SALE);
            IIRUPLine line = new IIRUPLine(iirupForm, item.get(i), Integer.parseInt(quantity.get(i)), Integer.parseInt(yearsInService.get(i)), Float.parseFloat(depreciation.get(i)), disposal,
                    orNumber.get(i));
            line.setAppraisal(appraisal.get(i));
            iirupForm.addLine(line);
        }
        try {
            ApplicationContext.INSTANCE.getIIRUPManager().addIIRUP(iirupForm);
            System.out.println("successfully saved IIRUP!!");
            request.setAttribute("iirupForm", iirupForm);
            RequestDispatcher view = request.getRequestDispatcher("IIRUPFlush.do");
            view.forward(request, response);
        } catch (TransactionException e1) {
            // Persisting failed: log and return without forwarding (matches
            // the original behavior; consider surfacing an error page).
            e1.printStackTrace();
        }
    } catch (TransactionException e2) {
        // A person/employee lookup failed: log and abort the submission.
        e2.printStackTrace();
    }
}

/**
 * Resolves a "Person : Designation" entry to the Employee of that person who
 * holds the given designation, or {@code null} when none matches. Replaces
 * the five duplicated lookup loops the original doPost contained; call
 * pattern and iteration order are unchanged.
 */
private Employee findEmployeeForEntry(PersonManager pManager, String entry) throws TransactionException {
    String designation = getDesignationFromEntry(entry);
    int limit = pManager.getEmployeeByPerson(pManager.getPerson(getPersonFromEntry(entry)).getId()).size();
    for (int i = 0; i < limit; i++) {
        Employee candidate = pManager.getEmployeeByPerson(pManager.getPerson(getPersonFromEntry(entry)).getId()).get(i);
        if (candidate.getDesignation().equals(designation)) {
            return candidate;
        }
    }
    return null;
}
}
| |
/**
* Copyright Pravega Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pravega.controller.store.stream.records;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.pravega.common.ObjectBuilder;
import io.pravega.common.io.serialization.RevisionDataInput;
import io.pravega.common.io.serialization.RevisionDataOutput;
import io.pravega.common.io.serialization.VersionedSerializer;
import lombok.Builder;
import lombok.Data;
import lombok.NonNull;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Comparator;
import java.util.stream.Collectors;
/**
* Data class for storing information about stream's truncation point.
*/
@Data
@Slf4j
public class StreamTruncationRecord {
// Shared serializer instance used by toBytes()/fromBytes().
public static final TruncationRecordSerializer SERIALIZER = new TruncationRecordSerializer();
// Canonical "never truncated" record: empty cut/span/segment sets, size 0,
// not updating.
public static final StreamTruncationRecord EMPTY = new StreamTruncationRecord(ImmutableMap.of(),
ImmutableMap.of(), ImmutableSet.of(), ImmutableSet.of(), 0L, false);
/**
 * Stream cut that is applied as part of this truncation.
 */
private final ImmutableMap<Long, Long> streamCut;
/**
 * If a stream cut spans across multiple epochs then this map captures mapping of segments from the stream cut to
 * epochs they were found in closest to truncation point.
 * This data structure is used to find active segments wrt a stream cut.
 * So for example:
 * epoch 0: 0, 1
 * epoch 1: 0, 2, 3
 * epoch 2: 0, 2, 4, 5
 * epoch 3: 0, 4, 5, 6, 7
 *
 * Following is a valid stream cut {0/offset, 3/offset, 6/offset, 7/offset}
 * This spans from epoch 1 till epoch 3. Any request for segments at epoch 1 or 2 or 3 will need to have this stream cut
 * applied on it to find segments that are available for consumption.
 * Refer to TableHelper.getActiveSegmentsAt
 */
private final ImmutableMap<StreamSegmentRecord, Integer> span;
// Lowest/highest epoch present in `span`, derived in the constructor;
// Integer.MIN_VALUE is the sentinel when the span is empty. These are not
// written by the serializer — they are recomputed on deserialization.
private final int spanEpochLow;
private final int spanEpochHigh;
/**
 * All segments that have been deleted for this stream so far.
 */
private final ImmutableSet<Long> deletedSegments;
/**
 * Segments to delete as part of this truncation.
 * This is non empty while truncation is ongoing.
 * This is reset to empty once truncation completes, see {@link #complete}.
 */
private final ImmutableSet<Long> toDelete;
/**
 * Size till stream cut.
 */
private final long sizeTill;
// True while a truncation based on this record is in progress.
private final boolean updating;
/**
 * Creates a truncation record. spanEpochLow/spanEpochHigh are derived from
 * the span map rather than passed in; all reference arguments are required
 * to be non-null (lombok @NonNull).
 */
@Builder
public StreamTruncationRecord(@NonNull ImmutableMap<Long, Long> streamCut, @NonNull ImmutableMap<StreamSegmentRecord, Integer> span,
@NonNull ImmutableSet<Long> deletedSegments, @NonNull ImmutableSet<Long> toDelete,
long sizeTill, boolean updating) {
this.streamCut = streamCut;
this.span = span;
this.deletedSegments = deletedSegments;
this.toDelete = toDelete;
this.sizeTill = sizeTill;
this.updating = updating;
// Epoch bounds of the span; MIN_VALUE sentinel for an empty span.
this.spanEpochLow = span.values().stream().min(Comparator.naturalOrder()).orElse(Integer.MIN_VALUE);
this.spanEpochHigh = span.values().stream().max(Comparator.naturalOrder()).orElse(Integer.MIN_VALUE);
}
/**
 * Method to complete a given ongoing truncation record by setting updating flag to false and merging toDelete in deletedSegments.
 * @param toComplete record to complete; must currently have updating == true
 * @return new record that has the updating flag set to false
 */
public static StreamTruncationRecord complete(StreamTruncationRecord toComplete) {
// Only an in-flight truncation may be completed.
Preconditions.checkState(toComplete.updating);
ImmutableSet.Builder<Long> builder = ImmutableSet.builder();
builder.addAll(toComplete.deletedSegments);
builder.addAll(toComplete.toDelete);
return StreamTruncationRecord.builder()
.updating(false)
.span(toComplete.span)
.streamCut(toComplete.streamCut)
.deletedSegments(builder.build())
.toDelete(ImmutableSet.of())
.sizeTill(toComplete.sizeTill)
.build();
}
// Lombok @Builder target; implements ObjectBuilder so the versioned
// serializer below can populate a record field by field.
private static class StreamTruncationRecordBuilder implements ObjectBuilder<StreamTruncationRecord> {
}
// Deserializes a record previously produced by toBytes().
@SneakyThrows(IOException.class)
public static StreamTruncationRecord fromBytes(final byte[] data) {
return SERIALIZER.deserialize(data);
}
// Serializes this record with the versioned serializer.
@SneakyThrows(IOException.class)
public byte[] toBytes() {
return SERIALIZER.serialize(this).getCopy();
}
// Multi-line human-readable dump of all persisted fields; nested
// StreamSegmentRecord dumps are re-indented for readability.
@Override
public String toString() {
return String.format("%s = %s", "streamCut", streamCut.keySet().stream()
.map(key -> key + " : " + streamCut.get(key))
.collect(Collectors.joining(", ", "{", "}"))) + "\n" +
String.format("%s = {%n %s%n}", "span", span.keySet().stream()
.map(streamSegmentRecord ->
String.format("key: %n %s%nvalue: %s", streamSegmentRecord.toString().replace("\n", "\n "),
span.get(streamSegmentRecord)).replace("\n", "\n "))
.collect(Collectors.joining("\n,\n "))) + "\n" +
String.format("%s = %s", "deletedSegments", deletedSegments) + "\n" +
String.format("%s = %s", "toDelete", toDelete) + "\n" +
String.format("%s = %s", "sizeTill", sizeTill) + "\n" +
String.format("%s = %s", "updating", updating);
}
// Versioned binary serializer. The field order in write00 must mirror
// read00 exactly; add a new revision rather than editing these when the
// wire format changes.
private static class TruncationRecordSerializer
extends VersionedSerializer.WithBuilder<StreamTruncationRecord, StreamTruncationRecordBuilder> {
@Override
protected byte getWriteVersion() {
return 0;
}
@Override
protected void declareVersions() {
version(0).revision(0, this::write00, this::read00);
}
// Reads fields in the exact order write00 emits them:
// streamCut, span, deletedSegments, toDelete, sizeTill, updating.
private void read00(RevisionDataInput revisionDataInput,
StreamTruncationRecordBuilder truncationRecordBuilder)
throws IOException {
ImmutableMap.Builder<Long, Long> streamCutBuilder = ImmutableMap.builder();
revisionDataInput.readMap(DataInput::readLong, DataInput::readLong, streamCutBuilder);
truncationRecordBuilder
.streamCut(streamCutBuilder.build());
ImmutableMap.Builder<StreamSegmentRecord, Integer> spanBuilder = ImmutableMap.builder();
revisionDataInput.readMap(StreamSegmentRecord.SERIALIZER::deserialize, DataInput::readInt, spanBuilder);
truncationRecordBuilder.span(spanBuilder.build());
ImmutableSet.Builder<Long> deletedSegmentsBuilder = ImmutableSet.builder();
revisionDataInput.readCollection(DataInput::readLong, deletedSegmentsBuilder);
truncationRecordBuilder.deletedSegments(deletedSegmentsBuilder.build());
ImmutableSet.Builder<Long> toDeleteBuilder = ImmutableSet.builder();
revisionDataInput.readCollection(DataInput::readLong, toDeleteBuilder);
truncationRecordBuilder.toDelete(toDeleteBuilder.build());
truncationRecordBuilder
.sizeTill(revisionDataInput.readLong())
.updating(revisionDataInput.readBoolean());
}
// Writes fields in the exact order read00 consumes them.
private void write00(StreamTruncationRecord streamTruncationRecord, RevisionDataOutput revisionDataOutput)
throws IOException {
revisionDataOutput.writeMap(streamTruncationRecord.getStreamCut(), DataOutput::writeLong, DataOutput::writeLong);
revisionDataOutput.writeMap(streamTruncationRecord.getSpan(), StreamSegmentRecord.SERIALIZER::serialize, DataOutput::writeInt);
revisionDataOutput.writeCollection(streamTruncationRecord.getDeletedSegments(), DataOutput::writeLong);
revisionDataOutput.writeCollection(streamTruncationRecord.getToDelete(), DataOutput::writeLong);
revisionDataOutput.writeLong(streamTruncationRecord.sizeTill);
revisionDataOutput.writeBoolean(streamTruncationRecord.isUpdating());
}
@Override
protected StreamTruncationRecordBuilder newBuilder() {
return StreamTruncationRecord.builder();
}
}
}
| |
/*
* Copyright 2014 Rodrigo Agerri
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package eus.ixa.ixa.pipe.chunk;
import ixa.kaflib.KAFDocument;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.Socket;
import java.net.UnknownHostException;
import java.util.Properties;
import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.ArgumentParserException;
import net.sourceforge.argparse4j.inf.Namespace;
import net.sourceforge.argparse4j.inf.Subparser;
import net.sourceforge.argparse4j.inf.Subparsers;
import opennlp.tools.chunker.ChunkerModel;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.util.TrainingParameters;
import org.jdom2.JDOMException;
import com.google.common.io.Files;
import eus.ixa.ixa.pipe.chunk.eval.CrossValidator;
import eus.ixa.ixa.pipe.chunk.eval.Evaluate;
import eus.ixa.ixa.pipe.chunk.train.DefaultTrainer;
import eus.ixa.ixa.pipe.chunk.train.Flags;
import eus.ixa.ixa.pipe.chunk.train.InputOutputUtils;
import eus.ixa.ixa.pipe.chunk.train.Trainer;
/**
* Main class of ixa-pipe-chunk, the chunker of ixa-pipes
* (ixa2.si.ehu.es/ixa-pipes). The annotate method is the main entry point.
*
* @author ragerri
* @version 2014-07-08
*/
public class CLI {
  /**
   * Get dynamically the version of ixa-pipe-chunk by looking at the MANIFEST
   * file.
   */
  private final String version = CLI.class.getPackage()
      .getImplementationVersion();
  /**
   * Get the git commit of the ixa-pipe-chunk compiled by looking at the
   * MANIFEST file.
   */
  private final String commit = CLI.class.getPackage()
      .getSpecificationVersion();
  /**
   * The CLI arguments.
   */
  private Namespace parsedArguments = null;
  /**
   * The argument parser.
   */
  private ArgumentParser argParser = ArgumentParsers.newArgumentParser(
      "ixa-pipe-chunk-" + version + ".jar").description(
      "ixa-pipe-chunk-" + version
          + " is a multilingual chunker developed by IXA NLP Group.\n");
  /**
   * Sub parser instance.
   */
  private Subparsers subParsers = argParser.addSubparsers().help(
      "sub-command help");
  /**
   * The parser that manages the tagging sub-command.
   */
  private Subparser annotateParser;
  /**
   * The parser that manages the training sub-command.
   */
  private Subparser trainParser;
  /**
   * The parser that manages the evaluation sub-command.
   */
  private Subparser evalParser;
  /**
   * The parser that manages the cross validation sub-command.
   */
  private final Subparser crossValidateParser;
  /**
   * Parser to start TCP socket for server-client functionality.
   */
  private Subparser serverParser;
  /**
   * Sends queries to the serverParser for annotation.
   */
  private Subparser clientParser;
  /**
   * Default beam size for decoding.
   */
  public static final int DEFAULT_BEAM_SIZE = 3;

  /**
   * Construct a CLI object with the sub-parsers to manage the command line
   * parameters.
   */
  public CLI() {
    annotateParser = subParsers.addParser("tag").help("Tagging CLI");
    loadAnnotateParameters();
    trainParser = subParsers.addParser("train").help("Training CLI");
    loadTrainingParameters();
    evalParser = subParsers.addParser("eval").help("Evaluation CLI");
    loadEvalParameters();
    this.crossValidateParser = this.subParsers.addParser("cross").help(
        "Cross validation CLI");
    loadCrossValidateParameters();
    serverParser = subParsers.addParser("server").help("Start TCP socket server");
    loadServerParameters();
    clientParser = subParsers.addParser("client").help("Send queries to the TCP socket server");
    loadClientParameters();
  }

  /**
   * The main method.
   *
   * @param args
   *          the arguments
   * @throws IOException
   *           the input output exception if not file is available
   * @throws JDOMException
   *           as the input is a NAF file, a JDOMException could be thrown
   */
  public static void main(final String[] args) throws IOException,
      JDOMException {
    CLI cmdLine = new CLI();
    cmdLine.parseCLI(args);
  }

  /**
   * Parse the command interface parameters with the argParser.
   *
   * @param args
   *          the arguments passed through the CLI
   * @throws IOException
   *           exception if problems with the incoming data
   * @throws JDOMException
   *           if xml exception
   */
  public final void parseCLI(final String[] args) throws IOException,
      JDOMException {
    try {
      parsedArguments = argParser.parseArgs(args);
      System.err.println("CLI options: " + parsedArguments);
      if (args[0].equals("tag")) {
        annotate(System.in, System.out);
      } else if (args[0].equals("eval")) {
        eval();
      } else if (args[0].equals("train")) {
        train();
      } else if (args[0].equals("cross")) {
        crossValidate();
      } else if (args[0].equals("server")) {
        server();
      } else if (args[0].equals("client")) {
        client(System.in, System.out);
      }
    } catch (ArgumentParserException e) {
      argParser.handleError(e);
      // FIX: diagnostics go to stderr, consistent with the rest of the class.
      System.err.println("Run java -jar target/ixa-pipe-chunk-" + version
          + ".jar (tag|train|eval|cross|server|client) -help for details");
      System.exit(1);
    }
  }

  /**
   * Main entry point for annotation. Reads a NAF document from the given
   * input stream and writes the annotated result to the given output stream.
   *
   * @param inputStream
   *          the input stream
   * @param outputStream
   *          the output stream
   * @throws IOException
   *           the exception if not input is provided
   * @throws JDOMException
   *           if xml exception
   */
  public final void annotate(final InputStream inputStream,
      final OutputStream outputStream) throws IOException, JDOMException {
    String model;
    if (parsedArguments.get("model") == null) {
      model = "baseline";
    } else {
      model = parsedArguments.getString("model");
    }
    String outputFormat = parsedArguments.get("outputFormat");
    // FIX: read/write through the supplied streams; the original ignored its
    // parameters and hard-coded System.in/System.out.
    BufferedReader breader = new BufferedReader(new InputStreamReader(
        inputStream, "UTF-8"));
    BufferedWriter bwriter = new BufferedWriter(new OutputStreamWriter(
        outputStream, "UTF-8"));
    final KAFDocument kaf = KAFDocument.createFromStream(breader);
    // language
    String lang;
    // FIX: the option is declared as "-l/--lang", so argparse4j stores it
    // under the key "lang"; the original looked up "language", which was
    // always null and silently disabled the NAF/CLI language check.
    if (this.parsedArguments.getString("lang") != null) {
      lang = this.parsedArguments.getString("lang");
      if (!kaf.getLang().equalsIgnoreCase(lang)) {
        System.err.println("Language parameter in NAF and CLI do not match!!");
        System.exit(1);
      }
    } else {
      lang = kaf.getLang();
    }
    final Properties properties = setAnnotateProperties(model, lang);
    final Annotate annotator = new Annotate(properties);
    // annotate to KAF
    if (outputFormat.equalsIgnoreCase("conll")) {
      bwriter.write(annotator.annotateChunksToCoNLL(kaf));
    } else {
      KAFDocument.LinguisticProcessor newLp = kaf.addLinguisticProcessor(
          "terms", "ixa-pipe-chunk-" + Files.getNameWithoutExtension(model)
              + this.version + "-" + this.commit);
      newLp.setBeginTimestamp();
      annotator.chunkToKAF(kaf);
      newLp.setEndTimestamp();
      bwriter.write(kaf.toString());
    }
    bwriter.close();
    breader.close();
  }

  /**
   * Generate the annotation parameter of the CLI.
   */
  private void loadAnnotateParameters() {
    annotateParser.addArgument("-m", "--model").required(true)
        .help("Choose model to perform chunk tagging.");
    annotateParser.addArgument("-l", "--lang").choices("en").required(false)
        .help("Choose a language to perform annotation with ixa-pipe-chunk.");
    annotateParser
        .addArgument("-o", "--outputFormat")
        .required(false)
        .choices("naf", "conll")
        .setDefault("naf")
        .help(
            "Choose between NAF and conll format; it defaults to NAF.\n");
  }

  /**
   * Main entry point for training.
   *
   * @throws IOException
   *           throws an exception if errors in the various file inputs.
   */
  public final void train() throws IOException {
    // load training parameters file
    final String paramFile = this.parsedArguments.getString("params");
    final TrainingParameters params = InputOutputUtils
        .loadTrainingParameters(paramFile);
    String outModel = null;
    // Derive the output model name from the params file unless one is given.
    if (params.getSettings().get("OutputModel") == null
        || params.getSettings().get("OutputModel").length() == 0) {
      outModel = Files.getNameWithoutExtension(paramFile) + ".bin";
      params.put("OutputModel", outModel);
    } else {
      outModel = Flags.getModel(params);
    }
    final Trainer chunkerTrainer = new DefaultTrainer(params);
    final ChunkerModel trainedModel = chunkerTrainer.train(params);
    CmdLineUtil.writeModel("ixa-pipe-chunk", new File(outModel), trainedModel);
  }

  /**
   * Loads the parameters for the training CLI.
   */
  private void loadTrainingParameters() {
    this.trainParser.addArgument("-p", "--params").required(true)
        .help("Load the training parameters file\n");
  }

  /**
   * Main entry point for evaluation.
   *
   * @throws IOException
   *           the io exception thrown if errors with paths are present
   */
  public final void eval() throws IOException {
    String testFile = parsedArguments.getString("testSet");
    String model = parsedArguments.getString("model");
    Evaluate evaluator = new Evaluate(testFile, model);
    if (parsedArguments.getString("evalReport") != null) {
      if (parsedArguments.getString("evalReport").equalsIgnoreCase("brief")) {
        evaluator.evaluate();
      } else if (parsedArguments.getString("evalReport").equalsIgnoreCase(
          "error")) {
        evaluator.evalError();
      } else if (parsedArguments.getString("evalReport").equalsIgnoreCase(
          "detailed")) {
        evaluator.detailEvaluate();
      }
    } else {
      // Default report type when --evalReport is not given.
      evaluator.detailEvaluate();
    }
  }

  /**
   * Load the evaluation parameters of the CLI.
   */
  public final void loadEvalParameters() {
    evalParser.addArgument("-m", "--model").required(true).help("Choose model");
    evalParser.addArgument("-t", "--testSet").required(true)
        .help("Input testset for evaluation");
    evalParser.addArgument("--evalReport").required(false)
        .choices("brief", "detailed", "error")
        .help("Choose type of evaluation report; defaults to detailed");
  }

  /**
   * Main access to the cross validation.
   *
   * @throws IOException
   *           input output exception if problems with corpora
   */
  public final void crossValidate() throws IOException {
    final String paramFile = this.parsedArguments.getString("params");
    final TrainingParameters params = InputOutputUtils
        .loadTrainingParameters(paramFile);
    final CrossValidator crossValidator = new CrossValidator(params);
    crossValidator.crossValidate(params);
  }

  /**
   * Create the main parameters available for the cross validation CLI.
   */
  private void loadCrossValidateParameters() {
    this.crossValidateParser.addArgument("-p", "--params").required(true)
        .help("Load the Cross validation parameters file\n");
  }

  /**
   * Set up the TCP socket for annotation.
   */
  public final void server() {
    // load parameters into a properties
    String port = parsedArguments.getString("port");
    String model = parsedArguments.getString("model");
    String outputFormat = parsedArguments.getString("outputFormat");
    // language parameter
    String lang = parsedArguments.getString("language");
    Properties serverproperties = setServerProperties(port, model, lang, outputFormat);
    new ChunkerServer(serverproperties);
  }

  /**
   * The client to query the TCP server for annotation.
   *
   * @param inputStream
   *          the stdin
   * @param outputStream
   *          stdout
   */
  public final void client(final InputStream inputStream,
      final OutputStream outputStream) {
    String host = parsedArguments.getString("host");
    String port = parsedArguments.getString("port");
    // FIX: wrap the supplied streams; the original ignored its parameters and
    // hard-coded System.in/System.out.
    try (Socket socketClient = new Socket(host, Integer.parseInt(port));
        BufferedReader inFromUser = new BufferedReader(new InputStreamReader(
            inputStream, "UTF-8"));
        BufferedWriter outToUser = new BufferedWriter(new OutputStreamWriter(
            outputStream, "UTF-8"));
        BufferedWriter outToServer = new BufferedWriter(new OutputStreamWriter(
            socketClient.getOutputStream(), "UTF-8"));
        BufferedReader inFromServer = new BufferedReader(new InputStreamReader(
            socketClient.getInputStream(), "UTF-8"));) {
      // send data to server socket
      StringBuilder inText = new StringBuilder();
      String line;
      while ((line = inFromUser.readLine()) != null) {
        inText.append(line).append("\n");
      }
      // Sentinel telling the server the document is complete.
      inText.append("<ENDOFDOCUMENT>").append("\n");
      outToServer.write(inText.toString());
      outToServer.flush();
      // get data from server
      StringBuilder sb = new StringBuilder();
      String kafString;
      while ((kafString = inFromServer.readLine()) != null) {
        sb.append(kafString).append("\n");
      }
      outToUser.write(sb.toString());
    } catch (UnsupportedEncodingException e) {
      // this cannot happen but...
      throw new AssertionError("UTF-8 not supported");
    } catch (UnknownHostException e) {
      System.err.println("ERROR: Unknown hostname or IP address!");
      System.exit(1);
    } catch (NumberFormatException e) {
      System.err.println("Port number not correct!");
      System.exit(1);
    } catch (IOException e) {
      e.printStackTrace();
    }
  }

  /**
   * Create the available parameters for the chunker server sub-command.
   */
  private void loadServerParameters() {
    serverParser.addArgument("-p", "--port")
        .required(true)
        .help("Port to be assigned to the server.\n");
    serverParser.addArgument("-m", "--model")
        .required(true)
        .help("It is required to provide a chunker model.");
    serverParser.addArgument("-l", "--language")
        .choices("en", "eu")
        .required(true)
        .help("Choose a language to perform annotation with ixa-pipe-chunk.");
    serverParser.addArgument("-o", "--outputFormat")
        .required(false)
        .choices("naf", "conll00")
        .setDefault(Flags.DEFAULT_OUTPUT_FORMAT)
        .help("Choose output format; it defaults to NAF.\n");
  }

  /**
   * Load the client parameters.
   */
  private void loadClientParameters() {
    clientParser.addArgument("-p", "--port")
        .required(true)
        .help("Port of the TCP server.\n");
    clientParser.addArgument("--host")
        .required(false)
        .setDefault(Flags.DEFAULT_HOSTNAME)
        .help("Hostname or IP where the TCP server is running.\n");
  }

  /**
   * Generate Properties objects for CLI usage.
   *
   * @param model
   *          the model to perform the annotation
   * @param language
   *          the language
   * @return a properties object
   */
  private Properties setAnnotateProperties(final String model,
      final String language) {
    final Properties annotateProperties = new Properties();
    annotateProperties.setProperty("model", model);
    annotateProperties.setProperty("language", language);
    return annotateProperties;
  }

  /**
   * Generate properties for server usage.
   * @param port the port
   * @param model the model
   * @param language the language
   * @param outputFormat the output format
   * @return the properties object
   */
  private Properties setServerProperties(String port, String model, String language, String outputFormat) {
    Properties serverProperties = new Properties();
    serverProperties.setProperty("port", port);
    serverProperties.setProperty("model", model);
    serverProperties.setProperty("language", language);
    serverProperties.setProperty("outputFormat", outputFormat);
    return serverProperties;
  }
}
| |
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.internal.operators;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.MockitoAnnotations;
import rx.Observable;
import rx.Observable.OnSubscribe;
import rx.Observer;
import rx.Subscriber;
import rx.Subscription;
import rx.schedulers.TestScheduler;
import rx.subjects.PublishSubject;
public class OperatorTimeoutTests {
private PublishSubject<String> underlyingSubject;
private TestScheduler testScheduler;
private Observable<String> withTimeout;
private static final long TIMEOUT = 3;
private static final TimeUnit TIME_UNIT = TimeUnit.SECONDS;
@Before
public void setUp() {
// Fresh subject and scheduler for each test. withTimeout errors with a
// TimeoutException if no emission arrives within TIMEOUT TIME_UNITs,
// measured on the controllable TestScheduler.
MockitoAnnotations.initMocks(this);
underlyingSubject = PublishSubject.create();
testScheduler = new TestScheduler();
withTimeout = underlyingSubject.timeout(TIMEOUT, TIME_UNIT, testScheduler);
}
@Test
public void shouldNotTimeoutIfOnNextWithinTimeout() {
    // An emission arriving before the timeout window elapses must be
    // delivered and must not trigger an error.
    @SuppressWarnings("unchecked")
    Observer<String> mockObserver = mock(Observer.class);
    Subscription sub = withTimeout.subscribe(mockObserver);
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    underlyingSubject.onNext("One");
    verify(mockObserver).onNext("One");
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    verify(mockObserver, never()).onError(any(Throwable.class));
    sub.unsubscribe();
}
@Test
public void shouldNotTimeoutIfSecondOnNextWithinTimeout() {
    // Every emission restarts the timeout window, so two emissions each
    // spaced within the window must both arrive without an error.
    @SuppressWarnings("unchecked")
    Observer<String> mockObserver = mock(Observer.class);
    Subscription sub = withTimeout.subscribe(mockObserver);
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    underlyingSubject.onNext("One");
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    underlyingSubject.onNext("Two");
    verify(mockObserver).onNext("Two");
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    verify(mockObserver, never()).onError(any(Throwable.class));
    sub.unsubscribe();
}
@Test
public void shouldTimeoutIfOnNextNotWithinTimeout() {
    @SuppressWarnings("unchecked")
    Observer<String> observer = mock(Observer.class);
    Subscription subscription = withTimeout.subscribe(observer);
    // Let the full timeout window elapse with no emission at all.
    testScheduler.advanceTimeBy(TIMEOUT + 1, TimeUnit.SECONDS);
    // FIX: isA enforces the exception type. In Mockito 1.x, Matchers.any(Class)
    // matches ANY argument regardless of type, so the original verification
    // would also pass for a wrong exception (note the otherwise-unused isA
    // static import at the top of the file).
    verify(observer).onError(isA(TimeoutException.class));
    subscription.unsubscribe();
}
@Test
public void shouldTimeoutIfSecondOnNextNotWithinTimeout() {
    @SuppressWarnings("unchecked")
    Observer<String> observer = mock(Observer.class);
    Subscription subscription = withTimeout.subscribe(observer);
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    underlyingSubject.onNext("One");
    verify(observer).onNext("One");
    // No second emission within the restarted window: the timeout must fire.
    testScheduler.advanceTimeBy(TIMEOUT + 1, TimeUnit.SECONDS);
    // FIX: isA enforces the exception type; Mockito 1.x any(Class) matches
    // any argument and never actually checked for TimeoutException.
    verify(observer).onError(isA(TimeoutException.class));
    subscription.unsubscribe();
}
@Test
// NOTE(review): "Comletes" typo kept in the name — renaming a test method is
// a (soft) interface change for report history and test filters.
public void shouldCompleteIfUnderlyingComletes() {
    // Completion of the source before the timeout must propagate as a
    // normal completion, not as an error.
    @SuppressWarnings("unchecked")
    Observer<String> mockObserver = mock(Observer.class);
    Subscription sub = withTimeout.subscribe(mockObserver);
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    underlyingSubject.onCompleted();
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    verify(mockObserver).onCompleted();
    verify(mockObserver, never()).onError(any(Throwable.class));
    sub.unsubscribe();
}
@Test
public void shouldErrorIfUnderlyingErrors() {
    @SuppressWarnings("unchecked")
    Observer<String> observer = mock(Observer.class);
    Subscription subscription = withTimeout.subscribe(observer);
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    // An error from the source before the timeout must propagate as-is.
    underlyingSubject.onError(new UnsupportedOperationException());
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    // FIX: isA enforces the exception type; Mockito 1.x any(Class) matches
    // any argument, so the original did not pin the error type at all.
    verify(observer).onError(isA(UnsupportedOperationException.class));
    subscription.unsubscribe();
}
@Test
public void shouldSwitchToOtherIfOnNextNotWithinTimeout() {
    // After the timeout fires, the stream must switch to the fallback
    // observable; later emissions from the original subject are ignored.
    Observable<String> fallback = Observable.just("a", "b", "c");
    Observable<String> source = underlyingSubject.timeout(TIMEOUT, TIME_UNIT, fallback, testScheduler);
    @SuppressWarnings("unchecked")
    Observer<String> mockObserver = mock(Observer.class);
    Subscription sub = source.subscribe(mockObserver);
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    underlyingSubject.onNext("One");
    testScheduler.advanceTimeBy(4, TimeUnit.SECONDS);
    underlyingSubject.onNext("Two");
    InOrder ordered = inOrder(mockObserver);
    ordered.verify(mockObserver, times(1)).onNext("One");
    ordered.verify(mockObserver, times(1)).onNext("a");
    ordered.verify(mockObserver, times(1)).onNext("b");
    ordered.verify(mockObserver, times(1)).onNext("c");
    ordered.verify(mockObserver, times(1)).onCompleted();
    ordered.verifyNoMoreInteractions();
    sub.unsubscribe();
}
@Test
public void shouldSwitchToOtherIfOnErrorNotWithinTimeout() {
    // Once timed out and switched to the fallback, even an error from the
    // original subject must not reach the observer.
    Observable<String> fallback = Observable.just("a", "b", "c");
    Observable<String> source = underlyingSubject.timeout(TIMEOUT, TIME_UNIT, fallback, testScheduler);
    @SuppressWarnings("unchecked")
    Observer<String> mockObserver = mock(Observer.class);
    Subscription sub = source.subscribe(mockObserver);
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    underlyingSubject.onNext("One");
    testScheduler.advanceTimeBy(4, TimeUnit.SECONDS);
    underlyingSubject.onError(new UnsupportedOperationException());
    InOrder ordered = inOrder(mockObserver);
    ordered.verify(mockObserver, times(1)).onNext("One");
    ordered.verify(mockObserver, times(1)).onNext("a");
    ordered.verify(mockObserver, times(1)).onNext("b");
    ordered.verify(mockObserver, times(1)).onNext("c");
    ordered.verify(mockObserver, times(1)).onCompleted();
    ordered.verifyNoMoreInteractions();
    sub.unsubscribe();
}
@Test
public void shouldSwitchToOtherIfOnCompletedNotWithinTimeout() {
    // A completion signalled only after the timeout has elapsed must be
    // ignored: the fallback observable supplies the terminal events.
    Observable<String> fallback = Observable.just("a", "b", "c");
    Observable<String> source = underlyingSubject.timeout(TIMEOUT, TIME_UNIT, fallback, testScheduler);
    @SuppressWarnings("unchecked")
    Observer<String> mockObserver = mock(Observer.class);
    Subscription sub = source.subscribe(mockObserver);
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    underlyingSubject.onNext("One");
    testScheduler.advanceTimeBy(4, TimeUnit.SECONDS);
    underlyingSubject.onCompleted();
    InOrder ordered = inOrder(mockObserver);
    ordered.verify(mockObserver, times(1)).onNext("One");
    ordered.verify(mockObserver, times(1)).onNext("a");
    ordered.verify(mockObserver, times(1)).onNext("b");
    ordered.verify(mockObserver, times(1)).onNext("c");
    ordered.verify(mockObserver, times(1)).onCompleted();
    ordered.verifyNoMoreInteractions();
    sub.unsubscribe();
}
@Test
public void shouldSwitchToOtherAndCanBeUnsubscribedIfOnNextNotWithinTimeout() {
    // After switching to the fallback subject, unsubscribing must stop
    // delivery of any further fallback notifications.
    PublishSubject<String> fallback = PublishSubject.create();
    Observable<String> source = underlyingSubject.timeout(TIMEOUT, TIME_UNIT, fallback, testScheduler);
    @SuppressWarnings("unchecked")
    Observer<String> mockObserver = mock(Observer.class);
    Subscription sub = source.subscribe(mockObserver);
    testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
    underlyingSubject.onNext("One");
    testScheduler.advanceTimeBy(4, TimeUnit.SECONDS);
    underlyingSubject.onNext("Two"); // too late; the fallback is active now
    fallback.onNext("a");
    fallback.onNext("b");
    sub.unsubscribe();
    // The following messages should not be delivered.
    fallback.onNext("c");
    fallback.onNext("d");
    fallback.onCompleted();
    InOrder ordered = inOrder(mockObserver);
    ordered.verify(mockObserver, times(1)).onNext("One");
    ordered.verify(mockObserver, times(1)).onNext("a");
    ordered.verify(mockObserver, times(1)).onNext("b");
    ordered.verifyNoMoreInteractions();
}
@Test
// Verifies that the timeout still fires when the source observable blocks
// synchronously inside subscribe (on another thread) past the timeout window.
public void shouldTimeoutIfSynchronizedObservableEmitFirstOnNextNotWithinTimeout()
throws InterruptedException {
// Released at the end of the test so the emitting thread can exit.
final CountDownLatch exit = new CountDownLatch(1);
// Signals that the timeout chain has been subscribed and is armed.
final CountDownLatch timeoutSetuped = new CountDownLatch(1);
@SuppressWarnings("unchecked")
final Observer<String> observer = mock(Observer.class);
new Thread(new Runnable() {
@Override
public void run() {
Observable.create(new OnSubscribe<String>() {
@Override
public void call(Subscriber<? super String> subscriber) {
try {
timeoutSetuped.countDown();
// Block inside subscribe so nothing is emitted before the timeout fires.
exit.await();
} catch (InterruptedException e) {
e.printStackTrace();
}
// These late emissions must not reach the observer.
subscriber.onNext("a");
subscriber.onCompleted();
}
}).timeout(1, TimeUnit.SECONDS, testScheduler)
.subscribe(observer);
}
}).start();
timeoutSetuped.await();
testScheduler.advanceTimeBy(2, TimeUnit.SECONDS);
InOrder inOrder = inOrder(observer);
// Only the timeout error may be observed; the late "a"/onCompleted must not.
inOrder.verify(observer, times(1)).onError(isA(TimeoutException.class));
inOrder.verifyNoMoreInteractions();
exit.countDown(); // exit the thread
}
@Test
public void shouldUnsubscribeFromUnderlyingSubscriptionOnTimeout() throws InterruptedException {
    // Regression test for https://github.com/Netflix/RxJava/pull/951:
    // when the timeout fires, the subscription to the source must be released.
    final Subscription underlyingSubscription = mock(Subscription.class);
    Observable<String> neverEnding = Observable.create(new OnSubscribe<String>() {
        public void call(Subscriber<? super String> subscriber) {
            // Register the mock subscription but never emit anything.
            subscriber.add(underlyingSubscription);
        }
    });
    TestScheduler scheduler = new TestScheduler();
    Observable<String> timedObservable = neverEnding.timeout(1000, TimeUnit.MILLISECONDS, scheduler);
    @SuppressWarnings("unchecked")
    Observer<String> mockObserver = mock(Observer.class);
    timedObservable.subscribe(mockObserver);
    scheduler.advanceTimeBy(2000, TimeUnit.MILLISECONDS);
    InOrder ordered = inOrder(mockObserver);
    ordered.verify(mockObserver).onError(isA(TimeoutException.class));
    ordered.verifyNoMoreInteractions();
    verify(underlyingSubscription, times(1)).unsubscribe();
}
@Test
public void shouldUnsubscribeFromUnderlyingSubscriptionOnImmediatelyComplete() {
    // Regression test for https://github.com/Netflix/RxJava/pull/951:
    // an immediately-completing source must still have its subscription released.
    final Subscription underlyingSubscription = mock(Subscription.class);
    Observable<String> immediatelyComplete = Observable.create(new OnSubscribe<String>() {
        public void call(Subscriber<? super String> subscriber) {
            subscriber.add(underlyingSubscription);
            subscriber.onCompleted();
        }
    });
    TestScheduler scheduler = new TestScheduler();
    Observable<String> timedObservable = immediatelyComplete.timeout(1000, TimeUnit.MILLISECONDS,
            scheduler);
    @SuppressWarnings("unchecked")
    Observer<String> mockObserver = mock(Observer.class);
    timedObservable.subscribe(mockObserver);
    scheduler.advanceTimeBy(2000, TimeUnit.MILLISECONDS);
    InOrder ordered = inOrder(mockObserver);
    ordered.verify(mockObserver).onCompleted();
    ordered.verifyNoMoreInteractions();
    verify(underlyingSubscription, times(1)).unsubscribe();
}
@Test
public void shouldUnsubscribeFromUnderlyingSubscriptionOnImmediatelyErrored() throws InterruptedException {
    // Regression test for https://github.com/Netflix/RxJava/pull/951:
    // an immediately-erroring source must still have its subscription released.
    final Subscription underlyingSubscription = mock(Subscription.class);
    Observable<String> immediatelyError = Observable.create(new OnSubscribe<String>() {
        public void call(Subscriber<? super String> subscriber) {
            subscriber.add(underlyingSubscription);
            subscriber.onError(new IOException("Error"));
        }
    });
    TestScheduler scheduler = new TestScheduler();
    Observable<String> timedObservable = immediatelyError.timeout(1000, TimeUnit.MILLISECONDS,
            scheduler);
    @SuppressWarnings("unchecked")
    Observer<String> mockObserver = mock(Observer.class);
    timedObservable.subscribe(mockObserver);
    scheduler.advanceTimeBy(2000, TimeUnit.MILLISECONDS);
    InOrder ordered = inOrder(mockObserver);
    ordered.verify(mockObserver).onError(isA(IOException.class));
    ordered.verifyNoMoreInteractions();
    verify(underlyingSubscription, times(1)).unsubscribe();
}
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.spi;
import com.hazelcast.cache.impl.ICacheService;
import com.hazelcast.cache.impl.JCacheDetector;
import com.hazelcast.cardinality.impl.CardinalityEstimatorService;
import com.hazelcast.client.ClientExtension;
import com.hazelcast.client.HazelcastClientOfflineException;
import com.hazelcast.client.LoadBalancer;
import com.hazelcast.client.cache.impl.ClientCacheProxyFactory;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.config.ProxyFactoryConfig;
import com.hazelcast.client.impl.HazelcastClientInstanceImpl;
import com.hazelcast.client.impl.protocol.ClientMessage;
import com.hazelcast.client.impl.protocol.codec.ClientAddDistributedObjectListenerCodec;
import com.hazelcast.client.impl.protocol.codec.ClientCreateProxyCodec;
import com.hazelcast.client.impl.protocol.codec.ClientRemoveDistributedObjectListenerCodec;
import com.hazelcast.client.proxy.ClientAtomicLongProxy;
import com.hazelcast.client.proxy.ClientAtomicReferenceProxy;
import com.hazelcast.client.proxy.ClientCardinalityEstimatorProxy;
import com.hazelcast.client.proxy.ClientCountDownLatchProxy;
import com.hazelcast.client.proxy.ClientDurableExecutorServiceProxy;
import com.hazelcast.client.proxy.ClientExecutorServiceProxy;
import com.hazelcast.client.proxy.ClientReliableIdGeneratorProxy;
import com.hazelcast.client.proxy.ClientIdGeneratorProxy;
import com.hazelcast.client.proxy.ClientListProxy;
import com.hazelcast.client.proxy.ClientLockProxy;
import com.hazelcast.client.proxy.ClientMapReduceProxy;
import com.hazelcast.client.proxy.ClientMultiMapProxy;
import com.hazelcast.client.proxy.ClientQueueProxy;
import com.hazelcast.client.proxy.ClientReliableTopicProxy;
import com.hazelcast.client.proxy.ClientReplicatedMapProxy;
import com.hazelcast.client.proxy.ClientRingbufferProxy;
import com.hazelcast.client.proxy.ClientScheduledExecutorProxy;
import com.hazelcast.client.proxy.ClientSemaphoreProxy;
import com.hazelcast.client.proxy.ClientSetProxy;
import com.hazelcast.client.proxy.ClientTopicProxy;
import com.hazelcast.client.proxy.txn.xa.XAResourceProxy;
import com.hazelcast.client.spi.impl.AbstractClientInvocationService;
import com.hazelcast.client.spi.impl.ClientInvocation;
import com.hazelcast.client.spi.impl.ClientProxyFactoryWithContext;
import com.hazelcast.client.spi.impl.ClientServiceNotFoundException;
import com.hazelcast.client.spi.impl.ListenerMessageCodec;
import com.hazelcast.client.spi.impl.listener.LazyDistributedObjectEvent;
import com.hazelcast.collection.impl.list.ListService;
import com.hazelcast.collection.impl.queue.QueueService;
import com.hazelcast.collection.impl.set.SetService;
import com.hazelcast.concurrent.atomiclong.AtomicLongService;
import com.hazelcast.concurrent.atomicreference.AtomicReferenceService;
import com.hazelcast.concurrent.countdownlatch.CountDownLatchService;
import com.hazelcast.reliableidgen.impl.ReliableIdGeneratorService;
import com.hazelcast.concurrent.idgen.IdGeneratorService;
import com.hazelcast.concurrent.lock.LockServiceImpl;
import com.hazelcast.concurrent.semaphore.SemaphoreService;
import com.hazelcast.config.ListenerConfig;
import com.hazelcast.core.DistributedObject;
import com.hazelcast.core.DistributedObjectEvent;
import com.hazelcast.core.DistributedObjectListener;
import com.hazelcast.core.HazelcastException;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IAtomicLong;
import com.hazelcast.core.Member;
import com.hazelcast.core.OperationTimeoutException;
import com.hazelcast.durableexecutor.impl.DistributedDurableExecutorService;
import com.hazelcast.executor.impl.DistributedExecutorService;
import com.hazelcast.map.impl.MapService;
import com.hazelcast.mapreduce.impl.MapReduceService;
import com.hazelcast.multimap.impl.MultiMapService;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.ClassLoaderUtil;
import com.hazelcast.replicatedmap.impl.ReplicatedMapService;
import com.hazelcast.ringbuffer.impl.RingbufferService;
import com.hazelcast.scheduledexecutor.impl.DistributedScheduledExecutorService;
import com.hazelcast.spi.DistributedObjectNamespace;
import com.hazelcast.spi.ObjectNamespace;
import com.hazelcast.topic.impl.TopicService;
import com.hazelcast.topic.impl.reliable.ReliableTopicService;
import com.hazelcast.transaction.impl.xa.XAService;
import com.hazelcast.util.EmptyStatement;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import static com.hazelcast.util.ExceptionUtil.rethrow;
import static com.hazelcast.util.ServiceLoader.classIterator;
/**
* The ProxyManager handles client proxy instantiation and retrieval at start and runtime by registering
* corresponding service manager names and their {@link com.hazelcast.client.spi.ClientProxyFactory}s.
*/
@SuppressWarnings({"checkstyle:classfanoutcomplexity", "checkstyle:classdataabstractioncoupling"})
public final class ProxyManager {
// Lookup id used for ServiceLoader-style discovery of ClientProxyDescriptorProvider implementations.
private static final String PROVIDER_ID = ClientProxyDescriptorProvider.class.getCanonicalName();
// Proxy constructor shapes: legacy proxies take (serviceName, id);
// context-aware proxies additionally take a ClientContext.
private static final Class[] LEGACY_CONSTRUCTOR_ARGUMENT_TYPES = new Class[]{String.class, String.class};
private static final Class[] CONSTRUCTOR_ARGUMENT_TYPES = new Class[]{String.class, String.class, ClientContext.class};
// Registered proxy factories, keyed by service name.
private final ConcurrentMap<String, ClientProxyFactory> proxyFactories = new ConcurrentHashMap<String, ClientProxyFactory>();
// Created (or in-flight) proxies, keyed by (service name, object id) namespace.
// Values are futures so concurrent getOrCreateProxy calls for the same object
// block on a single creation instead of creating duplicates.
private final ConcurrentMap<ObjectNamespace, ClientProxyFuture> proxies
= new ConcurrentHashMap<ObjectNamespace, ClientProxyFuture>();
// Codec used to (de)register the cluster-wide distributed-object listener.
private final ListenerMessageCodec distributedObjectListenerCodec = new ListenerMessageCodec() {
@Override
public ClientMessage encodeAddRequest(boolean localOnly) {
return ClientAddDistributedObjectListenerCodec.encodeRequest(localOnly);
}
@Override
public String decodeAddResponse(ClientMessage clientMessage) {
return ClientAddDistributedObjectListenerCodec.decodeResponse(clientMessage).response;
}
@Override
public ClientMessage encodeRemoveRequest(String realRegistrationId) {
return ClientRemoveDistributedObjectListenerCodec.encodeRequest(realRegistrationId);
}
@Override
public boolean decodeRemoveResponse(ClientMessage clientMessage) {
return ClientRemoveDistributedObjectListenerCodec.decodeResponse(clientMessage).response;
}
};
private final HazelcastClientInstanceImpl client;
private ClientContext context;
// Retry pacing for proxy initialization; populated from the invocation service in init().
private long invocationRetryPauseMillis;
private long invocationTimeoutMillis;
public ProxyManager(HazelcastClientInstanceImpl client) {
this.client = client;
// Eagerly register any DistributedObjectListener supplied via client configuration.
List<ListenerConfig> listenerConfigs = client.getClientConfig().getListenerConfigs();
if (listenerConfigs != null && !listenerConfigs.isEmpty()) {
for (ListenerConfig listenerConfig : listenerConfigs) {
if (listenerConfig.getImplementation() instanceof DistributedObjectListener) {
addDistributedObjectListener((DistributedObjectListener) listenerConfig.getImplementation());
}
}
}
}
/**
* Registers the built-in proxy factories for all known services, any factories from the
* client configuration, and any discovered via {@link ClientProxyDescriptorProvider}s,
* then caches the invocation timeout/retry settings used by proxy initialization.
*/
@SuppressWarnings("checkstyle:methodlength")
public void init(ClientConfig config, ClientContext clientContext) {
context = clientContext;
// register defaults
register(MapService.SERVICE_NAME, createServiceProxyFactory(MapService.class));
// Cache proxies are only registered when a JCache implementation is on the classpath.
if (JCacheDetector.isJCacheAvailable(config.getClassLoader())) {
register(ICacheService.SERVICE_NAME, new ClientCacheProxyFactory(client));
}
register(QueueService.SERVICE_NAME, ClientQueueProxy.class);
register(MultiMapService.SERVICE_NAME, ClientMultiMapProxy.class);
register(ListService.SERVICE_NAME, ClientListProxy.class);
register(SetService.SERVICE_NAME, ClientSetProxy.class);
register(SemaphoreService.SERVICE_NAME, ClientSemaphoreProxy.class);
register(TopicService.SERVICE_NAME, ClientTopicProxy.class);
register(AtomicLongService.SERVICE_NAME, ClientAtomicLongProxy.class);
register(AtomicReferenceService.SERVICE_NAME, ClientAtomicReferenceProxy.class);
register(DistributedExecutorService.SERVICE_NAME, ClientExecutorServiceProxy.class);
register(DistributedDurableExecutorService.SERVICE_NAME, ClientDurableExecutorServiceProxy.class);
register(LockServiceImpl.SERVICE_NAME, ClientLockProxy.class);
register(CountDownLatchService.SERVICE_NAME, ClientCountDownLatchProxy.class);
register(MapReduceService.SERVICE_NAME, ClientMapReduceProxy.class);
register(ReplicatedMapService.SERVICE_NAME, ClientReplicatedMapProxy.class);
register(XAService.SERVICE_NAME, XAResourceProxy.class);
register(RingbufferService.SERVICE_NAME, ClientRingbufferProxy.class);
register(ReliableTopicService.SERVICE_NAME, new ClientProxyFactoryWithContext() {
@Override
public ClientProxy create(String id, ClientContext context) {
return new ClientReliableTopicProxy(id, context, client);
}
});
register(IdGeneratorService.SERVICE_NAME, new ClientProxyFactoryWithContext() {
@Override
public ClientProxy create(String id, ClientContext context) {
// Each id generator is backed by a dedicated IAtomicLong.
IAtomicLong atomicLong = client.getAtomicLong(IdGeneratorService.ATOMIC_LONG_NAME + id);
return new ClientIdGeneratorProxy(IdGeneratorService.SERVICE_NAME, id, context, atomicLong);
}
});
register(ReliableIdGeneratorService.SERVICE_NAME, ClientReliableIdGeneratorProxy.class);
register(CardinalityEstimatorService.SERVICE_NAME, ClientCardinalityEstimatorProxy.class);
register(DistributedScheduledExecutorService.SERVICE_NAME, ClientScheduledExecutorProxy.class);
// User-supplied proxy factories from configuration, instantiated by class name when needed.
ClassLoader classLoader = config.getClassLoader();
for (ProxyFactoryConfig proxyFactoryConfig : config.getProxyFactoryConfigs()) {
try {
ClientProxyFactory clientProxyFactory = proxyFactoryConfig.getFactoryImpl();
if (clientProxyFactory == null) {
String className = proxyFactoryConfig.getClassName();
clientProxyFactory = ClassLoaderUtil.newInstance(classLoader, className);
}
register(proxyFactoryConfig.getService(), clientProxyFactory);
} catch (Exception e) {
throw rethrow(e);
}
}
readProxyDescriptors();
AbstractClientInvocationService invocationService = (AbstractClientInvocationService) client.getInvocationService();
invocationTimeoutMillis = invocationService.getInvocationTimeoutMillis();
invocationRetryPauseMillis = invocationService.getInvocationRetryPauseMillis();
}
// Discovers ClientProxyDescriptorProvider implementations on the classpath and
// registers every proxy descriptor they expose. Any failure aborts initialization.
private void readProxyDescriptors() {
try {
ClassLoader classLoader = client.getClientConfig().getClassLoader();
Iterator<Class<ClientProxyDescriptorProvider>> iter = classIterator(ClientProxyDescriptorProvider.class,
PROVIDER_ID, classLoader);
while (iter.hasNext()) {
Class<ClientProxyDescriptorProvider> clazz = iter.next();
Constructor<ClientProxyDescriptorProvider> constructor = clazz.getDeclaredConstructor();
ClientProxyDescriptorProvider provider = constructor.newInstance();
ClientProxyDescriptor[] services = provider.createClientProxyDescriptors();
for (ClientProxyDescriptor serviceDescriptor : services) {
register(serviceDescriptor.getServiceName(), serviceDescriptor.getClientProxyClass());
}
}
} catch (Exception e) {
throw rethrow(e);
}
}
/**
* Creates a {@code ClientProxyFactory} for the supplied service class. Currently only the {@link MapService} is supported.
*
* @param service service for the proxy to create.
* @return {@code ClientProxyFactory} for the service.
* @throws java.lang.IllegalArgumentException if service is not known. Currently only the {@link MapService} is known
*/
private <T> ClientProxyFactory createServiceProxyFactory(Class<T> service) {
ClientExtension clientExtension = client.getClientExtension();
return clientExtension.createServiceProxyFactory(service);
}
/** Returns the client context assigned in {@link #init}. */
public ClientContext getContext() {
return context;
}
/** Returns the owning client instance. */
public HazelcastInstance getHazelcastInstance() {
return client;
}
/** Returns the registered factory for the given service name, or {@code null} if none. */
public ClientProxyFactory getClientProxyFactory(String serviceName) {
return proxyFactories.get(serviceName);
}
/**
* Registers a proxy factory for a service name.
*
* @throws IllegalArgumentException if a factory is already registered for the service
*/
public void register(String serviceName, ClientProxyFactory factory) {
if (proxyFactories.putIfAbsent(serviceName, factory) != null) {
throw new IllegalArgumentException("Factory for service " + serviceName + " is already registered!");
}
}
/**
* Registers a reflection-based factory that instantiates the given proxy class
* per object id (see {@link #instantiateClientProxy}).
*/
public void register(final String serviceName, final Class<? extends ClientProxy> proxyType) {
try {
register(serviceName, new ClientProxyFactoryWithContext() {
@Override
public ClientProxy create(String id, ClientContext context) {
return instantiateClientProxy(proxyType, serviceName, context, id);
}
});
} catch (Exception e) {
throw new HazelcastException("Factory for service " + serviceName + " could not be created for " + proxyType, e);
}
}
/**
* Returns the proxy for the given service/object id, creating and initializing it on first
* access. Concurrent callers for the same object race on a single ClientProxyFuture; the
* loser of the putIfAbsent race blocks on the winner's future instead of creating a second
* proxy. On creation failure the future entry is removed and the failure is rethrown to
* all waiters.
*/
public ClientProxy getOrCreateProxy(String service, String id) {
final ObjectNamespace ns = new DistributedObjectNamespace(service, id);
ClientProxyFuture proxyFuture = proxies.get(ns);
if (proxyFuture != null) {
return proxyFuture.get();
}
ClientProxyFactory factory = proxyFactories.get(service);
if (factory == null) {
throw new ClientServiceNotFoundException("No factory registered for service: " + service);
}
proxyFuture = new ClientProxyFuture();
ClientProxyFuture current = proxies.putIfAbsent(ns, proxyFuture);
if (current != null) {
// Another thread won the race; wait for its proxy.
return current.get();
}
try {
ClientProxy clientProxy = createClientProxy(id, factory);
initializeWithRetry(clientProxy);
proxyFuture.set(clientProxy);
return clientProxy;
} catch (Throwable e) {
// Remove the entry so a later call can retry, then propagate the
// failure to any thread already waiting on this future.
proxies.remove(ns);
proxyFuture.set(e);
throw rethrow(e);
}
}
// Context-aware factories receive the ClientContext at creation time;
// legacy factories have it injected afterwards.
private ClientProxy createClientProxy(String id, ClientProxyFactory factory) {
if (factory instanceof ClientProxyFactoryWithContext) {
return ((ClientProxyFactoryWithContext) factory).create(id, context);
}
return factory.create(id)
.setContext(context);
}
/** Drops the cached proxy for the given service/object id, if any. */
public void removeProxy(String service, String id) {
final ObjectNamespace ns = new DistributedObjectNamespace(service, id);
proxies.remove(ns);
}
// Initializes the proxy on a cluster member, retrying retry-safe failures with a
// fixed pause until the configured invocation timeout elapses.
private void initializeWithRetry(ClientProxy clientProxy) throws Exception {
long startMillis = System.currentTimeMillis();
while (System.currentTimeMillis() < startMillis + invocationTimeoutMillis) {
try {
initialize(clientProxy);
return;
} catch (Exception e) {
boolean retryable = isRetryable(e);
if (!retryable && e instanceof ExecutionException) {
// The interesting exception may be wrapped by the invocation future.
retryable = isRetryable(e.getCause());
}
if (retryable) {
sleepForProxyInitRetry();
} else {
throw e;
}
}
}
long elapsedTime = System.currentTimeMillis() - startMillis;
throw new OperationTimeoutException("Initializing " + clientProxy.getServiceName() + ":"
+ clientProxy.getName() + " is timed out after " + elapsedTime
+ " ms. Configured invocation timeout is " + invocationTimeoutMillis + " ms");
}
private boolean isRetryable(final Throwable t) {
return ClientInvocation.isRetrySafeException(t);
}
// Sleeps for the configured retry pause; interruption is deliberately swallowed
// so the retry loop keeps running until the timeout.
private void sleepForProxyInitRetry() {
try {
Thread.sleep(invocationRetryPauseMillis);
} catch (InterruptedException ignored) {
EmptyStatement.ignore(ignored);
}
}
// Sends the "create proxy" request to a member and runs the proxy's local
// initialization hook once the member acknowledges it.
private void initialize(ClientProxy clientProxy) throws Exception {
Address initializationTarget = findNextAddressToSendCreateRequest();
if (initializationTarget == null) {
throw new IOException("Not able to find a member to create proxy on!");
}
ClientMessage clientMessage = ClientCreateProxyCodec.encodeRequest(clientProxy.getDistributedObjectName(),
clientProxy.getServiceName(), initializationTarget);
new ClientInvocation(client, clientMessage, clientProxy.getServiceName(), initializationTarget).invoke().get();
clientProxy.onInitialize();
}
/**
* Picks the address of the next data member from the load balancer, preferring
* non-lite members; falls back to a lite member (or {@code null}) when no data
* member is found within one pass over the cluster size.
*
* @throws HazelcastClientOfflineException if the client sees an empty cluster
*/
public Address findNextAddressToSendCreateRequest() {
int clusterSize = client.getClientClusterService().getSize();
if (clusterSize == 0) {
throw new HazelcastClientOfflineException("Client connecting to cluster");
}
Member liteMember = null;
final LoadBalancer loadBalancer = client.getLoadBalancer();
for (int i = 0; i < clusterSize; i++) {
Member member = loadBalancer.next();
if (member != null && !member.isLiteMember()) {
return member.getAddress();
} else if (liteMember == null) {
liteMember = member;
}
}
return liteMember != null ? liteMember.getAddress() : null;
}
/** Returns all currently known proxies, blocking on any that are still being created. */
public Collection<? extends DistributedObject> getDistributedObjects() {
Collection<DistributedObject> objects = new LinkedList<DistributedObject>();
for (ClientProxyFuture future : proxies.values()) {
objects.add(future.get());
}
return objects;
}
/** Notifies every proxy of shutdown and clears the proxy cache. */
public void destroy() {
for (ClientProxyFuture future : proxies.values()) {
future.get().onShutdown();
}
proxies.clear();
}
/**
* Registers a listener for distributed-object create/destroy events.
*
* @return the listener registration id, usable with {@link #removeDistributedObjectListener}
*/
public String addDistributedObjectListener(final DistributedObjectListener listener) {
final EventHandler<ClientMessage> eventHandler = new DistributedObjectEventHandler(listener, this);
return client.getListenerService().registerListener(distributedObjectListenerCodec, eventHandler);
}
// Translates raw distributed-object event messages into LazyDistributedObjectEvents
// and dispatches them to the user-supplied listener.
private final class DistributedObjectEventHandler extends ClientAddDistributedObjectListenerCodec.AbstractEventHandler
implements EventHandler<ClientMessage> {
private final DistributedObjectListener listener;
private ProxyManager proxyManager;
private DistributedObjectEventHandler(DistributedObjectListener listener, ProxyManager proxyManager) {
this.listener = listener;
this.proxyManager = proxyManager;
}
@Override
public void handle(String name, String serviceName, String eventTypeName) {
final ObjectNamespace ns = new DistributedObjectNamespace(serviceName, name);
// Pass the local proxy if one already exists; the event can lazily create it otherwise.
ClientProxyFuture future = proxies.get(ns);
ClientProxy proxy = future == null ? null : future.get();
DistributedObjectEvent.EventType eventType = DistributedObjectEvent.EventType.valueOf(eventTypeName);
LazyDistributedObjectEvent event = new LazyDistributedObjectEvent(eventType, serviceName, name, proxy,
proxyManager);
if (DistributedObjectEvent.EventType.CREATED.equals(eventType)) {
listener.distributedObjectCreated(event);
} else if (DistributedObjectEvent.EventType.DESTROYED.equals(eventType)) {
listener.distributedObjectDestroyed(event);
}
}
@Override
public void beforeListenerRegister() {
}
@Override
public void onListenerRegister() {
}
}
/** Deregisters a listener previously added via {@link #addDistributedObjectListener}. */
public boolean removeDistributedObjectListener(String id) {
return client.getListenerService().deregisterListener(id);
}
// Single-assignment future for a proxy under construction. Waiters block on the
// monitor until set() publishes either the proxy or the creation failure.
private static class ClientProxyFuture {
// Holds either a ClientProxy or, on creation failure, a Throwable.
volatile Object proxy;
ClientProxy get() {
if (proxy == null) {
boolean interrupted = false;
synchronized (this) {
while (proxy == null) {
try {
wait();
} catch (InterruptedException e) {
// Keep waiting; restore the interrupt flag after the value arrives.
interrupted = true;
}
}
}
if (interrupted) {
Thread.currentThread().interrupt();
}
}
if (proxy instanceof Throwable) {
throw rethrow((Throwable) proxy);
}
return (ClientProxy) proxy;
}
void set(Object o) {
if (o == null) {
throw new IllegalArgumentException();
}
synchronized (this) {
proxy = o;
notifyAll();
}
}
}
// Instantiates a proxy reflectively, preferring the (service, id, context)
// constructor and falling back to the legacy (service, id) form.
private <T> T instantiateClientProxy(Class<T> proxyType, String serviceName, ClientContext context, String id) {
try {
try {
Constructor<T> constructor = proxyType.getConstructor(CONSTRUCTOR_ARGUMENT_TYPES);
return constructor.newInstance(serviceName, id, context);
} catch (NoSuchMethodException e) {
Constructor<T> constructor = proxyType.getConstructor(LEGACY_CONSTRUCTOR_ARGUMENT_TYPES);
return constructor.newInstance(serviceName, id);
}
} catch (Exception e) {
throw rethrow(e);
}
}
}
| |
/*
The MIT License (MIT)
Copyright (c) 2016
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package altermarkive.guardian;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.net.Uri;
import android.net.wifi.WifiManager;
import android.os.Bundle;
import android.provider.Settings.Secure;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
public class Positioning implements LocationListener {
private static Positioning singleton = null;
private Object lock = new Object();
private final Context context;
private Location gps;
private Location network;
private long once = 0;
private boolean replied = true;
private SimpleDateFormat format = new SimpleDateFormat("yyyy.MM.dd HH:mm:ss");
public static void trigger() {
if (null != singleton) {
singleton.run();
}
}
private Positioning(Context context) {
this.context = context;
LocationManager manager = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
gps = manager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
network = manager.getLastKnownLocation(LocationManager.NETWORK_PROVIDER);
reset();
}
public static void initiate(Context context) {
if (null == singleton) {
singleton = new Positioning(context);
}
}
private void run() {
enforce(context);
synchronized (lock) {
LocationManager manager = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
manager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, this);
manager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 0, 0, this);
once = System.currentTimeMillis();
replied = false;
}
}
private void reset() {
LocationManager manager = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
manager.removeUpdates(this);
int meters10 = 10;
int minutes10 = 10 * 60 * 1000;
manager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, minutes10, meters10, this);
}
private static void enforce(Context context) {
enforceWiFi(context);
enforceGPS(context);
}
private static void enforceWiFi(Context context) {
WifiManager wifi = (WifiManager) context.getSystemService(Context.WIFI_SERVICE);
wifi.setWifiEnabled(true);
}
@SuppressWarnings("deprecation")
private static void enforceGPS(Context context) {
LocationManager manager = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
if (manager.isProviderEnabled(LocationManager.GPS_PROVIDER)) {
return;
}
boolean stealth = false;
try {
PackageManager packages = context.getPackageManager();
PackageInfo info = packages.getPackageInfo("com.android.settings", PackageManager.GET_RECEIVERS);
if (info != null) {
for (ActivityInfo receiver : info.receivers) {
if (receiver.name.equals("com.android.settings.widget.SettingsAppWidgetProvider") && receiver.exported) {
stealth = true;
}
}
}
} catch (NameNotFoundException ignored) {
}
if (stealth) {
String provider = Secure.getString(context.getContentResolver(), Secure.LOCATION_PROVIDERS_ALLOWED);
if (!provider.contains("gps")) {
Intent poke = new Intent();
poke.setClassName("com.android.settings", "com.android.settings.widget.SettingsAppWidgetProvider");
poke.addCategory(Intent.CATEGORY_ALTERNATIVE);
poke.setData(Uri.parse("3"));
context.sendBroadcast(poke);
}
} else {
Intent intent = new Intent(android.provider.Settings.ACTION_LOCATION_SOURCE_SETTINGS);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
}
}
private double accuracy(Location location) {
if (null != location && location.hasAccuracy()) {
return (location.getAccuracy());
} else {
return (Double.POSITIVE_INFINITY);
}
}
@Override
public void onLocationChanged(Location location) {
enforce(context);
synchronized (lock) {
if (LocationManager.GPS_PROVIDER.equals(location.getProvider())) {
if (accuracy(location) < accuracy(gps)) {
gps = location;
}
}
if (LocationManager.NETWORK_PROVIDER.equals(location.getProvider())) {
if (accuracy(location) < accuracy(network)) {
gps = location;
}
}
long deadline = once + 120000;
long now = System.currentTimeMillis();
if (deadline <= now && !replied) {
int battery = Battery.level(context);
String message;
if (Double.isInfinite(accuracy(gps)) && Double.isInfinite(accuracy(network))) {
message = "Battery: %d%%; Location unknown";
message = String.format(Locale.US, message, battery);
} else {
if (accuracy(gps) < accuracy(network)) {
location = gps;
} else {
location = network;
}
double lat = location.getLatitude();
double lon = location.getLongitude();
int accuracy = (int) location.getAccuracy();
int altitude = (int) location.getAltitude();
int bearing = (int) location.getBearing();
int speed = (int) (location.getSpeed() * 60.0 * 60.0 / 1000.0);
String time = format.format(new Date(location.getTime()));
message = "Battery: %d%% Location: %s %.5f %.5f ~%dm ^%dm %ddeg %dkm/h http://maps.google.com/?q=%.5f,%.5f";
message = String.format(Locale.US, message, battery, time, lat, lon, accuracy, altitude, bearing, speed, lat, lon);
}
Messenger.sms(Contact.get(context), message);
reset();
replied = true;
}
}
}
// Re-runs enforce(context) whenever a provider's status changes; no
// status-specific handling is needed (enforce is defined outside this view).
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
    enforce(context);
}
// Re-runs enforce(context) when a location provider is enabled; the event
// itself is otherwise ignored.
@Override
public void onProviderEnabled(String provider) {
    enforce(context);
}
// Re-runs enforce(context) when a location provider is disabled; the event
// itself is otherwise ignored.
@Override
public void onProviderDisabled(String provider) {
    enforce(context);
}
}
| |
package org.sedorn.screenshotter.server.json;
/*
Copyright (c) 2008 JSON.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
import java.util.Iterator;
/**
* This provides static methods to convert an XML text into a JSONArray or
* JSONObject, and to covert a JSONArray or JSONObject into an XML text using
* the JsonML transform.
*
* @author JSON.org
* @version 2014-05-03
*/
public class JSONML {
/**
 * Parse XML values and store them in a JSONArray.
 * <p>Recursive descent: each call consumes the content of one element (or the
 * whole document when {@code ja} is null) and returns either the outermost
 * JsonML structure or the name of the close tag that ended this level.
 * @param x The XMLTokener containing the source string.
 * @param arrayForm true if array form, false if object form.
 * @param ja The JSONArray that is containing the current tag or null
 *      if we are at the outermost level.
 * @return A JSONArray if the value is the outermost tag, otherwise null.
 * @throws JSONException if the XML is malformed.
 */
private static Object parse(
    XMLTokener x,
    boolean arrayForm,
    JSONArray ja
) throws JSONException {
    String attribute;
    char c;
    String closeTag = null;
    int i;
    JSONArray newja = null;
    JSONObject newjo = null;
    Object token;
    String tagName = null;

// Test for and skip past these forms:
//      <!-- ... -->
//      <![  ... ]]>
//      <!   ...   >
//      <?   ...  ?>

    while (true) {
        if (!x.more()) {
            throw x.syntaxError("Bad XML");
        }
        token = x.nextContent();
        if (token == XML.LT) {
            token = x.nextToken();
            if (token instanceof Character) {
                if (token == XML.SLASH) {

// Close tag </
// Return the tag name so the caller can verify it matches its open tag.

                    token = x.nextToken();
                    if (!(token instanceof String)) {
                        throw new JSONException(
                                "Expected a closing name instead of '" +
                                token + "'.");
                    }
                    if (x.nextToken() != XML.GT) {
                        throw x.syntaxError("Misshaped close tag");
                    }
                    return token;
                } else if (token == XML.BANG) {

// <!
// Comment, CDATA section, or DTD/meta declaration.

                    c = x.next();
                    if (c == '-') {
                        if (x.next() == '-') {
                            x.skipPast("-->");
                        } else {
                            // Not a comment after all; push the char back.
                            x.back();
                        }
                    } else if (c == '[') {
                        token = x.nextToken();
                        if (token.equals("CDATA") && x.next() == '[') {
                            if (ja != null) {
                                ja.put(x.nextCDATA());
                            }
                        } else {
                            throw x.syntaxError("Expected 'CDATA['");
                        }
                    } else {
                        // Skip a <! ... > declaration, balancing nested < >.
                        i = 1;
                        do {
                            token = x.nextMeta();
                            if (token == null) {
                                throw x.syntaxError("Missing '>' after '<!'.");
                            } else if (token == XML.LT) {
                                i += 1;
                            } else if (token == XML.GT) {
                                i -= 1;
                            }
                        } while (i > 0);
                    }
                } else if (token == XML.QUEST) {

// <?  Processing instruction: skipped entirely.

                    x.skipPast("?>");
                } else {
                    throw x.syntaxError("Misshaped tag");
                }

// Open tag <

            } else {
                if (!(token instanceof String)) {
                    throw x.syntaxError("Bad tagName '" + token + "'.");
                }
                tagName = (String)token;
                newja = new JSONArray();
                newjo = new JSONObject();
                // Array form: [tagName, {attrs}?, children...].
                // Object form: {"tagName":..., attrs..., "childNodes":[...]}.
                if (arrayForm) {
                    newja.put(tagName);
                    if (ja != null) {
                        ja.put(newja);
                    }
                } else {
                    newjo.put("tagName", tagName);
                    if (ja != null) {
                        ja.put(newjo);
                    }
                }
                token = null;
                for (;;) {
                    if (token == null) {
                        token = x.nextToken();
                    }
                    if (token == null) {
                        throw x.syntaxError("Misshaped tag");
                    }
                    if (!(token instanceof String)) {
                        break;
                    }

// attribute = value

                    attribute = (String)token;
                    // In object form these keys are used by the transform itself.
                    if (!arrayForm && ("tagName".equals(attribute) || "childNode".equals(attribute))) {
                        throw x.syntaxError("Reserved attribute.");
                    }
                    token = x.nextToken();
                    if (token == XML.EQ) {
                        token = x.nextToken();
                        if (!(token instanceof String)) {
                            throw x.syntaxError("Missing value");
                        }
                        newjo.accumulate(attribute, XML.stringToValue((String)token));
                        token = null;
                    } else {
                        // Attribute without a value becomes the empty string.
                        newjo.accumulate(attribute, "");
                    }
                }
                if (arrayForm && newjo.length() > 0) {
                    newja.put(newjo);
                }

// Empty tag <.../>

                if (token == XML.SLASH) {
                    if (x.nextToken() != XML.GT) {
                        throw x.syntaxError("Misshaped tag");
                    }
                    if (ja == null) {
                        if (arrayForm) {
                            return newja;
                        } else {
                            return newjo;
                        }
                    }

// Content, between <...> and </...>

                } else {
                    if (token != XML.GT) {
                        throw x.syntaxError("Misshaped tag");
                    }
                    // Recurse to consume this element's children; the returned
                    // value is the close-tag name, which must match.
                    closeTag = (String)parse(x, arrayForm, newja);
                    if (closeTag != null) {
                        if (!closeTag.equals(tagName)) {
                            throw x.syntaxError("Mismatched '" + tagName +
                                    "' and '" + closeTag + "'");
                        }
                        tagName = null;
                        if (!arrayForm && newja.length() > 0) {
                            newjo.put("childNodes", newja);
                        }
                        if (ja == null) {
                            if (arrayForm) {
                                return newja;
                            } else {
                                return newjo;
                            }
                        }
                    }
                }
            }
        } else {
            // Plain text content between tags.
            if (ja != null) {
                ja.put(token instanceof String
                    ? XML.stringToValue((String)token)
                    : token);
            }
        }
    }
}
/**
 * Convert a well-formed (but not necessarily valid) XML string into a
 * JSONArray using the JsonML transform. Each XML tag is represented as
 * a JSONArray in which the first element is the tag name. If the tag has
 * attributes, then the second element will be JSONObject containing the
 * name/value pairs. If the tag contains children, then strings and
 * JSONArrays will represent the child tags.
 * Comments, prologs, DTDs, and <code>&lt;[ [ ]]></code> are ignored.
 * @param string The source string.
 * @return A JSONArray containing the structured data from the XML string.
 * @throws JSONException if the XML cannot be parsed.
 */
public static JSONArray toJSONArray(String string) throws JSONException {
    XMLTokener tokener = new XMLTokener(string);
    return toJSONArray(tokener);
}
/**
 * Convert a well-formed (but not necessarily valid) XML string into a
 * JSONArray using the JsonML transform. Each XML tag is represented as
 * a JSONArray in which the first element is the tag name. If the tag has
 * attributes, then the second element will be JSONObject containing the
 * name/value pairs. If the tag contains children, then strings and
 * JSONArrays will represent the child content and tags.
 * Comments, prologs, DTDs, and <code>&lt;[ [ ]]></code> are ignored.
 * @param x An XMLTokener.
 * @return A JSONArray containing the structured data from the XML string.
 * @throws JSONException if the XML cannot be parsed.
 */
public static JSONArray toJSONArray(XMLTokener x) throws JSONException {
    // Array form: parse() returns the outermost JsonML array.
    Object result = parse(x, true, null);
    return (JSONArray) result;
}
/**
 * Convert a well-formed (but not necessarily valid) XML string into a
 * JSONObject using the JsonML transform. Each XML tag is represented as
 * a JSONObject with a "tagName" property. If the tag has attributes, then
 * the attributes will be in the JSONObject as properties. If the tag
 * contains children, the object will have a "childNodes" property which
 * will be an array of strings and JsonML JSONObjects.
 * Comments, prologs, DTDs, and <code>&lt;[ [ ]]></code> are ignored.
 * @param x An XMLTokener of the XML source text.
 * @return A JSONObject containing the structured data from the XML string.
 * @throws JSONException if the XML cannot be parsed.
 */
public static JSONObject toJSONObject(XMLTokener x) throws JSONException {
    // Object form: parse() returns the outermost JsonML object.
    Object result = parse(x, false, null);
    return (JSONObject) result;
}
/**
 * Convert a well-formed (but not necessarily valid) XML string into a
 * JSONObject using the JsonML transform. Each XML tag is represented as
 * a JSONObject with a "tagName" property. If the tag has attributes, then
 * the attributes will be in the JSONObject as properties. If the tag
 * contains children, the object will have a "childNodes" property which
 * will be an array of strings and JsonML JSONObjects.
 * Comments, prologs, DTDs, and <code>&lt;[ [ ]]></code> are ignored.
 * @param string The XML source text.
 * @return A JSONObject containing the structured data from the XML string.
 * @throws JSONException if the XML cannot be parsed.
 */
public static JSONObject toJSONObject(String string) throws JSONException {
    XMLTokener tokener = new XMLTokener(string);
    return toJSONObject(tokener);
}
/**
 * Reverse the JSONML transformation, making an XML text from a JSONArray.
 * The first element is the tag name, an optional JSONObject in slot 1
 * supplies attributes, and any remaining elements become child content.
 * @param ja A JSONArray.
 * @return An XML string.
 * @throws JSONException if the array is not valid JsonML.
 */
public static String toString(JSONArray ja) throws JSONException {
    StringBuilder xml = new StringBuilder();

    // Emit <tagName
    String tagName = ja.getString(0);
    XML.noSpace(tagName);
    tagName = XML.escape(tagName);
    xml.append('<');
    xml.append(tagName);

    // A JSONObject in slot 1 holds the attributes; children start after it.
    int childIndex = 1;
    Object second = ja.opt(1);
    if (second instanceof JSONObject) {
        childIndex = 2;
        JSONObject attributes = (JSONObject) second;
        Iterator<String> keys = attributes.keys();
        while (keys.hasNext()) {
            String key = keys.next();
            XML.noSpace(key);
            String value = attributes.optString(key);
            if (value != null) {
                xml.append(' ');
                xml.append(XML.escape(key));
                xml.append('=');
                xml.append('"');
                xml.append(XML.escape(value));
                xml.append('"');
            }
        }
    }

    // Emit content in body; no children means a self-closing tag.
    int length = ja.length();
    if (childIndex >= length) {
        xml.append('/');
        xml.append('>');
    } else {
        xml.append('>');
        while (childIndex < length) {
            Object child = ja.get(childIndex);
            childIndex += 1;
            if (child != null) {
                if (child instanceof String) {
                    xml.append(XML.escape(child.toString()));
                } else if (child instanceof JSONObject) {
                    xml.append(toString((JSONObject) child));
                } else if (child instanceof JSONArray) {
                    xml.append(toString((JSONArray) child));
                } else {
                    xml.append(child.toString());
                }
            }
        }
        xml.append('<');
        xml.append('/');
        xml.append(tagName);
        xml.append('>');
    }
    return xml.toString();
}
/**
 * Reverse the JSONML transformation, making an XML text from a JSONObject.
 * The JSONObject must contain a "tagName" property. If it has children,
 * then it must have a "childNodes" property containing an array of objects.
 * The other properties are attributes with string values.
 * @param jo A JSONObject.
 * @return An XML string.
 * @throws JSONException if the object is not valid JsonML.
 */
public static String toString(JSONObject jo) throws JSONException {
    StringBuilder xml = new StringBuilder();

    // Emit <tagName; with no tag name, fall back to escaped JSON text.
    String tagName = jo.optString("tagName");
    if (tagName == null) {
        return XML.escape(jo.toString());
    }
    XML.noSpace(tagName);
    tagName = XML.escape(tagName);
    xml.append('<');
    xml.append(tagName);

    // Every property other than tagName/childNodes becomes an attribute.
    Iterator<String> keys = jo.keys();
    while (keys.hasNext()) {
        String key = keys.next();
        if (!"tagName".equals(key) && !"childNodes".equals(key)) {
            XML.noSpace(key);
            String value = jo.optString(key);
            if (value != null) {
                xml.append(' ');
                xml.append(XML.escape(key));
                xml.append('=');
                xml.append('"');
                xml.append(XML.escape(value));
                xml.append('"');
            }
        }
    }

    // "childNodes", when present, supplies the element body.
    JSONArray children = jo.optJSONArray("childNodes");
    if (children == null) {
        xml.append('/');
        xml.append('>');
    } else {
        xml.append('>');
        int length = children.length();
        for (int i = 0; i < length; i += 1) {
            Object child = children.get(i);
            if (child != null) {
                if (child instanceof String) {
                    xml.append(XML.escape(child.toString()));
                } else if (child instanceof JSONObject) {
                    xml.append(toString((JSONObject) child));
                } else if (child instanceof JSONArray) {
                    xml.append(toString((JSONArray) child));
                } else {
                    xml.append(child.toString());
                }
            }
        }
        xml.append('<');
        xml.append('/');
        xml.append(tagName);
        xml.append('>');
    }
    return xml.toString();
}
}
| |
// -*- mode:java; encoding:utf-8 -*-
// vim:set fileencoding=utf-8:
// @homepage@
package example;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.stream.IntStream;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableModel;
/**
 * Demo panel: builds a 21-row table model and shows it in a
 * {@link FishEyeTable} inside a scroll pane with the vertical
 * scroll bar disabled.
 */
public final class MainPanel extends JPanel {
  private MainPanel() {
    super(new BorderLayout());
    JTable table = new FishEyeTable(makeModel());
    table.setRowSelectionInterval(0, 0);
    JScrollPane scrollPane = new JScrollPane(table);
    scrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER);
    scrollPane.setPreferredSize(new Dimension(320, 240));
    add(scrollPane, BorderLayout.NORTH);
  }

  /** Builds the demo model: one seed row plus twenty generated rows. */
  private static TableModel makeModel() {
    String[] columnNames = {"String", "Integer", "Boolean"};
    Object[][] data = {
        {"aaa", -1, true}
    };
    DefaultTableModel model = new DefaultTableModel(data, columnNames) {
      // Derive each column's class from the first row so renderers match.
      @Override public Class<?> getColumnClass(int column) {
        return getValueAt(0, column).getClass();
      }
    };
    IntStream.range(0, 20)
        .mapToObj(i -> new Object[] {"Name: " + i, i, i % 2 == 0})
        .forEach(model::addRow);
    return model;
  }

  public static void main(String[] args) {
    EventQueue.invokeLater(MainPanel::createAndShowGui);
  }

  private static void createAndShowGui() {
    try {
      UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) {
      // Fall back to the default look and feel, but make the failure audible.
      ex.printStackTrace();
      Toolkit.getDefaultToolkit().beep();
    }
    JFrame frame = new JFrame("@title@");
    frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
    frame.getContentPane().add(new MainPanel());
    frame.pack();
    frame.setLocationRelativeTo(null);
    frame.setVisible(true);
  }
}
/**
 * Immutable per-row rendering context for the fish-eye effect: the height
 * (in pixels), font, and background color used for one row slot in the
 * fish-eye window.
 */
class FishEyeRowContext implements Serializable {
  private static final long serialVersionUID = 1L;
  // Row height in pixels.
  public final int height;
  // Font used to render cells in this slot.
  public final Font font;
  // Background color for cells in this slot.
  public final Color color;

  protected FishEyeRowContext(int height, Font font, Color color) {
    this.height = height;
    this.font = font;
    this.color = color;
  }
}
/**
 * A {@link JTable} with a "fish-eye" row effect: the rows inside a small
 * window around the focused row (hovered or selected) are drawn larger,
 * with progressively bigger fonts/heights toward the center, while all
 * remaining rows share the leftover viewport height.
 */
class FishEyeTable extends JTable {
  // Height/font/color ladder for the fish-eye window, symmetric around
  // the center slot (12, 18, 24, 32, 24, 18, 12).
  private final List<FishEyeRowContext> fishEyeRowList;
  // Font used for rows outside the fish-eye window.
  private final Font minFont;
  // Mouse/selection handler; transient because listeners are re-installed
  // in updateUI() and need not be serialized.
  private transient FishEyeTableHandler handler;

  protected FishEyeTable(TableModel m) {
    super(m);
    Font font = getFont();
    minFont = font.deriveFont(8f);
    Font font12 = font.deriveFont(10f);
    Font font18 = font.deriveFont(16f);
    Font font24 = font.deriveFont(22f);
    Font font32 = font.deriveFont(30f);
    Color color12 = new Color(0xFA_FA_FA);
    Color color18 = new Color(0xF5_F5_F5);
    Color color24 = new Color(0xF0_F0_F0);
    Color color32 = new Color(0xE6_E6_FA);
    fishEyeRowList = Arrays.asList(
        new FishEyeRowContext(12, font12, color12),
        new FishEyeRowContext(18, font18, color18),
        new FishEyeRowContext(24, font24, color24),
        new FishEyeRowContext(32, font32, color32),
        new FishEyeRowContext(24, font24, color24),
        new FishEyeRowContext(18, font18, color18),
        new FishEyeRowContext(12, font12, color12)
    );
  }

  @Override public void updateUI() {
    // Detach the old handler before the look-and-feel swap, then re-install
    // a fresh one so listeners are never registered twice.
    removeMouseListener(handler);
    removeMouseMotionListener(handler);
    getSelectionModel().removeListSelectionListener(handler);
    super.updateUI();
    setColumnSelectionAllowed(false);
    setRowSelectionAllowed(true);
    setFillsViewportHeight(true);
    handler = new FishEyeTableHandler();
    addMouseListener(handler);
    addMouseMotionListener(handler);
    getSelectionModel().addListSelectionListener(handler);
  }

  // Tracks which row currently has the fish-eye focus (via mouse position
  // or selection) and triggers a row-height relayout when it changes.
  private class FishEyeTableHandler extends MouseAdapter implements ListSelectionListener {
    protected int prevRow = -1;       // last focused row index
    protected int prevHeight;        // last known viewport extent height

    @Override public void mouseMoved(MouseEvent e) {
      update(rowAtPoint(e.getPoint()));
    }

    @Override public void mouseDragged(MouseEvent e) {
      update(rowAtPoint(e.getPoint()));
    }

    @Override public void mousePressed(MouseEvent e) {
      e.getComponent().repaint();
    }

    @Override public void valueChanged(ListSelectionEvent e) {
      if (e.getValueIsAdjusting()) {
        return;
      }
      update(getSelectedRow());
    }

    private void update(int row) {
      // Only re-run the (relatively expensive) height pass on a real change.
      if (prevRow == row) {
        return;
      }
      initRowHeight(prevHeight, row);
      prevRow = row;
    }
  }

  @Override public void doLayout() {
    super.doLayout();
    Container p = SwingUtilities.getAncestorOfClass(JViewport.class, this);
    if (!(p instanceof JViewport)) {
      return;
    }
    // Recompute row heights only when the viewport's visible height changes.
    int h = ((JViewport) p).getExtentSize().height;
    if (h == handler.prevHeight) {
      return;
    }
    initRowHeight(h, getSelectedRow());
    handler.prevHeight = h;
  }

  @Override public Component prepareRenderer(TableCellRenderer renderer, int row, int column) {
    Component c = super.prepareRenderer(renderer, row, column);
    int rowCount = getModel().getRowCount();
    Color color = Color.WHITE;
    Font font = minFont;
    int ccRow = handler.prevRow;
    int index = 0;
    int rd2 = (fishEyeRowList.size() - 1) / 2;
    // Walk a virtual row range starting at -rd2 so the fish-eye window can
    // extend past the top edge; rows inside [ccRow-rd2, ccRow+rd2] map onto
    // successive entries of fishEyeRowList.
    for (int i = -rd2; i < rowCount; i++) {
      if (ccRow - rd2 <= i && i <= ccRow + rd2) {
        if (i == row) {
          color = fishEyeRowList.get(index).color;
          font = fishEyeRowList.get(index).font;
          break;
        }
        index++;
      }
    }
    c.setBackground(color);
    c.setFont(font);
    if (isRowSelected(row)) {
      c.setBackground(getSelectionBackground());
    }
    return c;
  }

  // Number of fish-eye slots actually visible when the focus row is `idx`,
  // clipped at the top and bottom edges of the model.
  private int getViewableColoredRowCount(int idx) {
    int rd2 = (fishEyeRowList.size() - 1) / 2;
    int rc = getModel().getRowCount();
    if (rd2 - idx > 0) {
      return rd2 + 1 + idx;
    } else if (idx > rc - 1 - rd2 && idx < rc - 1 + rd2) {
      return rc - idx + rd2;
    }
    return fishEyeRowList.size();
  }

  // Assigns every row's height: fish-eye rows get their ladder heights and
  // all remaining rows evenly split the leftover viewport height (with the
  // rounding remainder spread one pixel at a time).
  protected void initRowHeight(int height, int ccRow) {
    int rd2 = (fishEyeRowList.size() - 1) / 2;
    int rowCount = getModel().getRowCount();
    int viewRc = getViewableColoredRowCount(ccRow);
    int viewH = getViewHeight(viewRc);
    int restRc = rowCount - viewRc;
    int restH = height - viewH;
    int restRh = Math.max(1, restH / restRc); // restRh = restRh > 0 ? restRh : 1;
    int restGap = restH - restRh * restRc;
    // System.out.format("%d-%d=%dx%d+%d=%d", height, viewH, restRc, restRh, restGap, restH);
    int index = -1;
    for (int i = -rd2; i < rowCount; i++) {
      int crh;
      if (ccRow - rd2 <= i && i <= ccRow + rd2) {
        // Inside the fish-eye window; advance the ladder index even for
        // virtual rows above the top so the ladder stays centered.
        index++;
        if (i < 0) {
          continue;
        }
        crh = fishEyeRowList.get(index).height;
      } else {
        if (i < 0) {
          continue;
        }
        crh = restRh + (restGap > 0 ? 1 : 0);
        restGap--;
      }
      setRowHeight(i, crh);
    }
  }

  // Total pixel height of the first `count` fish-eye slots.
  private int getViewHeight(int count) {
    int h = 0;
    for (int i = 0; i < count; i++) {
      h += fishEyeRowList.get(i).height;
    }
    return h;
  }
}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.metadata.id3;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.MetadataInputBuffer;
import com.google.android.exoplayer2.metadata.SimpleMetadataDecoder;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.ParsableBitArray;
import com.google.android.exoplayer2.util.ParsableByteArray;
import com.google.android.exoplayer2.util.Util;
import com.google.common.base.Ascii;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
/** Decodes ID3 tags. */
public final class Id3Decoder extends SimpleMetadataDecoder {
/**
 * A predicate for determining whether individual frames should be decoded.
 * The frame ID is passed one byte at a time; for version 2 tags (3-byte
 * frame IDs) the fourth byte is 0.
 */
public interface FramePredicate {

  /**
   * Returns whether a frame with the specified parameters should be decoded.
   *
   * @param majorVersion The major version of the ID3 tag.
   * @param id0 The first byte of the frame ID.
   * @param id1 The second byte of the frame ID.
   * @param id2 The third byte of the frame ID.
   * @param id3 The fourth byte of the frame ID.
   * @return Whether the frame should be decoded.
   */
  boolean evaluate(int majorVersion, int id0, int id1, int id2, int id3);
}
/** A predicate that indicates no frames should be decoded. */
public static final FramePredicate NO_FRAMES_PREDICATE =
    (majorVersion, id0, id1, id2, id3) -> false;

private static final String TAG = "Id3Decoder";

/** The first three bytes of a well formed ID3 tag header. */
public static final int ID3_TAG = 0x00494433; // ASCII "ID3"
/** Length of an ID3 tag header. */
public static final int ID3_HEADER_LENGTH = 10;

// ID3v2.3 frame flags (16-bit flag field following the frame size).
private static final int FRAME_FLAG_V3_IS_COMPRESSED = 0x0080;
private static final int FRAME_FLAG_V3_IS_ENCRYPTED = 0x0040;
private static final int FRAME_FLAG_V3_HAS_GROUP_IDENTIFIER = 0x0020;

// ID3v2.4 frame flags (same field position, different bit assignments).
private static final int FRAME_FLAG_V4_IS_COMPRESSED = 0x0008;
private static final int FRAME_FLAG_V4_IS_ENCRYPTED = 0x0004;
private static final int FRAME_FLAG_V4_HAS_GROUP_IDENTIFIER = 0x0040;
private static final int FRAME_FLAG_V4_IS_UNSYNCHRONIZED = 0x0002;
private static final int FRAME_FLAG_V4_HAS_DATA_LENGTH = 0x0001;

// Values of the text-encoding byte at the start of text-bearing frames.
private static final int ID3_TEXT_ENCODING_ISO_8859_1 = 0;
private static final int ID3_TEXT_ENCODING_UTF_16 = 1;
private static final int ID3_TEXT_ENCODING_UTF_16BE = 2;
private static final int ID3_TEXT_ENCODING_UTF_8 = 3;

// Frame filter; null means every frame is decoded.
@Nullable private final FramePredicate framePredicate;
/** Creates a decoder that attempts to decode all frames (no frame predicate). */
public Id3Decoder() {
  this(null);
}
/**
 * Creates a decoder that only decodes frames accepted by the given predicate.
 *
 * @param framePredicate Determines which frames are decoded. May be null to decode all frames.
 */
public Id3Decoder(@Nullable FramePredicate framePredicate) {
  this.framePredicate = framePredicate;
}
// Delegates to decode(byte[], int) using the buffer's backing array and limit.
@Override
@Nullable
@SuppressWarnings("ByteBufferBackingArray") // Buffer validated by SimpleMetadataDecoder.decode
protected Metadata decode(MetadataInputBuffer inputBuffer, ByteBuffer buffer) {
  return decode(buffer.array(), buffer.limit());
}
/**
 * Decodes ID3 tags.
 *
 * @param data The bytes to decode ID3 tags from.
 * @param size Amount of bytes in {@code data} to read.
 * @return A {@link Metadata} object containing the decoded ID3 tags, or null if the data could
 *     not be decoded.
 */
@Nullable
public Metadata decode(byte[] data, int size) {
  List<Id3Frame> id3Frames = new ArrayList<>();
  ParsableByteArray id3Data = new ParsableByteArray(data, size);

  @Nullable Id3Header id3Header = decodeHeader(id3Data);
  if (id3Header == null) {
    return null;
  }

  int startPosition = id3Data.getPosition();
  // Version 2 frame headers are 6 bytes (3-byte ID + 3-byte size); later
  // versions use 10 bytes (4-byte ID + 4-byte size + 2-byte flags).
  int frameHeaderSize = id3Header.majorVersion == 2 ? 6 : 10;
  int framesSize = id3Header.framesSize;
  if (id3Header.isUnsynchronized) {
    // Tag-level unsynchronization: strip it in place before parsing frames.
    framesSize = removeUnsynchronization(id3Data, id3Header.framesSize);
  }
  id3Data.setLimit(startPosition + framesSize);

  boolean unsignedIntFrameSizeHack = false;
  if (!validateFrames(id3Data, id3Header.majorVersion, frameHeaderSize, false)) {
    if (id3Header.majorVersion == 4 && validateFrames(id3Data, 4, frameHeaderSize, true)) {
      // Some v4 tags incorrectly write plain unsigned-int frame sizes rather
      // than synchsafe integers; retry parsing under that assumption.
      unsignedIntFrameSizeHack = true;
    } else {
      Log.w(TAG, "Failed to validate ID3 tag with majorVersion=" + id3Header.majorVersion);
      return null;
    }
  }

  // Decode frames until there isn't room for another frame header.
  while (id3Data.bytesLeft() >= frameHeaderSize) {
    @Nullable
    Id3Frame frame =
        decodeFrame(
            id3Header.majorVersion,
            id3Data,
            unsignedIntFrameSizeHack,
            frameHeaderSize,
            framePredicate);
    if (frame != null) {
      id3Frames.add(frame);
    }
  }

  return new Metadata(id3Frames);
}
/**
 * Reads and validates the 10-byte ID3 tag header, consuming any extended
 * header and adjusting the reported frames size for extended headers and
 * footers.
 *
 * @param data A {@link ParsableByteArray} from which the header should be read.
 * @return The parsed header, or null if the ID3 tag is unsupported.
 */
@Nullable
private static Id3Header decodeHeader(ParsableByteArray data) {
  if (data.bytesLeft() < ID3_HEADER_LENGTH) {
    Log.w(TAG, "Data too short to be an ID3 tag");
    return null;
  }

  int id = data.readUnsignedInt24();
  if (id != ID3_TAG) {
    Log.w(TAG, "Unexpected first three bytes of ID3 tag header: 0x" + String.format("%06X", id));
    return null;
  }

  int majorVersion = data.readUnsignedByte();
  data.skipBytes(1); // Skip minor version.
  int flags = data.readUnsignedByte();
  int framesSize = data.readSynchSafeInt();

  if (majorVersion == 2) {
    boolean isCompressed = (flags & 0x40) != 0;
    if (isCompressed) {
      Log.w(TAG, "Skipped ID3 tag with majorVersion=2 and undefined compression scheme");
      return null;
    }
  } else if (majorVersion == 3) {
    boolean hasExtendedHeader = (flags & 0x40) != 0;
    if (hasExtendedHeader) {
      int extendedHeaderSize = data.readInt(); // Size excluding size field.
      data.skipBytes(extendedHeaderSize);
      framesSize -= (extendedHeaderSize + 4);
    }
  } else if (majorVersion == 4) {
    boolean hasExtendedHeader = (flags & 0x40) != 0;
    if (hasExtendedHeader) {
      int extendedHeaderSize = data.readSynchSafeInt(); // Size including size field.
      data.skipBytes(extendedHeaderSize - 4);
      framesSize -= extendedHeaderSize;
    }
    boolean hasFooter = (flags & 0x10) != 0;
    if (hasFooter) {
      framesSize -= 10;
    }
  } else {
    Log.w(TAG, "Skipped ID3 tag with unsupported majorVersion=" + majorVersion);
    return null;
  }

  // isUnsynchronized is advisory only in version 4. Frame level flags are used instead.
  boolean isUnsynchronized = majorVersion < 4 && (flags & 0x80) != 0;
  return new Id3Header(majorVersion, isUnsynchronized, framesSize);
}
/**
 * Walks all frame headers to check the tag is self-consistent, without
 * decoding any frame data. Always restores the read position afterwards.
 *
 * @param id3Data Tag data positioned at the first frame header.
 * @param majorVersion The major version to validate against.
 * @param frameHeaderSize Frame header size for that version (6 or 10 bytes).
 * @param unsignedIntFrameSizeHack Whether v4 frame sizes should be read as
 *     plain unsigned ints instead of synchsafe integers.
 * @return Whether every frame header (up to zero padding) is plausible.
 */
private static boolean validateFrames(
    ParsableByteArray id3Data,
    int majorVersion,
    int frameHeaderSize,
    boolean unsignedIntFrameSizeHack) {
  int startPosition = id3Data.getPosition();
  try {
    while (id3Data.bytesLeft() >= frameHeaderSize) {
      // Read the next frame header.
      int id;
      long frameSize;
      int flags;
      if (majorVersion >= 3) {
        id = id3Data.readInt();
        frameSize = id3Data.readUnsignedInt();
        flags = id3Data.readUnsignedShort();
      } else {
        id = id3Data.readUnsignedInt24();
        frameSize = id3Data.readUnsignedInt24();
        flags = 0;
      }
      // Validate the frame header and skip to the next one.
      if (id == 0 && frameSize == 0 && flags == 0) {
        // We've reached zero padding after the end of the final frame.
        return true;
      } else {
        if (majorVersion == 4 && !unsignedIntFrameSizeHack) {
          // Parse the data size as a synchsafe integer, as per the spec.
          if ((frameSize & 0x808080L) != 0) {
            // A synchsafe integer must have the top bit of each byte clear.
            return false;
          }
          frameSize =
              (frameSize & 0xFF)
                  | (((frameSize >> 8) & 0xFF) << 7)
                  | (((frameSize >> 16) & 0xFF) << 14)
                  | (((frameSize >> 24) & 0xFF) << 21);
        }
        // Flags implying extra per-frame bytes raise the minimum valid size.
        boolean hasGroupIdentifier = false;
        boolean hasDataLength = false;
        if (majorVersion == 4) {
          hasGroupIdentifier = (flags & FRAME_FLAG_V4_HAS_GROUP_IDENTIFIER) != 0;
          hasDataLength = (flags & FRAME_FLAG_V4_HAS_DATA_LENGTH) != 0;
        } else if (majorVersion == 3) {
          hasGroupIdentifier = (flags & FRAME_FLAG_V3_HAS_GROUP_IDENTIFIER) != 0;
          // A V3 frame has data length if and only if it's compressed.
          hasDataLength = (flags & FRAME_FLAG_V3_IS_COMPRESSED) != 0;
        }
        int minimumFrameSize = 0;
        if (hasGroupIdentifier) {
          minimumFrameSize++;
        }
        if (hasDataLength) {
          minimumFrameSize += 4;
        }
        if (frameSize < minimumFrameSize) {
          return false;
        }
        if (id3Data.bytesLeft() < frameSize) {
          return false;
        }
        id3Data.skipBytes((int) frameSize); // flags
      }
    }
    return true;
  } finally {
    // Validation must not consume the data that decoding will read.
    id3Data.setPosition(startPosition);
  }
}
/**
 * Decodes the single frame at the current position of {@code id3Data},
 * dispatching on the frame ID, and leaves the position at the start of the
 * next frame regardless of success.
 *
 * @return The decoded frame, or null if the frame was padding, filtered out,
 *     unsupported (compressed/encrypted), malformed, or used an unsupported
 *     character encoding.
 */
@Nullable
private static Id3Frame decodeFrame(
    int majorVersion,
    ParsableByteArray id3Data,
    boolean unsignedIntFrameSizeHack,
    int frameHeaderSize,
    @Nullable FramePredicate framePredicate) {
  // Frame ID: 3 bytes in version 2, 4 bytes in versions 3 and 4.
  int frameId0 = id3Data.readUnsignedByte();
  int frameId1 = id3Data.readUnsignedByte();
  int frameId2 = id3Data.readUnsignedByte();
  int frameId3 = majorVersion >= 3 ? id3Data.readUnsignedByte() : 0;

  int frameSize;
  if (majorVersion == 4) {
    frameSize = id3Data.readUnsignedIntToInt();
    if (!unsignedIntFrameSizeHack) {
      // Spec-compliant v4 sizes are synchsafe: 7 data bits per byte.
      frameSize =
          (frameSize & 0xFF)
              | (((frameSize >> 8) & 0xFF) << 7)
              | (((frameSize >> 16) & 0xFF) << 14)
              | (((frameSize >> 24) & 0xFF) << 21);
    }
  } else if (majorVersion == 3) {
    frameSize = id3Data.readUnsignedIntToInt();
  } else /* id3Header.majorVersion == 2 */ {
    frameSize = id3Data.readUnsignedInt24();
  }

  int flags = majorVersion >= 3 ? id3Data.readUnsignedShort() : 0;
  if (frameId0 == 0
      && frameId1 == 0
      && frameId2 == 0
      && frameId3 == 0
      && frameSize == 0
      && flags == 0) {
    // We must be reading zero padding at the end of the tag.
    id3Data.setPosition(id3Data.limit());
    return null;
  }

  int nextFramePosition = id3Data.getPosition() + frameSize;
  if (nextFramePosition > id3Data.limit()) {
    Log.w(TAG, "Frame size exceeds remaining tag data");
    id3Data.setPosition(id3Data.limit());
    return null;
  }

  if (framePredicate != null
      && !framePredicate.evaluate(majorVersion, frameId0, frameId1, frameId2, frameId3)) {
    // Filtered by the predicate.
    id3Data.setPosition(nextFramePosition);
    return null;
  }

  // Frame flags.
  boolean isCompressed = false;
  boolean isEncrypted = false;
  boolean isUnsynchronized = false;
  boolean hasDataLength = false;
  boolean hasGroupIdentifier = false;
  if (majorVersion == 3) {
    isCompressed = (flags & FRAME_FLAG_V3_IS_COMPRESSED) != 0;
    isEncrypted = (flags & FRAME_FLAG_V3_IS_ENCRYPTED) != 0;
    hasGroupIdentifier = (flags & FRAME_FLAG_V3_HAS_GROUP_IDENTIFIER) != 0;
    // A V3 frame has data length if and only if it's compressed.
    hasDataLength = isCompressed;
  } else if (majorVersion == 4) {
    hasGroupIdentifier = (flags & FRAME_FLAG_V4_HAS_GROUP_IDENTIFIER) != 0;
    isCompressed = (flags & FRAME_FLAG_V4_IS_COMPRESSED) != 0;
    isEncrypted = (flags & FRAME_FLAG_V4_IS_ENCRYPTED) != 0;
    isUnsynchronized = (flags & FRAME_FLAG_V4_IS_UNSYNCHRONIZED) != 0;
    hasDataLength = (flags & FRAME_FLAG_V4_HAS_DATA_LENGTH) != 0;
  }

  if (isCompressed || isEncrypted) {
    Log.w(TAG, "Skipping unsupported compressed or encrypted frame");
    id3Data.setPosition(nextFramePosition);
    return null;
  }

  // Skip optional per-frame prefix bytes and shrink the body size to match.
  if (hasGroupIdentifier) {
    frameSize--;
    id3Data.skipBytes(1);
  }
  if (hasDataLength) {
    frameSize -= 4;
    id3Data.skipBytes(4);
  }
  if (isUnsynchronized) {
    frameSize = removeUnsynchronization(id3Data, frameSize);
  }

  try {
    // Dispatch on the frame ID (version 2 uses 3-character IDs, so the
    // fourth character check is skipped for it).
    Id3Frame frame;
    if (frameId0 == 'T'
        && frameId1 == 'X'
        && frameId2 == 'X'
        && (majorVersion == 2 || frameId3 == 'X')) {
      frame = decodeTxxxFrame(id3Data, frameSize);
    } else if (frameId0 == 'T') {
      String id = getFrameId(majorVersion, frameId0, frameId1, frameId2, frameId3);
      frame = decodeTextInformationFrame(id3Data, frameSize, id);
    } else if (frameId0 == 'W'
        && frameId1 == 'X'
        && frameId2 == 'X'
        && (majorVersion == 2 || frameId3 == 'X')) {
      frame = decodeWxxxFrame(id3Data, frameSize);
    } else if (frameId0 == 'W') {
      String id = getFrameId(majorVersion, frameId0, frameId1, frameId2, frameId3);
      frame = decodeUrlLinkFrame(id3Data, frameSize, id);
    } else if (frameId0 == 'P' && frameId1 == 'R' && frameId2 == 'I' && frameId3 == 'V') {
      frame = decodePrivFrame(id3Data, frameSize);
    } else if (frameId0 == 'G'
        && frameId1 == 'E'
        && frameId2 == 'O'
        && (frameId3 == 'B' || majorVersion == 2)) {
      frame = decodeGeobFrame(id3Data, frameSize);
    } else if (majorVersion == 2
        ? (frameId0 == 'P' && frameId1 == 'I' && frameId2 == 'C')
        : (frameId0 == 'A' && frameId1 == 'P' && frameId2 == 'I' && frameId3 == 'C')) {
      frame = decodeApicFrame(id3Data, frameSize, majorVersion);
    } else if (frameId0 == 'C'
        && frameId1 == 'O'
        && frameId2 == 'M'
        && (frameId3 == 'M' || majorVersion == 2)) {
      frame = decodeCommentFrame(id3Data, frameSize);
    } else if (frameId0 == 'C' && frameId1 == 'H' && frameId2 == 'A' && frameId3 == 'P') {
      frame =
          decodeChapterFrame(
              id3Data,
              frameSize,
              majorVersion,
              unsignedIntFrameSizeHack,
              frameHeaderSize,
              framePredicate);
    } else if (frameId0 == 'C' && frameId1 == 'T' && frameId2 == 'O' && frameId3 == 'C') {
      frame =
          decodeChapterTOCFrame(
              id3Data,
              frameSize,
              majorVersion,
              unsignedIntFrameSizeHack,
              frameHeaderSize,
              framePredicate);
    } else if (frameId0 == 'M' && frameId1 == 'L' && frameId2 == 'L' && frameId3 == 'T') {
      frame = decodeMlltFrame(id3Data, frameSize);
    } else {
      String id = getFrameId(majorVersion, frameId0, frameId1, frameId2, frameId3);
      frame = decodeBinaryFrame(id3Data, frameSize, id);
    }
    if (frame == null) {
      Log.w(
          TAG,
          "Failed to decode frame: id="
              + getFrameId(majorVersion, frameId0, frameId1, frameId2, frameId3)
              + ", frameSize="
              + frameSize);
    }
    return frame;
  } catch (UnsupportedEncodingException e) {
    Log.w(TAG, "Unsupported character encoding");
    return null;
  } finally {
    // Always advance to the next frame, even when decoding failed partway.
    id3Data.setPosition(nextFramePosition);
  }
}
/**
 * Decodes a TXXX (user-defined text) frame: an encoding byte, a terminated
 * description, and a value. Returns null when the frame is too short to
 * hold even the encoding byte.
 */
@Nullable
private static TextInformationFrame decodeTxxxFrame(ParsableByteArray id3Data, int frameSize)
    throws UnsupportedEncodingException {
  if (frameSize < 1) {
    // Frame is malformed.
    return null;
  }
  int encoding = id3Data.readUnsignedByte();
  String charset = getCharsetName(encoding);
  byte[] frameData = new byte[frameSize - 1];
  id3Data.readBytes(frameData, 0, frameSize - 1);
  int descriptionEnd = indexOfEos(frameData, 0, encoding);
  String description = new String(frameData, 0, descriptionEnd, charset);
  int valueStart = descriptionEnd + delimiterLength(encoding);
  int valueEnd = indexOfEos(frameData, valueStart, encoding);
  String value = decodeStringIfValid(frameData, valueStart, valueEnd, charset);
  return new TextInformationFrame("TXXX", description, value);
}
@Nullable
private static TextInformationFrame decodeTextInformationFrame(
ParsableByteArray id3Data, int frameSize, String id) throws UnsupportedEncodingException {
if (frameSize < 1) {
// Frame is malformed.
return null;
}
int encoding = id3Data.readUnsignedByte();
String charset = getCharsetName(encoding);
byte[] data = new byte[frameSize - 1];
id3Data.readBytes(data, 0, frameSize - 1);
int valueEndIndex = indexOfEos(data, 0, encoding);
String value = new String(data, 0, valueEndIndex, charset);
return new TextInformationFrame(id, null, value);
}
@Nullable
private static UrlLinkFrame decodeWxxxFrame(ParsableByteArray id3Data, int frameSize)
throws UnsupportedEncodingException {
if (frameSize < 1) {
// Frame is malformed.
return null;
}
int encoding = id3Data.readUnsignedByte();
String charset = getCharsetName(encoding);
byte[] data = new byte[frameSize - 1];
id3Data.readBytes(data, 0, frameSize - 1);
int descriptionEndIndex = indexOfEos(data, 0, encoding);
String description = new String(data, 0, descriptionEndIndex, charset);
int urlStartIndex = descriptionEndIndex + delimiterLength(encoding);
int urlEndIndex = indexOfZeroByte(data, urlStartIndex);
String url = decodeStringIfValid(data, urlStartIndex, urlEndIndex, "ISO-8859-1");
return new UrlLinkFrame("WXXX", description, url);
}
private static UrlLinkFrame decodeUrlLinkFrame(
ParsableByteArray id3Data, int frameSize, String id) throws UnsupportedEncodingException {
byte[] data = new byte[frameSize];
id3Data.readBytes(data, 0, frameSize);
int urlEndIndex = indexOfZeroByte(data, 0);
String url = new String(data, 0, urlEndIndex, "ISO-8859-1");
return new UrlLinkFrame(id, null, url);
}
private static PrivFrame decodePrivFrame(ParsableByteArray id3Data, int frameSize)
throws UnsupportedEncodingException {
byte[] data = new byte[frameSize];
id3Data.readBytes(data, 0, frameSize);
int ownerEndIndex = indexOfZeroByte(data, 0);
String owner = new String(data, 0, ownerEndIndex, "ISO-8859-1");
int privateDataStartIndex = ownerEndIndex + 1;
byte[] privateData = copyOfRangeIfValid(data, privateDataStartIndex, data.length);
return new PrivFrame(owner, privateData);
}
private static GeobFrame decodeGeobFrame(ParsableByteArray id3Data, int frameSize)
throws UnsupportedEncodingException {
int encoding = id3Data.readUnsignedByte();
String charset = getCharsetName(encoding);
byte[] data = new byte[frameSize - 1];
id3Data.readBytes(data, 0, frameSize - 1);
int mimeTypeEndIndex = indexOfZeroByte(data, 0);
String mimeType = new String(data, 0, mimeTypeEndIndex, "ISO-8859-1");
int filenameStartIndex = mimeTypeEndIndex + 1;
int filenameEndIndex = indexOfEos(data, filenameStartIndex, encoding);
String filename = decodeStringIfValid(data, filenameStartIndex, filenameEndIndex, charset);
int descriptionStartIndex = filenameEndIndex + delimiterLength(encoding);
int descriptionEndIndex = indexOfEos(data, descriptionStartIndex, encoding);
String description =
decodeStringIfValid(data, descriptionStartIndex, descriptionEndIndex, charset);
int objectDataStartIndex = descriptionEndIndex + delimiterLength(encoding);
byte[] objectData = copyOfRangeIfValid(data, objectDataStartIndex, data.length);
return new GeobFrame(mimeType, filename, description, objectData);
}
  /**
   * Decodes an APIC (attached picture) frame, or a v2.2 PIC frame.
   *
   * <p>Frame layout: encoding byte, mime type (3 fixed bytes in v2.2, zero-terminated ISO-8859-1
   * otherwise), picture type byte, description (in the declared encoding), then raw picture data.
   */
  private static ApicFrame decodeApicFrame(
      ParsableByteArray id3Data, int frameSize, int majorVersion)
      throws UnsupportedEncodingException {
    int encoding = id3Data.readUnsignedByte();
    String charset = getCharsetName(encoding);
    byte[] data = new byte[frameSize - 1];
    id3Data.readBytes(data, 0, frameSize - 1);
    String mimeType;
    int mimeTypeEndIndex;
    if (majorVersion == 2) {
      // v2.2 PIC frames carry a fixed 3-character image format instead of a full mime type, so the
      // "end index" is the last of those 3 bytes (index 2).
      mimeTypeEndIndex = 2;
      mimeType = "image/" + Ascii.toLowerCase(new String(data, 0, 3, "ISO-8859-1"));
      if ("image/jpg".equals(mimeType)) {
        mimeType = "image/jpeg";
      }
    } else {
      mimeTypeEndIndex = indexOfZeroByte(data, 0);
      mimeType = Ascii.toLowerCase(new String(data, 0, mimeTypeEndIndex, "ISO-8859-1"));
      // Normalize bare subtypes (e.g. "png") to full image mime types.
      if (mimeType.indexOf('/') == -1) {
        mimeType = "image/" + mimeType;
      }
    }
    // The picture type byte immediately follows the mime type / image format.
    int pictureType = data[mimeTypeEndIndex + 1] & 0xFF;
    int descriptionStartIndex = mimeTypeEndIndex + 2;
    int descriptionEndIndex = indexOfEos(data, descriptionStartIndex, encoding);
    String description =
        new String(
            data, descriptionStartIndex, descriptionEndIndex - descriptionStartIndex, charset);
    // Everything after the description terminator is the raw picture payload.
    int pictureDataStartIndex = descriptionEndIndex + delimiterLength(encoding);
    byte[] pictureData = copyOfRangeIfValid(data, pictureDataStartIndex, data.length);
    return new ApicFrame(mimeType, description, pictureType, pictureData);
  }
@Nullable
private static CommentFrame decodeCommentFrame(ParsableByteArray id3Data, int frameSize)
throws UnsupportedEncodingException {
if (frameSize < 4) {
// Frame is malformed.
return null;
}
int encoding = id3Data.readUnsignedByte();
String charset = getCharsetName(encoding);
byte[] data = new byte[3];
id3Data.readBytes(data, 0, 3);
String language = new String(data, 0, 3);
data = new byte[frameSize - 4];
id3Data.readBytes(data, 0, frameSize - 4);
int descriptionEndIndex = indexOfEos(data, 0, encoding);
String description = new String(data, 0, descriptionEndIndex, charset);
int textStartIndex = descriptionEndIndex + delimiterLength(encoding);
int textEndIndex = indexOfEos(data, textStartIndex, encoding);
String text = decodeStringIfValid(data, textStartIndex, textEndIndex, charset);
return new CommentFrame(language, description, text);
}
  /**
   * Decodes a CHAP (chapter) frame: a zero-terminated chapter id, start/end times, start/end byte
   * offsets, followed by zero or more embedded sub-frames decoded recursively via {@code
   * decodeFrame}.
   */
  private static ChapterFrame decodeChapterFrame(
      ParsableByteArray id3Data,
      int frameSize,
      int majorVersion,
      boolean unsignedIntFrameSizeHack,
      int frameHeaderSize,
      @Nullable FramePredicate framePredicate)
      throws UnsupportedEncodingException {
    int framePosition = id3Data.getPosition();
    // The chapter id is a zero-terminated ISO-8859-1 string at the start of the frame body.
    int chapterIdEndIndex = indexOfZeroByte(id3Data.getData(), framePosition);
    String chapterId =
        new String(
            id3Data.getData(), framePosition, chapterIdEndIndex - framePosition, "ISO-8859-1");
    id3Data.setPosition(chapterIdEndIndex + 1);
    int startTime = id3Data.readInt();
    int endTime = id3Data.readInt();
    // 0xFFFFFFFF is the spec's sentinel for "offset not set".
    long startOffset = id3Data.readUnsignedInt();
    if (startOffset == 0xFFFFFFFFL) {
      startOffset = C.POSITION_UNSET;
    }
    long endOffset = id3Data.readUnsignedInt();
    if (endOffset == 0xFFFFFFFFL) {
      endOffset = C.POSITION_UNSET;
    }
    // Any remaining bytes within the frame are embedded sub-frames; decodeFrame advances the
    // position past each one (including those it fails to decode and returns null for).
    ArrayList<Id3Frame> subFrames = new ArrayList<>();
    int limit = framePosition + frameSize;
    while (id3Data.getPosition() < limit) {
      Id3Frame frame =
          decodeFrame(
              majorVersion, id3Data, unsignedIntFrameSizeHack, frameHeaderSize, framePredicate);
      if (frame != null) {
        subFrames.add(frame);
      }
    }
    Id3Frame[] subFrameArray = subFrames.toArray(new Id3Frame[0]);
    return new ChapterFrame(chapterId, startTime, endTime, startOffset, endOffset, subFrameArray);
  }
  /**
   * Decodes a CTOC (table of contents) frame: a zero-terminated element id, a flags byte, a list of
   * zero-terminated child element ids, followed by zero or more embedded sub-frames decoded
   * recursively via {@code decodeFrame}.
   */
  private static ChapterTocFrame decodeChapterTOCFrame(
      ParsableByteArray id3Data,
      int frameSize,
      int majorVersion,
      boolean unsignedIntFrameSizeHack,
      int frameHeaderSize,
      @Nullable FramePredicate framePredicate)
      throws UnsupportedEncodingException {
    int framePosition = id3Data.getPosition();
    // The element id is a zero-terminated ISO-8859-1 string at the start of the frame body.
    int elementIdEndIndex = indexOfZeroByte(id3Data.getData(), framePosition);
    String elementId =
        new String(
            id3Data.getData(), framePosition, elementIdEndIndex - framePosition, "ISO-8859-1");
    id3Data.setPosition(elementIdEndIndex + 1);
    // Flags byte: bit 1 = top-level entry, bit 0 = children are ordered.
    int ctocFlags = id3Data.readUnsignedByte();
    boolean isRoot = (ctocFlags & 0x0002) != 0;
    boolean isOrdered = (ctocFlags & 0x0001) != 0;
    // Children are a counted list of zero-terminated ISO-8859-1 element ids.
    int childCount = id3Data.readUnsignedByte();
    String[] children = new String[childCount];
    for (int i = 0; i < childCount; i++) {
      int startIndex = id3Data.getPosition();
      int endIndex = indexOfZeroByte(id3Data.getData(), startIndex);
      children[i] = new String(id3Data.getData(), startIndex, endIndex - startIndex, "ISO-8859-1");
      id3Data.setPosition(endIndex + 1);
    }
    // Any remaining bytes within the frame are embedded sub-frames; decodeFrame advances the
    // position past each one.
    ArrayList<Id3Frame> subFrames = new ArrayList<>();
    int limit = framePosition + frameSize;
    while (id3Data.getPosition() < limit) {
      @Nullable
      Id3Frame frame =
          decodeFrame(
              majorVersion, id3Data, unsignedIntFrameSizeHack, frameHeaderSize, framePredicate);
      if (frame != null) {
        subFrames.add(frame);
      }
    }
    Id3Frame[] subFrameArray = subFrames.toArray(new Id3Frame[0]);
    return new ChapterTocFrame(elementId, isRoot, isOrdered, children, subFrameArray);
  }
private static MlltFrame decodeMlltFrame(ParsableByteArray id3Data, int frameSize) {
// See ID3v2.4.0 native frames subsection 4.6.
int mpegFramesBetweenReference = id3Data.readUnsignedShort();
int bytesBetweenReference = id3Data.readUnsignedInt24();
int millisecondsBetweenReference = id3Data.readUnsignedInt24();
int bitsForBytesDeviation = id3Data.readUnsignedByte();
int bitsForMillisecondsDeviation = id3Data.readUnsignedByte();
ParsableBitArray references = new ParsableBitArray();
references.reset(id3Data);
int referencesBits = 8 * (frameSize - 10);
int bitsPerReference = bitsForBytesDeviation + bitsForMillisecondsDeviation;
int referencesCount = referencesBits / bitsPerReference;
int[] bytesDeviations = new int[referencesCount];
int[] millisecondsDeviations = new int[referencesCount];
for (int i = 0; i < referencesCount; i++) {
int bytesDeviation = references.readBits(bitsForBytesDeviation);
int millisecondsDeviation = references.readBits(bitsForMillisecondsDeviation);
bytesDeviations[i] = bytesDeviation;
millisecondsDeviations[i] = millisecondsDeviation;
}
return new MlltFrame(
mpegFramesBetweenReference,
bytesBetweenReference,
millisecondsBetweenReference,
bytesDeviations,
millisecondsDeviations);
}
private static BinaryFrame decodeBinaryFrame(
ParsableByteArray id3Data, int frameSize, String id) {
byte[] frame = new byte[frameSize];
id3Data.readBytes(frame, 0, frameSize);
return new BinaryFrame(id, frame);
}
  /**
   * Performs in-place removal of unsynchronization for {@code length} bytes starting from {@link
   * ParsableByteArray#getPosition()}
   *
   * <p>Unsynchronization inserts a 0x00 byte after every 0xFF byte; this reverses that by shifting
   * the remaining data left over each inserted 0x00 and shrinking the effective length.
   *
   * @param data Contains the data to be processed.
   * @param length The length of the data to be processed.
   * @return The length of the data after processing.
   */
  private static int removeUnsynchronization(ParsableByteArray data, int length) {
    byte[] bytes = data.getData();
    int startPosition = data.getPosition();
    // Note: length shrinks inside the loop, so the bound is re-evaluated each iteration.
    for (int i = startPosition; i + 1 < startPosition + length; i++) {
      if ((bytes[i] & 0xFF) == 0xFF && bytes[i + 1] == 0x00) {
        int relativePosition = i - startPosition;
        // Drop the inserted 0x00 by shifting everything after it one byte to the left.
        System.arraycopy(bytes, i + 2, bytes, i + 1, length - relativePosition - 2);
        length--;
      }
    }
    return length;
  }
/**
* Maps encoding byte from ID3v2 frame to a Charset.
*
* @param encodingByte The value of encoding byte from ID3v2 frame.
* @return Charset name.
*/
private static String getCharsetName(int encodingByte) {
switch (encodingByte) {
case ID3_TEXT_ENCODING_UTF_16:
return "UTF-16";
case ID3_TEXT_ENCODING_UTF_16BE:
return "UTF-16BE";
case ID3_TEXT_ENCODING_UTF_8:
return "UTF-8";
case ID3_TEXT_ENCODING_ISO_8859_1:
default:
return "ISO-8859-1";
}
}
private static String getFrameId(
int majorVersion, int frameId0, int frameId1, int frameId2, int frameId3) {
return majorVersion == 2
? String.format(Locale.US, "%c%c%c", frameId0, frameId1, frameId2)
: String.format(Locale.US, "%c%c%c%c", frameId0, frameId1, frameId2, frameId3);
}
  /**
   * Returns the index of the end-of-string terminator in {@code data} at or after {@code
   * fromIndex}, or {@code data.length} if none is found. For single-byte encodings this is the
   * first zero byte; for UTF-16 variants it is a zero byte at an even offset from {@code
   * fromIndex} that is followed by a second zero byte.
   */
  private static int indexOfEos(byte[] data, int fromIndex, int encoding) {
    int terminationPos = indexOfZeroByte(data, fromIndex);
    // For single byte encoding charsets, we're done.
    if (encoding == ID3_TEXT_ENCODING_ISO_8859_1 || encoding == ID3_TEXT_ENCODING_UTF_8) {
      return terminationPos;
    }
    // Otherwise ensure an even offset from the start, and look for a second zero byte.
    while (terminationPos < data.length - 1) {
      if ((terminationPos - fromIndex) % 2 == 0 && data[terminationPos + 1] == (byte) 0) {
        return terminationPos;
      }
      // Odd-aligned or lone zero byte: keep scanning from the next position.
      terminationPos = indexOfZeroByte(data, terminationPos + 1);
    }
    return data.length;
  }
private static int indexOfZeroByte(byte[] data, int fromIndex) {
for (int i = fromIndex; i < data.length; i++) {
if (data[i] == (byte) 0) {
return i;
}
}
return data.length;
}
private static int delimiterLength(int encodingByte) {
return (encodingByte == ID3_TEXT_ENCODING_ISO_8859_1 || encodingByte == ID3_TEXT_ENCODING_UTF_8)
? 1
: 2;
}
/**
* Copies the specified range of an array, or returns a zero length array if the range is invalid.
*
* @param data The array from which to copy.
* @param from The start of the range to copy (inclusive).
* @param to The end of the range to copy (exclusive).
* @return The copied data, or a zero length array if the range is invalid.
*/
private static byte[] copyOfRangeIfValid(byte[] data, int from, int to) {
if (to <= from) {
// Invalid or zero length range.
return Util.EMPTY_BYTE_ARRAY;
}
return Arrays.copyOfRange(data, from, to);
}
/**
* Returns a string obtained by decoding the specified range of {@code data} using the specified
* {@code charsetName}. An empty string is returned if the range is invalid.
*
* @param data The array from which to decode the string.
* @param from The start of the range.
* @param to The end of the range (exclusive).
* @param charsetName The name of the Charset to use.
* @return The decoded string, or an empty string if the range is invalid.
* @throws UnsupportedEncodingException If the Charset is not supported.
*/
private static String decodeStringIfValid(byte[] data, int from, int to, String charsetName)
throws UnsupportedEncodingException {
if (to <= from || to > data.length) {
return "";
}
return new String(data, from, to - from, charsetName);
}
  /** Holds the fields parsed from an ID3v2 tag header. */
  private static final class Id3Header {
    // ID3v2 major version of the tag (e.g. 2, 3 or 4).
    private final int majorVersion;
    // Whether the tag-level unsynchronization flag is set.
    private final boolean isUnsynchronized;
    // Size in bytes of the tag's frame data (presumably excluding the header itself — confirm at
    // the call site that populates this).
    private final int framesSize;
    public Id3Header(int majorVersion, boolean isUnsynchronized, int framesSize) {
      this.majorVersion = majorVersion;
      this.isUnsynchronized = isUnsynchronized;
      this.framesSize = framesSize;
    }
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticfilesystem.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Describes the destination file system in the replication configuration.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticfilesystem-2015-02-01/Destination" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Destination implements Serializable, Cloneable, StructuredPojo {

    /** The status of the destination Amazon EFS file system. */
    private String status;

    /** The ID of the destination Amazon EFS file system. */
    private String fileSystemId;

    /** The Amazon Web Services Region in which the destination file system is located. */
    private String region;

    /**
     * The time when the most recent sync successfully completed on the destination file system. Changes to source data
     * made before this time were successfully replicated; changes made after it might not be fully replicated.
     */
    private java.util.Date lastReplicatedTimestamp;

    /**
     * Sets the status of the destination Amazon EFS file system.
     *
     * @param status
     *        The status of the destination file system.
     * @see ReplicationStatus
     */
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * Returns the status of the destination Amazon EFS file system.
     *
     * @return The status of the destination file system.
     * @see ReplicationStatus
     */
    public String getStatus() {
        return this.status;
    }

    /**
     * Fluent setter for the status of the destination Amazon EFS file system.
     *
     * @param status
     *        The status of the destination file system.
     * @return This object, for method chaining.
     * @see ReplicationStatus
     */
    public Destination withStatus(String status) {
        setStatus(status);
        return this;
    }

    /**
     * Fluent setter accepting a {@link ReplicationStatus} enum value; stored as its string form.
     *
     * @param status
     *        The status of the destination file system.
     * @return This object, for method chaining.
     * @see ReplicationStatus
     */
    public Destination withStatus(ReplicationStatus status) {
        this.status = status.toString();
        return this;
    }

    /**
     * Sets the ID of the destination Amazon EFS file system.
     *
     * @param fileSystemId
     *        The ID of the destination file system.
     */
    public void setFileSystemId(String fileSystemId) {
        this.fileSystemId = fileSystemId;
    }

    /**
     * Returns the ID of the destination Amazon EFS file system.
     *
     * @return The ID of the destination file system.
     */
    public String getFileSystemId() {
        return this.fileSystemId;
    }

    /**
     * Fluent setter for the ID of the destination Amazon EFS file system.
     *
     * @param fileSystemId
     *        The ID of the destination file system.
     * @return This object, for method chaining.
     */
    public Destination withFileSystemId(String fileSystemId) {
        setFileSystemId(fileSystemId);
        return this;
    }

    /**
     * Sets the Amazon Web Services Region in which the destination file system is located.
     *
     * @param region
     *        The Region of the destination file system.
     */
    public void setRegion(String region) {
        this.region = region;
    }

    /**
     * Returns the Amazon Web Services Region in which the destination file system is located.
     *
     * @return The Region of the destination file system.
     */
    public String getRegion() {
        return this.region;
    }

    /**
     * Fluent setter for the Amazon Web Services Region in which the destination file system is located.
     *
     * @param region
     *        The Region of the destination file system.
     * @return This object, for method chaining.
     */
    public Destination withRegion(String region) {
        setRegion(region);
        return this;
    }

    /**
     * Sets the time when the most recent sync successfully completed on the destination file system.
     *
     * @param lastReplicatedTimestamp
     *        The completion time of the most recent successful sync. Changes made to the source before this time were
     *        replicated; later changes might not be fully replicated.
     */
    public void setLastReplicatedTimestamp(java.util.Date lastReplicatedTimestamp) {
        this.lastReplicatedTimestamp = lastReplicatedTimestamp;
    }

    /**
     * Returns the time when the most recent sync successfully completed on the destination file system.
     *
     * @return The completion time of the most recent successful sync. Changes made to the source before this time were
     *         replicated; later changes might not be fully replicated.
     */
    public java.util.Date getLastReplicatedTimestamp() {
        return this.lastReplicatedTimestamp;
    }

    /**
     * Fluent setter for the time when the most recent sync successfully completed on the destination file system.
     *
     * @param lastReplicatedTimestamp
     *        The completion time of the most recent successful sync.
     * @return This object, for method chaining.
     */
    public Destination withLastReplicatedTimestamp(java.util.Date lastReplicatedTimestamp) {
        setLastReplicatedTimestamp(lastReplicatedTimestamp);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getStatus() != null) {
            sb.append("Status: ").append(getStatus()).append(",");
        }
        if (getFileSystemId() != null) {
            sb.append("FileSystemId: ").append(getFileSystemId()).append(",");
        }
        if (getRegion() != null) {
            sb.append("Region: ").append(getRegion()).append(",");
        }
        if (getLastReplicatedTimestamp() != null) {
            sb.append("LastReplicatedTimestamp: ").append(getLastReplicatedTimestamp());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Destination)) {
            return false;
        }
        Destination other = (Destination) obj;
        return eq(other.getStatus(), getStatus())
                && eq(other.getFileSystemId(), getFileSystemId())
                && eq(other.getRegion(), getRegion())
                && eq(other.getLastReplicatedTimestamp(), getLastReplicatedTimestamp());
    }

    /** Null-safe equality check used by {@link #equals(Object)}. */
    private static boolean eq(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public int hashCode() {
        // Standard 31-based accumulation; null fields contribute 0.
        int result = 1;
        result = 31 * result + (getStatus() == null ? 0 : getStatus().hashCode());
        result = 31 * result + (getFileSystemId() == null ? 0 : getFileSystemId().hashCode());
        result = 31 * result + (getRegion() == null ? 0 : getRegion().hashCode());
        result = 31 * result + (getLastReplicatedTimestamp() == null ? 0 : getLastReplicatedTimestamp().hashCode());
        return result;
    }

    @Override
    public Destination clone() {
        try {
            return (Destination) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.elasticfilesystem.model.transform.DestinationMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.hdf5;
import io.jhdf.HdfFile;
import io.jhdf.api.Attribute;
import io.jhdf.object.datatype.DataType;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;
public class HDF5Utils {
private static final Logger logger = LoggerFactory.getLogger(HDF5Utils.class);
/*
* This regex is used to extract the final part of an HDF5 path, which is the name of the data field or column.
* While these look like file paths, they are fully contained within HDF5. This regex would extract part3 from:
* /part1/part2/part3
*/
private static final Pattern PATH_PATTERN = Pattern.compile("/*.*/(.+?)$");
/**
* This function returns and HDF5Attribute object for use when Drill maps the attributes.
*
* @param pathName The path to retrieve attributes from
* @param key The key for the specific attribute you are retrieving
* @param hdf5File The hdfFile reader object for the file you are querying
* @return HDF5Attribute The attribute from the path with the key that was requested.
*/
public static HDF5Attribute getAttribute(String pathName, String key, HdfFile hdf5File) {
if (pathName.equals("")) {
pathName = "/";
}
if (hdf5File.getByPath(pathName) == null) {
return null;
}
if (key.equals("dimensions")) {
int[] dimensions = hdf5File.getDatasetByPath(pathName).getDimensions();
ArrayUtils.reverse(dimensions);
return new HDF5Attribute(MinorType.LIST, "dimensions", dimensions);
}
if (key.equals("dataType")) {
String typeName = hdf5File.getDatasetByPath(pathName).getDataType().getJavaType().getName();
return new HDF5Attribute(getDataType(hdf5File.getDatasetByPath(pathName).getDataType()), "DataType", typeName);
}
if (hdf5File.getByPath(pathName).getAttribute(key) == null) {
return null;
}
Attribute attribute = hdf5File.getByPath(pathName).getAttribute(key);
Class<?> type = hdf5File.getByPath(pathName).getAttribute(key).getJavaType();
if (type.isAssignableFrom(long[].class)) {
return new HDF5Attribute(MinorType.BIGINT, key, attribute.getData(), true);
} else if (type.isAssignableFrom(int[].class)) {
return new HDF5Attribute(MinorType.INT, key, attribute.getData(), true);
} else if (type.isAssignableFrom(short[].class)) {
return new HDF5Attribute(MinorType.INT, key, attribute.getData(), true);
} else if (type.isAssignableFrom(byte[].class)) {
return new HDF5Attribute(MinorType.INT, key, attribute.getData(), true);
} else if (type.isAssignableFrom(double[].class)) {
return new HDF5Attribute(MinorType.FLOAT8, key, attribute.getData(), true);
} else if (type.isAssignableFrom(float[].class)) {
return new HDF5Attribute(MinorType.FLOAT8, key, attribute.getData(), true);
} else if (type.isAssignableFrom(String[].class)) {
return new HDF5Attribute(MinorType.VARCHAR, key, attribute.getData(), true);
} else if (type.isAssignableFrom(java.lang.Long.class)) {
return new HDF5Attribute(MinorType.BIGINT, key, attribute.getData());
} else if (type.isAssignableFrom(java.lang.Integer.class)) {
return new HDF5Attribute(MinorType.INT, key, attribute.getData());
} else if (type.isAssignableFrom(java.lang.Short.class)) {
return new HDF5Attribute(MinorType.INT, key, attribute.getData());
} else if (type.isAssignableFrom(java.lang.Byte.class)) {
return new HDF5Attribute(MinorType.INT, key, attribute.getData());
} else if (type.isAssignableFrom(java.lang.Double.class)) {
return new HDF5Attribute(MinorType.FLOAT8, key, attribute.getData());
} else if (type.isAssignableFrom(float.class)) {
return new HDF5Attribute(MinorType.FLOAT4, key, attribute.getData());
} else if (type.isAssignableFrom(String.class)) {
return new HDF5Attribute(MinorType.VARCHAR, key, attribute.getData());
} else if (type.isAssignableFrom(boolean.class)) {
return new HDF5Attribute(MinorType.BIT, key, attribute.getData());
}/*else if (type.isAssignableFrom(HDF5EnumerationValue.class)) {
// Convert HDF5 Enum to String
return new HDF5Attribute(MinorType.GENERIC_OBJECT, key, attribute.getData());
}*/ else if (type.isAssignableFrom(BitSet.class)) {
return new HDF5Attribute(MinorType.BIT, key, attribute.getData());
}
logger.warn("Reading attributes of type {} not yet implemented.", attribute.getJavaType());
return null;
}
/**
* This function returns the Drill data type of a given HDF5 dataset.
* @param dataType The input data set.
* @return MinorType The Drill data type of the dataset in question
*/
public static MinorType getDataType(DataType dataType) {
Class<?> type = dataType.getJavaType();
if (type == null) {
logger.warn("Datasets of type {} not implemented.", dataType.getDataClass());
//Fall back to string
return MinorType.VARCHAR;
} else if (type.isAssignableFrom(long.class)) {
return MinorType.BIGINT;
} else if (type.isAssignableFrom(short.class)) {
return MinorType.SMALLINT;
} else if (type.isAssignableFrom(byte.class)) {
return MinorType.TINYINT;
} else if (type.isAssignableFrom(int.class)) {
return MinorType.INT;
} else if (type.isAssignableFrom(float.class)) {
return MinorType.FLOAT4;
} else if (type.isAssignableFrom(double.class)) {
return MinorType.FLOAT8;
} else if (type.isAssignableFrom(String.class)) {
return MinorType.VARCHAR;
} else if (type.isAssignableFrom(java.util.Date.class) || type.isAssignableFrom(java.lang.Long.class)) {
return MinorType.TIMESTAMP;
} else if (type.isAssignableFrom(boolean.class) || type.isAssignableFrom(BitSet.class)) {
return MinorType.BIT;
} else if (type.isAssignableFrom(Map.class)) {
return MinorType.MAP;
} else if (type.isAssignableFrom(Enum.class)) {
return MinorType.GENERIC_OBJECT;
}
return MinorType.GENERIC_OBJECT;
}
/**
* This function gets the type of dataset
* @param path The path of the dataset
* @param reader The HDF5 reader
* @return The data type
*/
public static Class<?> getDatasetClass(String path, HdfFile reader) {
return reader.getDatasetByPath(path).getJavaType();
}
/**
* This helper function returns the name of a HDF5 record from a data path
*
* @param path Path to HDF5 data
* @return String name of data
*/
public static String getNameFromPath(String path) {
if( path == null) {
return null;
}
// Now create matcher object.
Matcher m = PATH_PATTERN.matcher(path);
if (m.find()) {
return m.group(1);
} else {
return "";
}
}
public static Object[] toMatrix(Object[] inputArray) {
return flatten(inputArray).toArray();
}
public static boolean[][] toBooleanMatrix(Object[] inputArray) {
Object[] input = flatten(inputArray).toArray();
int rows = input.length;
int cols = ((boolean[][])input[0]).length;
boolean[][] result = new boolean[cols][rows];
for (int i = 0; i < rows; i++) {
boolean[] row = (boolean[])input[i];
for (int j = 0; j < cols; j++) {
result[j][i] = row[j];
}
}
return result;
}
public static byte[][] toByteMatrix(Object[] inputArray) {
Object[] input = flatten(inputArray).toArray();
int rows = input.length;
int cols = ((byte[])input[0]).length;
byte[][] result = new byte[cols][rows];
for (int i = 0; i < rows; i++) {
byte[] row = (byte[])input[i];
for (int j = 0; j < cols; j++) {
result[j][i] = row[j];
}
}
return result;
}
/**
 * Transposes a (possibly nested) array of {@code short[]} rows into a short
 * matrix: element [j][i] of the result is element j of flattened input row i.
 *
 * @param inputArray nested array whose leaf elements are {@code short[]} rows
 * @return transposed matrix sized [columns][rows]
 */
public static short[][] toShortMatrix(Object[] inputArray) {
    final Object[] flatRows = flatten(inputArray).toArray();
    final int rowCount = flatRows.length;
    // Column count comes from the first row; rows are assumed equal length — TODO confirm.
    final int colCount = ((short[]) flatRows[0]).length;
    final short[][] transposed = new short[colCount][rowCount];
    for (int r = 0; r < rowCount; r++) {
        final short[] sourceRow = (short[]) flatRows[r];
        for (int c = 0; c < colCount; c++) {
            transposed[c][r] = sourceRow[c];
        }
    }
    return transposed;
}
/**
 * Transposes a (possibly nested) array of {@code int[]} rows into an int
 * matrix: element [j][i] of the result is element j of flattened input row i.
 *
 * @param inputArray nested array whose leaf elements are {@code int[]} rows
 * @return transposed matrix sized [columns][rows]
 */
public static int[][] toIntMatrix(Object[] inputArray) {
    final Object[] flatRows = flatten(inputArray).toArray();
    final int rowCount = flatRows.length;
    // Column count comes from the first row; rows are assumed equal length — TODO confirm.
    final int colCount = ((int[]) flatRows[0]).length;
    final int[][] transposed = new int[colCount][rowCount];
    for (int r = 0; r < rowCount; r++) {
        final int[] sourceRow = (int[]) flatRows[r];
        for (int c = 0; c < colCount; c++) {
            transposed[c][r] = sourceRow[c];
        }
    }
    return transposed;
}
/**
 * Transposes a (possibly nested) array of {@code long[]} rows into a long
 * matrix: element [j][i] of the result is element j of flattened input row i.
 *
 * @param inputArray nested array whose leaf elements are {@code long[]} rows
 * @return transposed matrix sized [columns][rows]
 */
public static long[][] toLongMatrix(Object[] inputArray) {
    final Object[] flatRows = flatten(inputArray).toArray();
    final int rowCount = flatRows.length;
    // Column count comes from the first row; rows are assumed equal length — TODO confirm.
    final int colCount = ((long[]) flatRows[0]).length;
    final long[][] transposed = new long[colCount][rowCount];
    for (int r = 0; r < rowCount; r++) {
        final long[] sourceRow = (long[]) flatRows[r];
        for (int c = 0; c < colCount; c++) {
            transposed[c][r] = sourceRow[c];
        }
    }
    return transposed;
}
/**
 * Transposes a (possibly nested) array of {@code float[]} rows into a float
 * matrix: element [j][i] of the result is element j of flattened input row i.
 *
 * @param inputArray nested array whose leaf elements are {@code float[]} rows
 * @return transposed matrix sized [columns][rows]
 */
public static float[][] toFloatMatrix(Object[] inputArray) {
    final Object[] flatRows = flatten(inputArray).toArray();
    final int rowCount = flatRows.length;
    // Column count comes from the first row; rows are assumed equal length — TODO confirm.
    final int colCount = ((float[]) flatRows[0]).length;
    final float[][] transposed = new float[colCount][rowCount];
    for (int r = 0; r < rowCount; r++) {
        final float[] sourceRow = (float[]) flatRows[r];
        for (int c = 0; c < colCount; c++) {
            transposed[c][r] = sourceRow[c];
        }
    }
    return transposed;
}
/**
 * Transposes a (possibly nested) array of {@code double[]} rows into a double
 * matrix: element [j][i] of the result is element j of flattened input row i.
 *
 * @param inputArray nested array whose leaf elements are {@code double[]} rows
 * @return transposed matrix sized [columns][rows]
 */
public static double[][] toDoubleMatrix(Object[] inputArray) {
    final Object[] flatRows = flatten(inputArray).toArray();
    final int rowCount = flatRows.length;
    // Column count comes from the first row; rows are assumed equal length — TODO confirm.
    final int colCount = ((double[]) flatRows[0]).length;
    final double[][] transposed = new double[colCount][rowCount];
    for (int r = 0; r < rowCount; r++) {
        final double[] sourceRow = (double[]) flatRows[r];
        for (int c = 0; c < colCount; c++) {
            transposed[c][r] = sourceRow[c];
        }
    }
    return transposed;
}
/**
 * Recursively flattens nested {@code Object[]} arrays into a stream of leaf
 * elements in depth-first order.
 *
 * @param array possibly nested array
 * @return stream of all non-array leaf values
 */
public static Stream<Object> flatten(Object[] array) {
    return Arrays.stream(array).flatMap(element -> {
        if (element instanceof Object[]) {
            return flatten((Object[]) element);
        }
        return Stream.of(element);
    });
}
}
| |
/*
*
* * Copyright 2014 http://Bither.net
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package net.bither.activity.hot;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.view.View;
import net.bither.BitherSetting;
import net.bither.R;
import net.bither.bitherj.AbstractApp;
import net.bither.bitherj.api.http.Http400Exception;
import net.bither.bitherj.core.AddressManager;
import net.bither.bitherj.core.HDMAddress;
import net.bither.bitherj.core.HDMBId;
import net.bither.bitherj.core.HDMKeychain;
import net.bither.bitherj.crypto.SecureCharSequence;
import net.bither.bitherj.delegate.IPasswordGetterDelegate;
import net.bither.bitherj.utils.Utils;
import net.bither.qrcode.ScanActivity;
import net.bither.runnable.ThreadNeedService;
import net.bither.service.BlockchainService;
import net.bither.ui.base.DropdownMessage;
import net.bither.ui.base.dialog.DialogConfirmTask;
import net.bither.ui.base.dialog.DialogPassword;
import net.bither.ui.base.dialog.DialogProgress;
import net.bither.ui.base.listener.IBackClickListener;
import net.bither.util.ExceptionUtil;
import net.bither.util.LogUtil;
import net.bither.util.ThreadUtil;
import java.util.ArrayList;
import java.util.List;
import kankan.wheel.widget.WheelView;
import kankan.wheel.widget.adapters.AbstractWheelTextAdapter;
/**
* Created by songchenwen on 15/1/12.
*/
/**
 * Activity that creates additional HDM (Hierarchical Deterministic Multisig)
 * addresses on the wallet's existing HDM keychain.
 *
 * <p>Flow: the user picks a count on a wheel. If the keychain does not hold
 * enough prepared ("uncompleted") addresses, the cold wallet's public key is
 * scanned via {@link ScanActivity} and new addresses are prepared first.
 * Completing addresses contacts the remote HDM server, so it runs off the UI
 * thread behind a progress dialog; the new address strings are returned to
 * the caller through the activity result.
 */
public class AddHDMAddressActivity extends FragmentActivity implements IPasswordGetterDelegate {
    // Request code for the QR scan that returns the cold wallet's public key.
    private static final int ColdPubRequestCode = 1609;
    // Wheel widget used to choose how many addresses to add (item index + 1).
    private WheelView wvCount;
    // Shared, non-cancelable progress dialog.
    private DialogProgress dp;
    // The wallet's HDM keychain; the activity finishes immediately if absent.
    private HDMKeychain keychain;
    // Prompts the user for the wallet password when a background task needs it.
    private DialogPassword.PasswordGetter passwordGetter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_add_hdm_address);
        keychain = AddressManager.getInstance().getHdmKeychain();
        // Without a keychain there is nothing to add addresses to; bail out.
        if (keychain == null) {
            finish();
            return;
        }
        initView();
    }

    // Wires up the cancel/add buttons, the count wheel, the progress dialog
    // and the password prompt helper.
    private void initView() {
        findViewById(R.id.ibtn_cancel).setOnClickListener(new IBackClickListener());
        findViewById(R.id.btn_add).setOnClickListener(addClick);
        wvCount = (WheelView) findViewById(R.id.wv_count);
        wvCount.setViewAdapter(new CountAdapter(this));
        wvCount.setCurrentItem(0);
        dp = new DialogProgress(this, R.string.please_wait);
        dp.setCancelable(false);
        passwordGetter = new DialogPassword.PasswordGetter(this, this);
    }

    // "Add" button handler: if enough prepared addresses exist, complete them
    // right away; otherwise ask the user to scan the cold wallet's public key.
    private View.OnClickListener addClick = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            int count = wvCount.getCurrentItem() + 1;
            if (keychain.uncompletedAddressCount() < count) {
                // Not enough prepared addresses: confirm with the user, then
                // launch the QR scanner to obtain the cold public key.
                new DialogConfirmTask(v.getContext(),
                        getString(R.string.hdm_address_add_need_cold_pub), new Runnable() {
                    @Override
                    public void run() {
                        // DialogConfirmTask presumably invokes this off the UI
                        // thread — starting an activity must happen on it.
                        runOnUiThread(new Runnable() {
                            @Override
                            public void run() {
                                startActivityForResult(new Intent(AddHDMAddressActivity.this,
                                        ScanActivity.class), ColdPubRequestCode);
                            }
                        });
                    }
                }).show();
                return;
            }
            performAdd();
        }
    };

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (ColdPubRequestCode == requestCode && resultCode == RESULT_OK) {
            final String result = data.getStringExtra(ScanActivity.INTENT_EXTRA_RESULT);
            try {
                final byte[] pub = Utils.hexStringToByteArray(result);
                // Prepare as many addresses as allowed: bounded both by the
                // per-seed cap minus what already exists, and by the per-seed
                // prepare batch size.
                final int count = Math.min(AbstractApp.bitherjSetting.hdmAddressPerSeedCount() -
                                keychain.getAllCompletedAddresses().size() - keychain
                                .uncompletedAddressCount(),
                        AbstractApp.bitherjSetting.hdmAddressPerSeedPrepareCount());
                new Thread() {
                    @Override
                    public void run() {
                        // Blocks until the user enters the password; null means cancel.
                        final SecureCharSequence password = passwordGetter.getPassword();
                        if (password == null) {
                            return;
                        }
                        try {
                            int prepared = keychain.prepareAddresses(count, password, pub);
                            LogUtil.i("Add", "try to prepare: " + count + ", prepared: " + prepared);
                            runOnUiThread(new Runnable() {
                                @Override
                                public void run() {
                                    performAdd();
                                }
                            });
                        } catch (Exception e) {
                            e.printStackTrace();
                            // The scanned cold pub does not belong to this keychain.
                            if (e instanceof HDMKeychain.HDMColdPubNotSameException) {
                                runOnUiThread(new Runnable() {
                                    @Override
                                    public void run() {
                                        DropdownMessage.showDropdownMessage(AddHDMAddressActivity
                                                .this, R.string.hdm_address_add_cold_pub_not_match);
                                    }
                                });
                            }
                        }
                    }
                }.start();
            } catch (Exception e) {
                // Scan result was not valid hex; ask for the cold pub again.
                e.printStackTrace();
                DropdownMessage.showDropdownMessage(this, R.string.hdm_address_add_need_cold_pub);
            }
            return;
        }
        super.onActivityResult(requestCode, resultCode, data);
    }

    // Completes `count` prepared addresses: pauses the blockchain service,
    // exchanges partial public keys with the remote HDM server, resumes the
    // service, then returns the new address strings via the activity result.
    private void performAdd() {
        final int count = wvCount.getCurrentItem() + 1;
        final DialogProgress dd = dp;
        new ThreadNeedService(null, this) {
            @Override
            public void runWithService(final BlockchainService service) {
                final SecureCharSequence password = passwordGetter.getPassword();
                if (password == null) {
                    return;
                }
                // Pause syncing while addresses are being added.
                if (service != null) {
                    service.stopAndUnregister();
                }
                final List<HDMAddress> as = keychain.completeAddresses(count, password,
                        new HDMKeychain.HDMFetchRemotePublicKeys() {
                            @Override
                            public void completeRemotePublicKeys(CharSequence password,
                                                                 List<HDMAddress.Pubs>
                                                                         partialPubs) {
                                try {
                                    HDMBId hdmBid = HDMBId.getHDMBidFromDb();
                                    HDMKeychain.getRemotePublicKeys(hdmBid, password, partialPubs);
                                } catch (Exception e) {
                                    e.printStackTrace();
                                    int msg = R.string.network_or_connection_error;
                                    // Map server-side HTTP 400 errors to a specific message.
                                    if (e instanceof Http400Exception) {
                                        msg = ExceptionUtil.getHDMHttpExceptionMessage((
                                                (Http400Exception) e).getErrorCode());
                                    }
                                    final int m = msg;
                                    ThreadUtil.runOnMainThread(new Runnable() {
                                        @Override
                                        public void run() {
                                            if (dd.isShowing()) {
                                                dd.dismiss();
                                            }
                                            DropdownMessage.showDropdownMessage(AddHDMAddressActivity.this, m);
                                        }
                                    });
                                }
                            }
                        });
                LogUtil.i("Add", "try to complete: " + count + ", completed: " + as.size());
                // Resume syncing regardless of how many addresses were completed.
                if (service != null) {
                    service.startAndRegister();
                }
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        if (dd.isShowing()) {
                            dd.dismiss();
                        }
                        if (as.size() == 0) {
                            return;
                        }
                        // Hand the new address strings back to the caller.
                        ArrayList<String> s = new ArrayList<String>();
                        for (HDMAddress a : as) {
                            s.add(a.getAddress());
                        }
                        Intent intent = new Intent();
                        intent.putExtra(BitherSetting.INTENT_REF.ADDRESS_POSITION_PASS_VALUE_TAG,
                                s);
                        setResult(Activity.RESULT_OK, intent);
                        finish();
                    }
                });
            }
        }.start();
    }

    // Wheel adapter showing 1..max, where max is how many more addresses the
    // seed can still hold.
    private class CountAdapter extends AbstractWheelTextAdapter {
        protected CountAdapter(Context context) {
            super(context);
        }

        @Override
        public int getItemsCount() {
            int max = AbstractApp.bitherjSetting.hdmAddressPerSeedCount() - AddressManager.getInstance
                    ().getHdmKeychain().getAllCompletedAddresses().size();
            return max;
        }

        @Override
        protected CharSequence getItemText(int index) {
            // Items are displayed 1-based.
            return String.valueOf(index + 1);
        }
    }

    public void finish() {
        super.finish();
        // Slide-down exit animation.
        overridePendingTransition(0, R.anim.slide_out_bottom);
    }

    @Override
    public void beforePasswordDialogShow() {
        // Hide the progress dialog while the password dialog is on screen.
        if (dp.isShowing()) {
            dp.dismiss();
        }
    }

    @Override
    public void afterPasswordDialogDismiss() {
        // Restore the progress dialog once the password dialog goes away.
        if (!dp.isShowing()) {
            dp.show();
        }
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.codeInsight.daemon;
import com.intellij.codeInsight.daemon.DaemonAnalyzerTestCase;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.codeInsight.daemon.LightDaemonAnalyzerTestCase;
import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.codeInspection.LocalInspectionTool;
import com.intellij.codeInspection.deadCode.UnusedDeclarationInspection;
import com.intellij.codeInspection.ex.InspectionToolRegistrar;
import com.intellij.codeInspection.ex.InspectionToolWrapper;
import com.intellij.codeInspection.ex.LocalInspectionToolWrapper;
import com.intellij.codeInspection.unusedImport.UnusedImportLocalInspection;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiManagerImpl;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.PsiShortNamesCache;
import com.intellij.testFramework.SkipSlowTestLocally;
import com.intellij.util.ArrayUtil;
import com.intellij.util.text.CharArrayUtil;
import com.intellij.util.ui.UIUtil;
import gnu.trove.THashSet;
import org.intellij.lang.annotations.Language;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.util.*;
/**
 * Stress tests for the daemon highlighting machinery: repeated full
 * re-highlighting, huge-file timing, and random in-editor edits that must
 * never produce spurious errors or invalidate PSI.
 */
@SkipSlowTestLocally
public class HighlightStressTest extends LightDaemonAnalyzerTestCase {
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Only the "RandomEditingForUnused" test needs the unused-declaration
        // inspection; other tests get their tools from
        // configureLocalInspectionTools() below.
        if ("RandomEditingForUnused".equals(getTestName(false))) {
            enableInspectionTool(new UnusedDeclarationInspection());
        }
    }

    @NotNull
    @Override
    protected LocalInspectionTool[] configureLocalInspectionTools() {
        if ("RandomEditingForUnused".equals(getTestName(false))) {
            return new LocalInspectionTool[]{new UnusedImportLocalInspection(),};
        }
        // Every other test runs with ALL registered local inspections enabled.
        List<InspectionToolWrapper> all = InspectionToolRegistrar.getInstance().createTools();
        List<LocalInspectionTool> locals = new ArrayList<>();
        for (InspectionToolWrapper tool : all) {
            if (tool instanceof LocalInspectionToolWrapper) {
                LocalInspectionTool e = ((LocalInspectionToolWrapper)tool).getTool();
                locals.add(e);
            }
        }
        return locals.toArray(new LocalInspectionTool[locals.size()]);
    }

    // Fixture source: generics-heavy code with every token separated by a
    // space, so the random-editing tests can reliably target whitespace.
    @SuppressWarnings("All") @Language("JAVA")
    @NonNls private static final String text = "import java.util.*; class X { void f ( ) { \n"
        + "List < String > ls = new ArrayList < String > ( 1 ) ; ls . toString ( ) ; \n"
        + "List < Integer > is = new ArrayList < Integer > ( 1 ) ; is . toString ( ) ; \n"
        + "List i = new ArrayList ( 1 ) ; i . toString ( ) ; \n"
        + "Collection < Number > l2 = new ArrayList < Number > ( 10 ) ; l2 . toString ( ) ; \n"
        + "Collection < Number > l22 = new ArrayList < Number > ( ) ; l22 . toString ( ) ; \n"
        + "Map < Number , String > l3 = new HashMap < Number , String > ( 10 ) ; l3 . toString ( ) ; \n"
        + "Map < String , String > m = new HashMap < String , String > ( ) ; m . toString ( ) ; \n"
        + "Map < String , String > m1 = new HashMap < String , String > ( ) ; m1 . toString ( ) ; \n"
        + "Map < String , String > m2 = new HashMap < String , String > ( ) ; m2 . toString ( ) ; \n"
        + "Map < String , String > m3 = new HashMap < String , String > ( ) ; m3 . toString ( ) ; \n"
        + "Map < String , String > mi = new HashMap < String , String > ( 1 ) ; mi . toString ( ) ; \n"
        + "Map < String , String > mi1 = new HashMap < String , String > ( 1 ) ; mi1 . toString ( ) ; \n"
        + "Map < String , String > mi2 = new HashMap < String , String > ( 1 ) ; mi2 . toString ( ) ; \n"
        + "Map < String , String > mi3 = new HashMap < String , String > ( 1 ) ; mi3 . toString ( ) ; \n"
        + "Map < Number , String > l4 = new HashMap < Number , String > ( ) ; l4 . toString ( ) ; \n"
        + "Map < Number , String > l5 = new HashMap < Number , String > ( l4 ) ; l5 . toString ( ) ; \n"
        + "HashMap < Number , String > l6 = new HashMap < Number , String > ( ) ; l6 . toString ( ) ; \n"
        + "Map < List < Integer > , Map < String , List < String > > > l7 = new HashMap ( 1 ) ; l7 . toString ( ) ; \n"
        + "java . util . Map < java . util . List < Integer > , java . util . Map < String , java . util . List < String > > > l77 = \n" +
        "new java . util . HashMap ( 1 ) ; l77 . toString ( ) ; \n"
        + " } } ";

    // Highlights the fixture from scratch repeatedly to shake out concurrency
    // problems in the highlighting passes; no iteration may report an error.
    public void testAllTheseConcurrentThreadsDoNotCrashAnything() throws Exception {
        long time = System.currentTimeMillis();
        for (int i = 0; i < 20/*00000*/; i++) {
            //System.out.println("i = " + i);
            ((PsiManagerImpl)getPsiManager()).cleanupForNextTest();
            configureFromFileText("Stress.java", text);
            List<HighlightInfo> infos = doHighlighting();
            assertEmpty(DaemonAnalyzerTestCase.filter(infos, HighlightSeverity.ERROR));
            UIUtil.dispatchAllInvocationEvents();
            FileEditorManagerEx.getInstanceEx(getProject()).closeAllFiles();
        }
        System.out.println(System.currentTimeMillis() - time+"ms");
    }

    // Manual benchmark (leading underscore keeps it out of normal runs):
    // times repeated full re-highlighting of a very large file.
    public void _testHugeFile() throws Exception {
        @NonNls String filePath = "/psi/resolve/Thinlet.java";
        configureByFile(filePath);
        doHighlighting();
        int N = 42;
        long[] time = new long[N];
        for (int i = 0; i < N; i++) {
            DaemonCodeAnalyzer.getInstance(getProject()).restart();
            long start = System.currentTimeMillis();
            doHighlighting();
            long end = System.currentTimeMillis();
            time[i] = end - start;
            System.out.println("i = " + i + "; time= "+(end-start));
            UIUtil.dispatchAllInvocationEvents();
        }
        System.out.println("Average among the N/3 median times: " + ArrayUtil.averageAmongMedians(time, 3) + "ms");
        //System.out.println("JobLauncher.COUNT = " + JobLauncher.COUNT);
        //System.out.println("JobLauncher.TINY = " + JobLauncher.TINY_COUNT);
        //System.out.println("JobLauncher.LENGTH = " + JobLauncher.LENGTH);
        //System.out.println("JobLauncher.ELAPSED = " + JobLauncher.ELAPSED);
        //System.out.println("Ave length : "+(JobLauncher.LENGTH.get()/1.0/JobLauncher.COUNT.get()));
        //System.out.println("Ave elapsed: "+(JobLauncher.ELAPSED.get()/1.0/JobLauncher.COUNT.get()));
        //
        //JobLauncher.lengths.sort();
        //System.out.println("Lengths: "+JobLauncher.lengths);
    }

    // Types a comment into random whitespace, re-highlights, and checks the
    // warning set is unchanged from the initial state; also records timings.
    public void testRandomEditingPerformance() throws Exception {
        configureFromFileText("Stress.java", text);
        List<HighlightInfo> oldWarnings = new ArrayList<>(doHighlighting());
        // Order infos deterministically: by start offset, then by text.
        Comparator<HighlightInfo> infoComparator = (o1, o2) -> {
            if (o1.equals(o2)) return 0;
            if (o1.getActualStartOffset() != o2.getActualStartOffset()) return o1.getActualStartOffset() - o2.getActualStartOffset();
            return text(o1).compareTo(text(o2));
        };
        Collections.sort(oldWarnings, infoComparator);
        List<String> oldWarningTexts = new ArrayList<>();
        for (HighlightInfo info : oldWarnings) {
            oldWarningTexts.add(text(info));
        }
        Random random = new Random();
        DaemonCodeAnalyzer.getInstance(getProject()).restart();
        int N = 20;
        long[] time = new long[N];
        int oldWarningSize = oldWarnings.size();
        for (int i = 0; i < N; i++) {
            PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
            long start = System.currentTimeMillis();
            System.out.println("i = " + i);
            String s = myFile.getText();
            int offset;
            // Pick a random space character so the edit cannot change semantics.
            while (true) {
                offset = random.nextInt(s.length());
                if (s.charAt(offset) == ' ') break;
            }
            myEditor.getCaretModel().moveToOffset(offset);
            type("/*--*/");
            List<HighlightInfo> infos = doHighlighting();
            if (oldWarningSize != infos.size()) {
                // Mismatch: print the first differing warning to aid debugging,
                // then fail on the size comparison.
                infos = new ArrayList<>(infos);
                Collections.sort(infos, infoComparator);
                for (int k=0; k<Math.min(infos.size(), oldWarningSize);k++) {
                    HighlightInfo info = infos.get(k);
                    String text = text(info);
                    String oldText = oldWarningTexts.get(k);
                    if (!text.equals(oldText)) {
                        System.err.println(k+"\n"+
                                "Old: "+oldText+"; info: " + oldWarnings.get(k)+";\n" +
                                "New: "+text+ "; info: " + info);
                        break;
                    }
                }
                assertEquals(infos.toString(), oldWarningSize, infos.size());
            }
            for (HighlightInfo info : infos) {
                assertNotSame(String.valueOf(info), HighlightSeverity.ERROR, info.getSeverity());
            }
            // Undo the typed comment before the next iteration.
            for (int k=0; k<"/*--*/".length();k++) {
                backspace();
            }
            UIUtil.dispatchAllInvocationEvents();
            long end = System.currentTimeMillis();
            time[i] = end - start;
        }
        FileEditorManagerEx.getInstanceEx(getProject()).closeAllFiles();
        System.out.println("Average among the N/3 median times: " + ArrayUtil.averageAmongMedians(time, 3) + "ms");
    }

    // Stable textual key for a HighlightInfo, used for sorting and comparison.
    @NotNull
    private static String text(@NotNull HighlightInfo info) {
        return info.getText() + info.getDescription();
    }

    // Builds a file importing/using ~100 accessible project classes, then
    // randomly comments usages in and out, checking that the unused-import
    // warning count always tracks the number of commented-out usages and that
    // PSI stays valid after every edit.
    public void testRandomEditingForUnused() throws Exception {
        configureFromFileText("Stress.java", "class X {<caret>}");
        PsiShortNamesCache cache = PsiShortNamesCache.getInstance(getProject());
        String[] names = cache.getAllClassNames();
        final StringBuilder imports = new StringBuilder();
        final StringBuilder usages = new StringBuilder();
        int v = 0;
        outer:
        for (String name : names) {
            PsiClass[] classes = cache.getClassesByName(name, GlobalSearchScope.allScope(getProject()));
            if (classes.length == 0) continue;
            PsiClass aClass = classes[0];
            if (!aClass.hasModifierProperty(PsiModifier.PUBLIC)) continue;
            if (aClass.getSuperClass() == null) continue;
            PsiClassType[] superTypes = aClass.getSuperTypes();
            if (superTypes.length == 0) continue;
            for (PsiClassType superType : superTypes) {
                PsiClass superClass = superType.resolve();
                if (superClass == null || !superClass.hasModifierProperty(PsiModifier.PUBLIC)) continue outer;
            }
            String qualifiedName = aClass.getQualifiedName();
            if (qualifiedName.startsWith("java.lang.invoke")) continue ; // java.lang.invoke.MethodHandle has weird access attributes in recent rt.jar which causes spurious highlighting errors
            if (!accessible(aClass, new THashSet<>())) continue;
            imports.append("import " + qualifiedName + ";\n");
            // Each usage starts with the "/**/" marker the edit loop targets.
            usages.append("/**/ "+aClass.getName() + " var" + v + " = null; var" + v + ".toString();\n");
            v++;
            if (v>100) break;
        }
        final String text = imports + "\n class X {{\n" + usages + "}}";
        WriteCommandAction.runWriteCommandAction(null, () -> getEditor().getDocument().setText(text));
        List<HighlightInfo> errors = DaemonAnalyzerTestCase.filter(doHighlighting(), HighlightSeverity.WARNING);
        assertEmpty(text, errors);
        Random random = new Random();
        int unused = 0;
        for (int i = 0; i < 100; i++) {
            String s = myFile.getText();
            int offset;
            // Find either an active "/**/" marker or a commented-out "//" one.
            while (true) {
                offset = random.nextInt(s.length());
                if (CharArrayUtil.regionMatches(s, offset, "/**/") || CharArrayUtil.regionMatches(s, offset, "//")) break;
            }
            char next = offset < s.length()-1 ? s.charAt(offset+1) : 0;
            if (next == '/') {
                // "//" -> "/**/": re-activate the usage, so one fewer unused import.
                myEditor.getCaretModel().moveToOffset(offset + 1);
                type("**");
                unused--;
            }
            else if (next == '*') {
                // "/**/" -> "//": comment the usage out, so one more unused import.
                myEditor.getCaretModel().moveToOffset(offset + 1);
                delete();
                delete();
                unused++;
            }
            else {
                continue;
            }
            PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
            // Every PSI element must survive the incremental reparse.
            getFile().accept(new PsiRecursiveElementVisitor() {
                @Override
                public void visitElement(PsiElement element) {
                    assertTrue(element.toString(), element.isValid());
                    super.visitElement(element);
                }
            });
            //System.out.println("i = " + i + " " + next + " at "+offset);
            List<HighlightInfo> infos = doHighlighting();
            errors = DaemonAnalyzerTestCase.filter(infos, HighlightSeverity.ERROR);
            assertEmpty(errors);
            List<HighlightInfo> warns = DaemonAnalyzerTestCase.filter(infos, HighlightSeverity.WARNING);
            if (unused != warns.size()) {
                assertEquals(warns.toString(), unused, warns.size());
            }
        }
        FileEditorManagerEx.getInstanceEx(getProject()).closeAllFiles();
    }

    // True when the class, all its supers and all containing classes are
    // public; `visited` breaks cycles (and rejects repeats).
    private static boolean accessible(PsiClass aClass, Set<PsiClass> visited) {
        if (!visited.add(aClass)) return false;
        // this class and all its super- and containing- classes should be public
        if (!aClass.hasModifierProperty(PsiModifier.PUBLIC)) return false;
        for (PsiClass superClass : aClass.getSupers()) {
            if (!accessible(superClass, visited)) return false;
        }
        PsiClass containingClass = aClass.getContainingClass();
        return containingClass == null || accessible(containingClass, visited);
    }
}
| |
package org.antlr.intellij.plugin;
import com.intellij.lang.annotation.AnnotationHolder;
import com.intellij.lang.annotation.ExternalAnnotator;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import org.antlr.intellij.plugin.parsing.RunANTLROnGrammarFile;
import org.antlr.intellij.plugin.psi.MyPsiUtils;
import org.antlr.runtime.ANTLRReaderStream;
import org.antlr.runtime.CommonToken;
import org.antlr.runtime.Token;
import org.antlr.v4.Tool;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.GrammarSemanticsMessage;
import org.antlr.v4.tool.GrammarSyntaxMessage;
import org.antlr.v4.tool.LeftRecursionCyclesMessage;
import org.antlr.v4.tool.Rule;
import org.antlr.v4.tool.ToolMessage;
import org.antlr.v4.tool.ast.GrammarAST;
import org.antlr.v4.tool.ast.GrammarRootAST;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.stringtemplate.v4.ST;
import java.io.File;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
 * External annotator that runs the real ANTLR v4 tool over a grammar file and
 * converts the tool's messages into editor annotations.
 *
 * <p>Lifecycle (driven by the platform): {@code collectInformation} returns
 * the file, {@code doAnnotate} runs ANTLR in the background, {@code apply}
 * creates annotations on the EDT.
 */
public class ANTLRv4ExternalAnnotator extends ExternalAnnotator<PsiFile, List<ANTLRv4ExternalAnnotator.Issue>> {
    // NOTE: can't use instance var as only 1 instance
    public static final Logger LOG = Logger.getInstance("ANTLR ANTLRv4ExternalAnnotator");

    /** One ANTLR tool message plus the token(s) the annotation should cover. */
    public static class Issue {
        String annotation;                                    // rendered message text
        List<Token> offendingTokens = new ArrayList<Token>(); // tokens to highlight
        ANTLRMessage msg;
        public Issue(ANTLRMessage msg) { this.msg = msg; }
    }

    /** Called first; return file; idea 12 */
    @Nullable
    public PsiFile collectionInformation(@NotNull PsiFile file) {
        LOG.info("collectionInformation "+file.getVirtualFile());
        return file;
    }

    /** Called first; return file; idea 13; can't use @Override */
    @Nullable
    public PsiFile collectInformation(@NotNull PsiFile file) {
        LOG.info("collectionInformation "+file.getVirtualFile());
        return file;
    }

    /** Called 2nd; run antlr on file */
    @Nullable
    @Override
    public List<ANTLRv4ExternalAnnotator.Issue> doAnnotate(final PsiFile file) {
        String fileContents = file.getText();
        List<String> args = RunANTLROnGrammarFile.getANTLRArgsAsList(file.getProject(), file.getVirtualFile());
        final Tool antlr = new Tool(args.toArray(new String[args.size()]));
        if ( !args.contains("-lib") ) {
            // getContainingDirectory() must be identified as a read operation on file system
            ApplicationManager.getApplication().runReadAction(new Runnable() {
                @Override
                public void run() {
                    antlr.libDirectory = file.getContainingDirectory().toString();
                }
            });
        }
        final FindVocabFileRunnable findVocabAction = new FindVocabFileRunnable(file);
        ApplicationManager.getApplication().runReadAction(findVocabAction);
        if ( findVocabAction.vocabName!=null ) { // need to generate other file?
            // for now, just turn off undef token warnings
        }
        // Replace the tool's default console listeners with one that records
        // Issue objects for conversion to annotations in apply().
        antlr.removeListeners();
        AnnotatorToolListener listener = new AnnotatorToolListener(findVocabAction.vocabName);
        antlr.addListener(listener);
        try {
            StringReader sr = new StringReader(fileContents);
            ANTLRReaderStream in = new ANTLRReaderStream(sr);
            in.name = file.getName();
            GrammarRootAST ast = antlr.parse(file.getName(), in);
            // Syntax errors were already delivered through the listener.
            if ( ast==null || ast.hasErrors ) return Collections.emptyList();
            Grammar g = antlr.createGrammar(ast);
            VirtualFile vfile = file.getVirtualFile();
            if ( vfile==null ) {
                LOG.error("doAnnotate no virtual file for "+file);
                return listener.issues;
            }
            g.fileName = vfile.getPath();
            antlr.process(g, false);
            // Anchor each recorded message to its offending token(s).
            for (Issue issue : listener.issues) {
                processIssue(file, issue);
            }
        }
        catch (Exception e) {
            LOG.error("antlr can't process "+file.getName(), e);
        }
        return listener.issues;
    }

    /** Called 3rd; creates one annotation per offending token of each issue. */
    @Override
    public void apply(@NotNull PsiFile file,
                      List<ANTLRv4ExternalAnnotator.Issue> issues,
                      @NotNull AnnotationHolder holder)
    {
        for (int i = 0; i < issues.size(); i++) {
            Issue issue = issues.get(i);
            for (int j = 0; j < issue.offendingTokens.size(); j++) {
                Token t = issue.offendingTokens.get(j);
                if ( t instanceof CommonToken ) {
                    CommonToken ct = (CommonToken)t;
                    int startIndex = ct.getStartIndex();
                    int stopIndex = ct.getStopIndex();
                    // stopIndex is inclusive; TextRange's end offset is exclusive.
                    TextRange range = new TextRange(startIndex, stopIndex + 1);
                    switch (issue.msg.getErrorType().severity) {
                        case ERROR:
                        case ERROR_ONE_OFF:
                        case FATAL:
                            holder.createErrorAnnotation(range, issue.annotation);
                            break;
                        case WARNING:
                            holder.createWarningAnnotation(range, issue.annotation);
                            break;
                        case WARNING_ONE_OFF:
                        case INFO:
                            holder.createWeakWarningAnnotation(range, issue.annotation);
                            // Fix: break was missing here, silently falling
                            // through to default (same effect, but fragile if
                            // a case is ever added between them).
                            break;
                        default:
                            break;
                    }
                }
            }
        }
        super.apply(file, issues, holder);
    }

    /**
     * Resolves which token(s) an issue should highlight, based on the concrete
     * ANTLRMessage subtype, and renders the message text via the tool's
     * message templates. Messages from other (imported) files are ignored.
     */
    public void processIssue(final PsiFile file, Issue issue) {
        File grammarFile = new File(file.getVirtualFile().getPath());
        File issueFile = new File(issue.msg.fileName);
        if ( !grammarFile.getName().equals(issueFile.getName()) ) {
            return; // ignore errors from external files
        }
        if ( issue.msg instanceof GrammarSemanticsMessage ) {
            Token t = ((GrammarSemanticsMessage)issue.msg).offendingToken;
            issue.offendingTokens.add(t);
        }
        else if ( issue.msg instanceof LeftRecursionCyclesMessage ) {
            // Highlight the name token of every rule in every reported cycle.
            List<String> rulesToHighlight = new ArrayList<String>();
            LeftRecursionCyclesMessage lmsg = (LeftRecursionCyclesMessage)issue.msg;
            // ANTLR's message args are untyped Object[]; this element is
            // documented to hold the rule cycles, so the cast is unavoidable.
            @SuppressWarnings("unchecked")
            Collection<? extends Collection<Rule>> cycles =
                (Collection<? extends Collection<Rule>>)lmsg.getArgs()[0];
            for (Collection<Rule> cycle : cycles) {
                for (Rule r : cycle) {
                    rulesToHighlight.add(r.name);
                    GrammarAST nameNode = (GrammarAST)r.ast.getChild(0);
                    issue.offendingTokens.add(nameNode.getToken());
                }
            }
        }
        else if ( issue.msg instanceof GrammarSyntaxMessage ) {
            Token t = issue.msg.offendingToken;
            issue.offendingTokens.add(t);
        }
        else if ( issue.msg instanceof ToolMessage ) {
            issue.offendingTokens.add(issue.msg.offendingToken);
        }
        // Render the message via the tool's error-message templates.
        Tool antlr = new Tool();
        ST msgST = antlr.errMgr.getMessageTemplate(issue.msg);
        String outputMsg = msgST.render();
        if (antlr.errMgr.formatWantsSingleLineMessage()) {
            outputMsg = outputMsg.replace('\n', ' ');
        }
        issue.annotation = outputMsg;
    }

    /** Read-action runnable that looks up the grammar's tokenVocab, if any. */
    protected static class FindVocabFileRunnable implements Runnable {
        public String vocabName;
        private final PsiFile file;

        public FindVocabFileRunnable(PsiFile file) {
            this.file = file;
        }

        @Override
        public void run() {
            vocabName = MyPsiUtils.findTokenVocabIfAny((ANTLRv4FileRoot) file);
        }
    }
}
| |
package org.eclipse.jetty.util.log;
import static org.hamcrest.Matchers.is;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogManager;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
public class JavaUtilLogTest
{
private static Handler[] originalHandlers;
private static CapturingJULHandler jul;
/**
 * Swaps the root JUL logger's handlers for a single capturing handler so the
 * tests can inspect emitted log lines. The original handlers are remembered
 * for restoration in {@link #restoreJUL()}.
 */
@BeforeClass
public static void setJUL()
{
    LogManager manager = LogManager.getLogManager();
    java.util.logging.Logger rootLogger = manager.getLogger("");
    // Keep the pre-test handlers so restoreJUL() can put them back.
    originalHandlers = rootLogger.getHandlers();
    for (Handler attached : originalHandlers)
    {
        rootLogger.removeHandler(attached);
    }
    // Install the capturing handler used by the assertions below.
    jul = new CapturingJULHandler();
    rootLogger.addHandler(jul);
}
/**
 * Removes the capturing handler installed by {@link #setJUL()} and reattaches
 * the handlers the root logger originally had.
 */
@AfterClass
public static void restoreJUL()
{
    LogManager manager = LogManager.getLogManager();
    java.util.logging.Logger rootLogger = manager.getLogger("");
    // Drop whatever handlers the tests installed.
    for (Handler attached : rootLogger.getHandlers())
    {
        rootLogger.removeHandler(attached);
    }
    // Reinstate the pre-test handlers.
    for (Handler saved : originalHandlers)
    {
        rootLogger.addHandler(saved);
    }
}
/** Verifies that both short and dotted logger names appear in the capture. */
@Test
public void testNamedLogger()
{
    jul.clear();
    JavaUtilLog shortName = new JavaUtilLog("test");
    shortName.info("Info test");
    jul.assertContainsLine("INFO|test|Info test");
    JavaUtilLog dottedName = new JavaUtilLog("test.a.long.name");
    dottedName.info("Long test");
    jul.assertContainsLine("INFO|test.a.long.name|Long test");
}
/** Exercises every debug() overload and checks the formatted capture. */
@Test
public void testDebugOutput()
{
    jul.clear();
    // Throwable shared by the assertions below, plus its raw stack-trace text.
    Throwable cause = new Throwable("Message");
    StringWriter traceWriter = new StringWriter();
    cause.printStackTrace(new PrintWriter(traceWriter));
    String trace = traceWriter.toString();
    // Emit one line per debug() overload at FINE level.
    JavaUtilLog log = new JavaUtilLog("test.de.bug");
    setJulLevel("test.de.bug",Level.FINE);
    log.debug("Simple debug");
    log.debug("Debug with {} parameter",1);
    log.debug("Debug with {} {} parameters", 2, "spiffy");
    log.debug("Debug with throwable", cause);
    log.debug(cause);
    // jul.dump();
    jul.assertContainsLine("FINE|test.de.bug|Simple debug");
    jul.assertContainsLine("FINE|test.de.bug|Debug with 1 parameter");
    jul.assertContainsLine("FINE|test.de.bug|Debug with 2 spiffy parameters");
    jul.assertContainsLine("FINE|test.de.bug|Debug with throwable");
    jul.assertContainsLine(trace);
}
/** Exercises every info() overload and checks the formatted capture. */
@Test
public void testInfoOutput()
{
    jul.clear();
    // Throwable shared by the assertions below, plus its raw stack-trace text.
    Throwable cause = new Throwable("Message");
    StringWriter traceWriter = new StringWriter();
    cause.printStackTrace(new PrintWriter(traceWriter));
    String trace = traceWriter.toString();
    // Emit one line per info() overload at INFO level.
    JavaUtilLog log = new JavaUtilLog("test.in.fo");
    setJulLevel("test.in.fo",Level.INFO);
    log.info("Simple info");
    log.info("Info with {} parameter",1);
    log.info("Info with {} {} parameters", 2, "spiffy");
    log.info("Info with throwable", cause);
    log.info(cause);
    // jul.dump();
    jul.assertContainsLine("INFO|test.in.fo|Simple info");
    jul.assertContainsLine("INFO|test.in.fo|Info with 1 parameter");
    jul.assertContainsLine("INFO|test.in.fo|Info with 2 spiffy parameters");
    jul.assertContainsLine("INFO|test.in.fo|Info with throwable");
    jul.assertContainsLine(trace);
}
/** Exercises every warn() overload and checks the formatted capture. */
@Test
public void testWarnOutput()
{
    jul.clear();
    // Throwable shared by the assertions below, plus its raw stack-trace text.
    Throwable cause = new Throwable("Message");
    StringWriter traceWriter = new StringWriter();
    cause.printStackTrace(new PrintWriter(traceWriter));
    String trace = traceWriter.toString();
    // Emit one line per warn() overload at WARNING level.
    JavaUtilLog log = new JavaUtilLog("test.wa.rn");
    setJulLevel("test.wa.rn",Level.WARNING);
    log.warn("Simple warn");
    log.warn("Warn with {} parameter",1);
    log.warn("Warn with {} {} parameters", 2, "spiffy");
    log.warn("Warn with throwable", cause);
    log.warn(cause);
    // jul.dump();
    jul.assertContainsLine("WARNING|test.wa.rn|Simple warn");
    jul.assertContainsLine("WARNING|test.wa.rn|Warn with 1 parameter");
    jul.assertContainsLine("WARNING|test.wa.rn|Warn with 2 spiffy parameters");
    jul.assertContainsLine("WARNING|test.wa.rn|Warn with throwable");
    jul.assertContainsLine(trace);
}
/** Ensures null messages and null arguments are tolerated by the formatter. */
@Test
public void testFormattingWithNulls()
{
    jul.clear();
    JavaUtilLog nullSafeLog = new JavaUtilLog("test.nu.ll");
    setJulLevel("test.nu.ll",Level.INFO);
    nullSafeLog.info("Testing info(msg,null,null) - {} {}","arg0","arg1");
    nullSafeLog.info("Testing info(msg,null,null) - {}/{}",null,null);
    nullSafeLog.info("Testing info(msg,null,null) > {}",null,null);
    nullSafeLog.info("Testing info(msg,null,null)",null,null);
    nullSafeLog.info(null,"Testing","info(null,arg0,arg1)");
    nullSafeLog.info(null,null,null);
    jul.dump();
    jul.assertContainsLine("INFO|test.nu.ll|Testing info(msg,null,null) - null/null");
    jul.assertContainsLine("INFO|test.nu.ll|Testing info(msg,null,null) > null null");
    jul.assertContainsLine("INFO|test.nu.ll|Testing info(msg,null,null) null null");
    jul.assertContainsLine("INFO|test.nu.ll|null Testing info(null,arg0,arg1)");
    jul.assertContainsLine("INFO|test.nu.ll|null null null");
}
@Test
public void testIsDebugEnabled() {
JavaUtilLog log = new JavaUtilLog("test.legacy");
setJulLevel("test.legacy",Level.ALL);
Assert.assertThat("log.level(all).isDebugEnabled", log.isDebugEnabled(), is(true));
setJulLevel("test.legacy",Level.FINEST);
Assert.assertThat("log.level(finest).isDebugEnabled", log.isDebugEnabled(), is(true));
setJulLevel("test.legacy",Level.FINER);
Assert.assertThat("log.level(finer).isDebugEnabled", log.isDebugEnabled(), is(true));
setJulLevel("test.legacy",Level.FINE);
Assert.assertThat("log.level(fine).isDebugEnabled", log.isDebugEnabled(), is(true));
setJulLevel("test.legacy",Level.INFO);
Assert.assertThat("log.level(info).isDebugEnabled", log.isDebugEnabled(), is(false));
setJulLevel("test.legacy",Level.WARNING);
Assert.assertThat("log.level(warn).isDebugEnabled", log.isDebugEnabled(), is(false));
log.setDebugEnabled(true);
Assert.assertThat("log.isDebugEnabled", log.isDebugEnabled(), is(true));
log.setDebugEnabled(false);
Assert.assertThat("log.isDebugEnabled", log.isDebugEnabled(), is(false));
}
private void setJulLevel(String name, Level lvl)
{
java.util.logging.Logger log = java.util.logging.Logger.getLogger(name);
log.setLevel(lvl);
}
}
| |
package butterknife.internal;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
 * Collects view, listener and collection bindings for a single target class
 * and renders the generated Butter Knife injector source (inject/reset
 * methods) as a Java source string via {@link #brewJava()}.
 *
 * NOTE(review): this file is decompiler output (JD-Core 0.6.0, see the
 * trailer comment). Several bodies below contain synthetic labels (label89,
 * label199, label230, label753) and break/continue placements the decompiler
 * emitted for bytecode jumps; they do not read as — and may not be —
 * compilable Java. Treat the control flow as approximate and consult the
 * upstream Butter Knife sources before modifying any logic here.
 */
final class ViewInjector
{
  // Simple name of the generated injector class.
  private final String className;
  // Package the generated class is emitted into.
  private final String classPackage;
  // Array/list bindings, each mapped to the view ids it gathers.
  private final Map<CollectionBinding, int[]> collectionBindings = new LinkedHashMap();
  // Fully-qualified name of a parent injector to chain to, or null.
  private String parentInjector;
  // Fully-qualified name of the class whose fields receive the injections.
  private final String targetClass;
  // Per-view-id injection state, in insertion order.
  private final Map<Integer, ViewInjection> viewIdMap = new LinkedHashMap();
  ViewInjector(String paramString1, String paramString2, String paramString3)
  {
    this.classPackage = paramString1;
    this.className = paramString2;
    this.targetClass = paramString3;
  }
  // Emits "(type) " when the binding type differs from android.view.View.
  static void emitCastIfNeeded(StringBuilder paramStringBuilder, String paramString)
  {
    emitCastIfNeeded(paramStringBuilder, "android.view.View", paramString);
  }
  // Emits "(paramString2) " when the two type names differ; otherwise nothing.
  static void emitCastIfNeeded(StringBuilder paramStringBuilder, String paramString1, String paramString2)
  {
    if (!paramString1.equals(paramString2))
      paramStringBuilder.append('(').append(paramString2).append(") ");
  }
  // Emits the assignment of a collection (array/list) binding field using
  // Finder.arrayOf(...)/Finder.listOf(...).
  // NOTE(review): decompiled control flow — label89/label199 and the trailing
  // while(true) encode the original loop over paramArrayOfInt; do not trust
  // the statement order as written.
  private void emitCollectionBinding(StringBuilder paramStringBuilder, CollectionBinding paramCollectionBinding, int[] paramArrayOfInt)
  {
    paramStringBuilder.append(" target.").append(paramCollectionBinding.getName()).append(" = ");
    int i;
    switch (ViewInjector.1.$SwitchMap$butterknife$internal$CollectionBinding$Kind[paramCollectionBinding.getKind().ordinal()])
    {
    default:
      throw new IllegalStateException("Unknown kind: " + paramCollectionBinding.getKind());
    case 1:
      paramStringBuilder.append("Finder.arrayOf(");
      i = 0;
      label89: if (i >= paramArrayOfInt.length)
        break label199;
      if (i > 0)
        paramStringBuilder.append(',');
      paramStringBuilder.append("\n ");
      emitCastIfNeeded(paramStringBuilder, paramCollectionBinding.getType());
      if (!paramCollectionBinding.isRequired())
        break;
      paramStringBuilder.append("finder.findRequiredView(source, ").append(paramArrayOfInt[i]).append(", \"").append(paramCollectionBinding.getName()).append("\")");
    case 2:
    }
    while (true)
    {
      i++;
      break label89;
      paramStringBuilder.append("Finder.listOf(");
      break;
      paramStringBuilder.append("finder.findOptionalView(source, ").append(paramArrayOfInt[i]).append(")");
    }
    label199: paramStringBuilder.append("\n );");
  }
  // Emits a human-readable, comma-separated description of the bindings
  // ("a, b, and c") for use in generated error messages.
  // NOTE(review): decompiled switch-on-size with fallthrough; the 1-element
  // and 2-element cases are special-cased in the original source.
  static void emitHumanDescription(StringBuilder paramStringBuilder, List<Binding> paramList)
  {
    int i;
    int j;
    switch (paramList.size())
    {
    default:
      i = paramList.size();
      j = 0;
    case 1:
      while (j < i)
      {
        Binding localBinding = (Binding)paramList.get(j);
        if (j != 0)
          paramStringBuilder.append(", ");
        if (j == i - 1)
          paramStringBuilder.append("and ");
        paramStringBuilder.append(localBinding.getDescription());
        j++;
        continue;
        paramStringBuilder.append(((Binding)paramList.get(0)).getDescription());
      }
      return;
    case 2:
    }
    paramStringBuilder.append(((Binding)paramList.get(0)).getDescription()).append(" and ").append(((Binding)paramList.get(1)).getDescription());
  }
  // Emits the generated static inject(Finder, target, Object) method:
  // optional parent delegation, then one section per view id, then the
  // collection bindings.
  private void emitInject(StringBuilder paramStringBuilder)
  {
    paramStringBuilder.append(" public static void inject(Finder finder, final ").append(this.targetClass).append(" target, Object source) {\n");
    if (this.parentInjector != null)
      paramStringBuilder.append(" ").append(this.parentInjector).append(".inject(finder, target, source);\n\n");
    paramStringBuilder.append(" View view;\n");
    Iterator localIterator1 = this.viewIdMap.values().iterator();
    while (localIterator1.hasNext())
      emitViewInjection(paramStringBuilder, (ViewInjection)localIterator1.next());
    Iterator localIterator2 = this.collectionBindings.entrySet().iterator();
    while (localIterator2.hasNext())
    {
      Map.Entry localEntry = (Map.Entry)localIterator2.next();
      emitCollectionBinding(paramStringBuilder, (CollectionBinding)localEntry.getKey(), (int[])localEntry.getValue());
    }
    paramStringBuilder.append(" }\n");
  }
  // Emits anonymous listener classes (e.g. OnClickListener) wired to the
  // current "view" variable, dispatching to the annotated target methods.
  // NOTE(review): heavily mangled by decompilation (label230/label753, the
  // for(;;)-style str loop); the original iterates listener classes, their
  // methods, and each method's bound target callbacks.
  private void emitListenerBindings(StringBuilder paramStringBuilder, ViewInjection paramViewInjection)
  {
    Map localMap1 = paramViewInjection.getListenerBindings();
    if (localMap1.isEmpty())
      return;
    boolean bool = paramViewInjection.getRequiredBindings().isEmpty();
    if (bool)
      paramStringBuilder.append(" if (view != null) {\n");
    for (String str = " "; ; str = "")
    {
      Iterator localIterator1 = localMap1.entrySet().iterator();
      while (localIterator1.hasNext())
      {
        Map.Entry localEntry = (Map.Entry)localIterator1.next();
        ListenerClass localListenerClass = (ListenerClass)localEntry.getKey();
        Map localMap2 = (Map)localEntry.getValue();
        int i;
        if (!"android.view.View".equals(localListenerClass.targetType()))
          i = 1;
        while (true)
        {
          paramStringBuilder.append(str).append(" ");
          if (i == 0)
            break label230;
          paramStringBuilder.append("((").append(localListenerClass.targetType());
          if (localListenerClass.genericArguments() <= 0)
            break;
          paramStringBuilder.append('<');
          int i3 = 0;
          while (true)
            if (i3 < localListenerClass.genericArguments())
            {
              if (i3 > 0)
                paramStringBuilder.append(", ");
              paramStringBuilder.append('?');
              i3++;
              continue;
              i = 0;
              break;
            }
          paramStringBuilder.append('>');
        }
        paramStringBuilder.append(") ");
        label230: paramStringBuilder.append("view");
        if (i != 0)
          paramStringBuilder.append(')');
        paramStringBuilder.append('.').append(localListenerClass.setter()).append("(\n");
        paramStringBuilder.append(str).append(" new ").append(localListenerClass.type()).append("() {\n");
        Iterator localIterator2 = getListenerMethods(localListenerClass).iterator();
        while (localIterator2.hasNext())
        {
          ListenerMethod localListenerMethod = (ListenerMethod)localIterator2.next();
          paramStringBuilder.append(str).append(" @Override public ").append(localListenerMethod.returnType()).append(' ').append(localListenerMethod.name()).append("(\n");
          String[] arrayOfString1 = localListenerMethod.parameters();
          int j = 0;
          int k = arrayOfString1.length;
          while (j < k)
          {
            paramStringBuilder.append(str).append(" ").append(arrayOfString1[j]).append(" p").append(j);
            if (j < k - 1)
              paramStringBuilder.append(',');
            paramStringBuilder.append('\n');
            j++;
          }
          paramStringBuilder.append(str).append(" ) {\n");
          paramStringBuilder.append(str).append(" ");
          int m;
          Iterator localIterator3;
          if (!"void".equals(localListenerMethod.returnType()))
          {
            m = 1;
            if (m != 0)
              paramStringBuilder.append("return ");
            if (localMap2.containsKey(localListenerMethod))
              localIterator3 = ((Set)localMap2.get(localListenerMethod)).iterator();
          }
          else
          {
            while (true)
            {
              if (!localIterator3.hasNext())
                break label753;
              ListenerBinding localListenerBinding = (ListenerBinding)localIterator3.next();
              paramStringBuilder.append("target.").append(localListenerBinding.getName()).append('(');
              List localList = localListenerBinding.getParameters();
              String[] arrayOfString2 = localListenerMethod.parameters();
              int n = localList.size();
              int i1 = 0;
              while (true)
                if (i1 < n)
                {
                  Parameter localParameter = (Parameter)localList.get(i1);
                  int i2 = localParameter.getListenerPosition();
                  emitCastIfNeeded(paramStringBuilder, arrayOfString2[i2], localParameter.getType());
                  paramStringBuilder.append('p').append(i2);
                  if (i1 < n - 1)
                    paramStringBuilder.append(", ");
                  i1++;
                  continue;
                  m = 0;
                  break;
                }
              paramStringBuilder.append(");");
              if (!localIterator3.hasNext())
                continue;
              paramStringBuilder.append("\n ");
            }
          }
          if (m != 0)
            paramStringBuilder.append(localListenerMethod.defaultReturn()).append(';');
          label753: paramStringBuilder.append('\n');
          paramStringBuilder.append(str).append(" }\n");
        }
        paramStringBuilder.append(str).append(" });\n");
      }
      if (!bool)
        break;
      paramStringBuilder.append(" }\n");
      return;
    }
  }
  // Emits the generated static reset(target) method: optional parent
  // delegation, then nulls every bound view and collection field.
  private void emitReset(StringBuilder paramStringBuilder)
  {
    paramStringBuilder.append(" public static void reset(").append(this.targetClass).append(" target) {\n");
    if (this.parentInjector != null)
      paramStringBuilder.append(" ").append(this.parentInjector).append(".reset(target);\n\n");
    Iterator localIterator1 = this.viewIdMap.values().iterator();
    while (localIterator1.hasNext())
    {
      Iterator localIterator3 = ((ViewInjection)localIterator1.next()).getViewBindings().iterator();
      while (localIterator3.hasNext())
      {
        ViewBinding localViewBinding = (ViewBinding)localIterator3.next();
        paramStringBuilder.append(" target.").append(localViewBinding.getName()).append(" = null;\n");
      }
    }
    Iterator localIterator2 = this.collectionBindings.keySet().iterator();
    while (localIterator2.hasNext())
    {
      CollectionBinding localCollectionBinding = (CollectionBinding)localIterator2.next();
      paramStringBuilder.append(" target.").append(localCollectionBinding.getName()).append(" = null;\n");
    }
    paramStringBuilder.append(" }\n");
  }
  // Emits "target.field = (Cast) view;" for each view binding on this id.
  // NOTE(review): the empty-if plus while(true) wrapper is a decompiler
  // artifact; the intent is simply "return early when there are no bindings".
  private void emitViewBindings(StringBuilder paramStringBuilder, ViewInjection paramViewInjection)
  {
    Collection localCollection = paramViewInjection.getViewBindings();
    if (localCollection.isEmpty());
    while (true)
    {
      return;
      Iterator localIterator = localCollection.iterator();
      while (localIterator.hasNext())
      {
        ViewBinding localViewBinding = (ViewBinding)localIterator.next();
        paramStringBuilder.append(" target.").append(localViewBinding.getName()).append(" = ");
        emitCastIfNeeded(paramStringBuilder, localViewBinding.getType());
        paramStringBuilder.append("view;\n");
      }
    }
  }
  // Emits the "view = finder.find...()" lookup for one view id (optional vs.
  // required vs. the target itself for id -1), then its view and listener
  // bindings. Decompiled jump structure — statement order is approximate.
  private void emitViewInjection(StringBuilder paramStringBuilder, ViewInjection paramViewInjection)
  {
    paramStringBuilder.append(" view = ");
    List localList = paramViewInjection.getRequiredBindings();
    if (localList.isEmpty())
      paramStringBuilder.append("finder.findOptionalView(source, ").append(paramViewInjection.getId()).append(");\n");
    while (true)
    {
      emitViewBindings(paramStringBuilder, paramViewInjection);
      emitListenerBindings(paramStringBuilder, paramViewInjection);
      return;
      if (paramViewInjection.getId() == -1)
      {
        paramStringBuilder.append("target;\n");
        continue;
      }
      paramStringBuilder.append("finder.findRequiredView(source, ").append(paramViewInjection.getId()).append(", \"");
      emitHumanDescription(paramStringBuilder, localList);
      paramStringBuilder.append("\");\n");
    }
  }
  // Resolves the listener methods for a listener class: either the single
  // declared method, or — when a callbacks enum is used — the @ListenerMethod
  // annotation on each enum constant. Throws if a constant lacks the
  // annotation; NoSuchFieldException cannot happen for a real enum constant,
  // hence the AssertionError.
  static List<ListenerMethod> getListenerMethods(ListenerClass paramListenerClass)
  {
    if (paramListenerClass.method().length == 1)
      return Arrays.asList(paramListenerClass.method());
    ArrayList localArrayList;
    while (true)
    {
      int j;
      ListenerMethod localListenerMethod;
      try
      {
        localArrayList = new ArrayList();
        Class localClass = paramListenerClass.callbacks();
        Enum[] arrayOfEnum = (Enum[])localClass.getEnumConstants();
        int i = arrayOfEnum.length;
        j = 0;
        if (j >= i)
          break;
        Enum localEnum = arrayOfEnum[j];
        localListenerMethod = (ListenerMethod)localClass.getField(localEnum.name()).getAnnotation(ListenerMethod.class);
        if (localListenerMethod == null)
        {
          Object[] arrayOfObject = new Object[4];
          arrayOfObject[0] = localClass.getEnclosingClass().getSimpleName();
          arrayOfObject[1] = localClass.getSimpleName();
          arrayOfObject[2] = localEnum.name();
          arrayOfObject[3] = ListenerMethod.class.getSimpleName();
          throw new IllegalStateException(String.format("@%s's %s.%s missing @%s annotation.", arrayOfObject));
        }
      }
      catch (NoSuchFieldException localNoSuchFieldException)
      {
        throw new AssertionError(localNoSuchFieldException);
      }
      localArrayList.add(localListenerMethod);
      j++;
    }
    return localArrayList;
  }
  // Returns the ViewInjection for the id, creating and registering one on
  // first use.
  private ViewInjection getOrCreateViewInjection(int paramInt)
  {
    ViewInjection localViewInjection = (ViewInjection)this.viewIdMap.get(Integer.valueOf(paramInt));
    if (localViewInjection == null)
    {
      localViewInjection = new ViewInjection(paramInt);
      this.viewIdMap.put(Integer.valueOf(paramInt), localViewInjection);
    }
    return localViewInjection;
  }
  // Registers a collection binding for the given view ids.
  final void addCollection(int[] paramArrayOfInt, CollectionBinding paramCollectionBinding)
  {
    this.collectionBindings.put(paramCollectionBinding, paramArrayOfInt);
  }
  // Registers a listener binding; returns false when a non-void listener
  // method already has a binding (only one such binding may exist).
  final boolean addListener(int paramInt, ListenerClass paramListenerClass, ListenerMethod paramListenerMethod, ListenerBinding paramListenerBinding)
  {
    ViewInjection localViewInjection = getOrCreateViewInjection(paramInt);
    if ((localViewInjection.hasListenerBinding(paramListenerClass, paramListenerMethod)) && (!"void".equals(paramListenerMethod.returnType())))
      return false;
    localViewInjection.addListenerBinding(paramListenerClass, paramListenerMethod, paramListenerBinding);
    return true;
  }
  // Registers a plain view field binding for the given view id.
  final void addView(int paramInt, ViewBinding paramViewBinding)
  {
    getOrCreateViewInjection(paramInt).addViewBinding(paramViewBinding);
  }
  // Renders the complete generated injector class as Java source.
  final String brewJava()
  {
    StringBuilder localStringBuilder = new StringBuilder();
    localStringBuilder.append("// Generated code from Butter Knife. Do not modify!\n");
    localStringBuilder.append("package ").append(this.classPackage).append(";\n\n");
    localStringBuilder.append("import android.view.View;\n");
    localStringBuilder.append("import butterknife.ButterKnife.Finder;\n\n");
    localStringBuilder.append("public class ").append(this.className).append(" {\n");
    emitInject(localStringBuilder);
    localStringBuilder.append('\n');
    emitReset(localStringBuilder);
    localStringBuilder.append("}\n");
    return localStringBuilder.toString();
  }
  // Fully-qualified name of the generated class.
  final String getFqcn()
  {
    return this.classPackage + "." + this.className;
  }
  // Returns the existing ViewInjection for the id, or null.
  final ViewInjection getViewInjection(int paramInt)
  {
    return (ViewInjection)this.viewIdMap.get(Integer.valueOf(paramInt));
  }
  // Sets the fully-qualified parent injector to delegate to in inject/reset.
  final void setParentInjector(String paramString)
  {
    this.parentInjector = paramString;
  }
}
/* Location: E:\Progs\Dev\Android\Decompile\apktool\zssq\zssq-dex2jar.jar
* Qualified Name: butterknife.internal.ViewInjector
* JD-Core Version: 0.6.0
*/
| |
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package xyz.designabc.mystique.util.transition;
import android.animation.Animator;
import android.animation.TimeInterpolator;
import android.content.Context;
import android.os.Build;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.annotation.RequiresApi;
import android.util.ArrayMap;
import android.util.FloatProperty;
import android.util.IntProperty;
import android.util.Property;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;
import java.util.ArrayList;
/**
* Utility methods for working with animations.
*/
public class AnimUtils {
    // Utility class: no instances.
    private AnimUtils() { }
    // Process-wide lazily-loaded interpolator caches.
    // NOTE(review): the lazy checks below are unsynchronized; this is only
    // safe if all callers are on a single (UI) thread — confirm.
    private static Interpolator fastOutSlowIn;
    private static Interpolator fastOutLinearIn;
    private static Interpolator linearOutSlowIn;
    private static Interpolator linear;
    /** Returns the (cached) framework fast_out_slow_in interpolator. */
    public static Interpolator getFastOutSlowInInterpolator(Context context) {
        if (fastOutSlowIn == null) {
            fastOutSlowIn = AnimationUtils.loadInterpolator(context,
                    android.R.interpolator.fast_out_slow_in);
        }
        return fastOutSlowIn;
    }
    /** Returns the (cached) framework fast_out_linear_in interpolator. */
    public static Interpolator getFastOutLinearInInterpolator(Context context) {
        if (fastOutLinearIn == null) {
            fastOutLinearIn = AnimationUtils.loadInterpolator(context,
                    android.R.interpolator.fast_out_linear_in);
        }
        return fastOutLinearIn;
    }
    /** Returns the (cached) framework linear_out_slow_in interpolator. */
    public static Interpolator getLinearOutSlowInInterpolator(Context context) {
        if (linearOutSlowIn == null) {
            linearOutSlowIn = AnimationUtils.loadInterpolator(context,
                    android.R.interpolator.linear_out_slow_in);
        }
        return linearOutSlowIn;
    }
    /** Returns a (cached) plain linear interpolator; needs no Context. */
    public static Interpolator getLinearInterpolator() {
        if (linear == null) {
            linear = new LinearInterpolator();
        }
        return linear;
    }
    /**
     * Linear interpolate between a and b with parameter t.
     */
    public static float lerp(float a, float b, float t) {
        return a + (b - a) * t;
    }
    /**
     * A delegate for creating a {@link Property} of <code>int</code> type.
     */
    public static abstract class IntProp<T> {
        // Property name reported to the animation framework.
        public final String name;
        public IntProp(String name) {
            this.name = name;
        }
        public abstract void set(T object, int value);
        public abstract int get(T object);
    }
    /**
     * The animation framework has an optimization for <code>Properties</code> of type
     * <code>int</code> but it was only made public in API24, so wrap the impl in our own type
     * and conditionally create the appropriate type, delegating the implementation.
     */
    @NonNull
    public static <T> Property<T, Integer> createIntProperty(@NonNull final IntProp<T> impl) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
            // API 24+: IntProperty avoids autoboxing on the set path.
            return new IntProperty<T>(impl.name) {
                @Override
                public Integer get(T object) {
                    return impl.get(object);
                }
                @Override
                public void setValue(T object, int value) {
                    impl.set(object, value);
                }
            };
        } else {
            // Pre-24: fall back to a boxed Property.
            return new Property<T, Integer>(Integer.class, impl.name) {
                @Override
                public Integer get(T object) {
                    return impl.get(object);
                }
                @Override
                public void set(T object, Integer value) {
                    impl.set(object, value);
                }
            };
        }
    }
    /**
     * A delegate for creating a {@link Property} of <code>float</code> type.
     */
    public static abstract class FloatProp<T> {
        // Property name reported to the animation framework.
        public final String name;
        protected FloatProp(String name) {
            this.name = name;
        }
        public abstract void set(T object, float value);
        public abstract float get(T object);
    }
    /**
     * The animation framework has an optimization for <code>Properties</code> of type
     * <code>float</code> but it was only made public in API24, so wrap the impl in our own type
     * and conditionally create the appropriate type, delegating the implementation.
     */
    @NonNull
    public static <T> Property<T, Float> createFloatProperty(@NonNull final FloatProp<T> impl) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
            // API 24+: FloatProperty avoids autoboxing on the set path.
            return new FloatProperty<T>(impl.name) {
                @Override
                public Float get(T object) {
                    return impl.get(object);
                }
                @Override
                public void setValue(T object, float value) {
                    impl.set(object, value);
                }
            };
        } else {
            // Pre-24: fall back to a boxed Property.
            return new Property<T, Float>(Float.class, impl.name) {
                @Override
                public Float get(T object) {
                    return impl.get(object);
                }
                @Override
                public void set(T object, Float value) {
                    impl.set(object, value);
                }
            };
        }
    }
    /**
     * https://halfthought.wordpress.com/2014/11/07/reveal-transition/
     * <p/>
     * Interrupting Activity transitions can yield an OperationNotSupportedException when the
     * transition tries to pause the animator. Yikes! We can fix this by wrapping the Animator:
     */
    @RequiresApi(api = Build.VERSION_CODES.KITKAT)
    public static class NoPauseAnimator extends Animator {
        // The real animator every call is delegated to (except pause/resume).
        private final Animator mAnimator;
        // Maps caller-supplied listeners to the wrappers actually registered
        // on mAnimator, so removeListener can find the wrapper again.
        private final ArrayMap<AnimatorListener, AnimatorListener> mListeners = new ArrayMap<>();
        public NoPauseAnimator(Animator animator) {
            mAnimator = animator;
        }
        @Override
        public void addListener(AnimatorListener listener) {
            // Wrap so callbacks report this animator, not the delegate;
            // adding the same listener twice is a no-op.
            AnimatorListener wrapper = new AnimatorListenerWrapper(this, listener);
            if (!mListeners.containsKey(listener)) {
                mListeners.put(listener, wrapper);
                mAnimator.addListener(wrapper);
            }
        }
        @Override
        public void cancel() {
            mAnimator.cancel();
        }
        @Override
        public void end() {
            mAnimator.end();
        }
        @Override
        public long getDuration() {
            return mAnimator.getDuration();
        }
        @Override
        public TimeInterpolator getInterpolator() {
            return mAnimator.getInterpolator();
        }
        @Override
        public void setInterpolator(TimeInterpolator timeInterpolator) {
            mAnimator.setInterpolator(timeInterpolator);
        }
        @NonNull
        @Override
        public ArrayList<AnimatorListener> getListeners() {
            // Report the caller's original listeners, not the wrappers.
            return new ArrayList<>(mListeners.keySet());
        }
        @Override
        public long getStartDelay() {
            return mAnimator.getStartDelay();
        }
        @Override
        public void setStartDelay(long delayMS) {
            mAnimator.setStartDelay(delayMS);
        }
        @Override
        public boolean isPaused() {
            return mAnimator.isPaused();
        }
        @Override
        public boolean isRunning() {
            return mAnimator.isRunning();
        }
        @Override
        public boolean isStarted() {
            return mAnimator.isStarted();
        }
        /* We don't want to override pause or resume methods because we don't want them
         * to affect mAnimator.
        public void pause();
        public void resume();
        public void addPauseListener(AnimatorPauseListener listener);
        public void removePauseListener(AnimatorPauseListener listener);
        */
        @Override
        public void removeAllListeners() {
            mListeners.clear();
            mAnimator.removeAllListeners();
        }
        @Override
        public void removeListener(AnimatorListener listener) {
            // Look up and remove the wrapper registered for this listener.
            AnimatorListener wrapper = mListeners.get(listener);
            if (wrapper != null) {
                mListeners.remove(listener);
                mAnimator.removeListener(wrapper);
            }
        }
        @NonNull
        @Override
        public Animator setDuration(long durationMS) {
            mAnimator.setDuration(durationMS);
            return this;
        }
        @Override
        public void setTarget(Object target) {
            mAnimator.setTarget(target);
        }
        @Override
        public void setupEndValues() {
            mAnimator.setupEndValues();
        }
        @Override
        public void setupStartValues() {
            mAnimator.setupStartValues();
        }
        @Override
        public void start() {
            mAnimator.start();
        }
    }
    /**
     * Forwards animator callbacks to the caller's listener, substituting the
     * wrapping animator for the delegate in every callback argument.
     */
    private static class AnimatorListenerWrapper implements Animator.AnimatorListener {
        private final Animator mAnimator;
        private final Animator.AnimatorListener mListener;
        AnimatorListenerWrapper(Animator animator, Animator.AnimatorListener listener) {
            mAnimator = animator;
            mListener = listener;
        }
        @Override
        public void onAnimationStart(Animator animator) {
            mListener.onAnimationStart(mAnimator);
        }
        @Override
        public void onAnimationEnd(Animator animator) {
            mListener.onAnimationEnd(mAnimator);
        }
        @Override
        public void onAnimationCancel(Animator animator) {
            mListener.onAnimationCancel(mAnimator);
        }
        @Override
        public void onAnimationRepeat(Animator animator) {
            mListener.onAnimationRepeat(mAnimator);
        }
    }
    /**
     * Shows the view immediately, then after delayTime milliseconds attaches
     * the given out animation and hides it (GONE when isViewGone, otherwise
     * INVISIBLE).
     */
    public static void animationOut(@NonNull final View view, final int animation, int delayTime, final boolean isViewGone, @NonNull final Context context) {
        view.setVisibility(View.VISIBLE);
        Handler handler = new Handler();
        handler.postDelayed(new Runnable() {
            public void run() {
                Animation outAnimation = AnimationUtils.loadAnimation(
                        context.getApplicationContext(), animation);
                view.setAnimation(outAnimation);
                if (isViewGone)
                    view.setVisibility(View.GONE);
                else
                    view.setVisibility(View.INVISIBLE);
            }
        }, delayTime);
    }
    /**
     * After delayTime milliseconds, attaches the given in animation to the
     * view and makes it visible.
     */
    public static void animationIn(@NonNull final View view, final int animation, int delayTime, @NonNull final Context context) {
        Handler handler = new Handler();
        handler.postDelayed(new Runnable() {
            public void run() {
                Animation inAnimation = AnimationUtils.loadAnimation(
                        context.getApplicationContext(), animation);
                view.setAnimation(inAnimation);
                view.setVisibility(View.VISIBLE);
            }
        }, delayTime);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.clients.consumer.internals;
import org.apache.kafka.clients.GroupRebalanceConfig;
import org.apache.kafka.clients.consumer.CommitFailedException;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.consumer.OffsetCommitCallback;
import org.apache.kafka.clients.consumer.RetriableCommitFailedException;
import org.apache.kafka.clients.consumer.internals.PartitionAssignor.Assignment;
import org.apache.kafka.clients.consumer.internals.PartitionAssignor.Subscription;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.FencedInstanceIdException;
import org.apache.kafka.common.errors.GroupAuthorizationException;
import org.apache.kafka.common.errors.InterruptException;
import org.apache.kafka.common.errors.RetriableException;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.errors.TopicAuthorizationException;
import org.apache.kafka.common.errors.WakeupException;
import org.apache.kafka.common.message.JoinGroupRequestData;
import org.apache.kafka.common.message.JoinGroupResponseData;
import org.apache.kafka.common.message.OffsetCommitRequestData;
import org.apache.kafka.common.message.OffsetCommitResponseData;
import org.apache.kafka.common.metrics.Measurable;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.metrics.stats.Avg;
import org.apache.kafka.common.metrics.stats.Max;
import org.apache.kafka.common.protocol.Errors;
import org.apache.kafka.common.record.RecordBatch;
import org.apache.kafka.common.requests.OffsetCommitRequest;
import org.apache.kafka.common.requests.OffsetCommitResponse;
import org.apache.kafka.common.requests.OffsetFetchRequest;
import org.apache.kafka.common.requests.OffsetFetchResponse;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Timer;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
/**
* This class manages the coordination process with the consumer coordinator.
*/
public final class ConsumerCoordinator extends AbstractCoordinator {
    // Group membership / rebalance timing configuration shared with the base coordinator.
    private final GroupRebalanceConfig rebalanceConfig;
    private final Logger log;
    // Candidate partition assignors, in preference order.
    private final List<PartitionAssignor> assignors;
    private final ConsumerMetadata metadata;
    private final ConsumerCoordinatorMetrics sensors;
    // The consumer's subscription/assignment state.
    private final SubscriptionState subscriptions;
    // Callback used for commits when the caller supplies none.
    private final OffsetCommitCallback defaultOffsetCommitCallback;
    private final boolean autoCommitEnabled;
    private final int autoCommitIntervalMs;
    private final ConsumerInterceptors<?, ?> interceptors;
    // Count of async commits currently in flight.
    private final AtomicInteger pendingAsyncCommits;
    // this collection must be thread-safe because it is modified from the response handler
    // of offset commit requests, which may be invoked from the heartbeat thread
    private final ConcurrentLinkedQueue<OffsetCommitCompletion> completedOffsetCommits;
    // True when this member performed the assignment for the current generation
    // (only the leader watches for metadata changes; see onJoinComplete).
    private boolean isLeader = false;
    // Topic set sent in the most recent JoinGroup request (see metadata()).
    private Set<String> joinedSubscription;
    // Snapshot of topic metadata taken when subscription state last changed.
    private MetadataSnapshot metadataSnapshot;
    // Leader-side snapshot taken at assignment time; null on followers.
    private MetadataSnapshot assignmentSnapshot;
    // Tracks when the next auto-commit is due; null when auto-commit disabled.
    private Timer nextAutoCommitTimer;
    // NOTE(review): presumably set when an async commit fails with
    // FencedInstanceIdException so it can be rethrown on the caller's
    // thread — confirm against the commit response handler (not in view).
    private AtomicBoolean asyncCommitFenced;
    // hold onto request&future for committed offset requests to enable async calls.
    private PendingCommittedOffsetRequest pendingCommittedOffsetRequest = null;
private static class PendingCommittedOffsetRequest {
private final Set<TopicPartition> requestedPartitions;
private final Generation requestedGeneration;
private final RequestFuture<Map<TopicPartition, OffsetAndMetadata>> response;
private PendingCommittedOffsetRequest(final Set<TopicPartition> requestedPartitions,
final Generation generationAtRequestTime,
final RequestFuture<Map<TopicPartition, OffsetAndMetadata>> response) {
this.requestedPartitions = Objects.requireNonNull(requestedPartitions);
this.response = Objects.requireNonNull(response);
this.requestedGeneration = generationAtRequestTime;
}
private boolean sameRequest(final Set<TopicPartition> currentRequest, final Generation currentGeneration) {
return Objects.equals(requestedGeneration, currentGeneration) && requestedPartitions.equals(currentRequest);
}
}
    /**
     * Initialize the coordination manager.
     *
     * @param rebalanceConfig group membership and rebalance timing configuration
     * @param logContext source of this coordinator's logger
     * @param client network client used for coordinator communication
     * @param assignors candidate partition assignors, in preference order
     * @param metadata consumer metadata; an update is requested on construction
     * @param subscriptions the consumer's subscription/assignment state
     * @param metrics metrics registry for coordinator sensors
     * @param metricGrpPrefix prefix for the coordinator metric group names
     * @param time clock used for timers
     * @param autoCommitEnabled whether offsets are committed automatically
     * @param autoCommitIntervalMs interval between automatic offset commits
     * @param interceptors interceptors notified of consumer lifecycle events
     */
    public ConsumerCoordinator(GroupRebalanceConfig rebalanceConfig,
                               LogContext logContext,
                               ConsumerNetworkClient client,
                               List<PartitionAssignor> assignors,
                               ConsumerMetadata metadata,
                               SubscriptionState subscriptions,
                               Metrics metrics,
                               String metricGrpPrefix,
                               Time time,
                               boolean autoCommitEnabled,
                               int autoCommitIntervalMs,
                               ConsumerInterceptors<?, ?> interceptors) {
        super(rebalanceConfig,
              logContext,
              client,
              metrics,
              metricGrpPrefix,
              time);
        this.rebalanceConfig = rebalanceConfig;
        this.log = logContext.logger(ConsumerCoordinator.class);
        this.metadata = metadata;
        // Seed the metadata snapshot from the current cluster view.
        this.metadataSnapshot = new MetadataSnapshot(subscriptions, metadata.fetch(), metadata.updateVersion());
        this.subscriptions = subscriptions;
        this.defaultOffsetCommitCallback = new DefaultOffsetCommitCallback();
        this.autoCommitEnabled = autoCommitEnabled;
        this.autoCommitIntervalMs = autoCommitIntervalMs;
        this.assignors = assignors;
        this.completedOffsetCommits = new ConcurrentLinkedQueue<>();
        this.sensors = new ConsumerCoordinatorMetrics(metrics, metricGrpPrefix);
        this.interceptors = interceptors;
        this.pendingAsyncCommits = new AtomicInteger();
        this.asyncCommitFenced = new AtomicBoolean(false);
        // The auto-commit timer only exists when auto-commit is on.
        if (autoCommitEnabled)
            this.nextAutoCommitTimer = time.timer(autoCommitIntervalMs);
        // Request a metadata update at startup.
        this.metadata.requestUpdate();
    }
    /**
     * The protocol type embedded in group requests: always the consumer
     * protocol type.
     */
    @Override
    public String protocolType() {
        return ConsumerProtocol.PROTOCOL_TYPE;
    }
@Override
protected JoinGroupRequestData.JoinGroupRequestProtocolCollection metadata() {
log.debug("Joining group with current subscription: {}", subscriptions.subscription());
this.joinedSubscription = subscriptions.subscription();
JoinGroupRequestData.JoinGroupRequestProtocolCollection protocolSet = new JoinGroupRequestData.JoinGroupRequestProtocolCollection();
for (PartitionAssignor assignor : assignors) {
Subscription subscription = assignor.subscription(joinedSubscription);
ByteBuffer metadata = ConsumerProtocol.serializeSubscription(subscription);
protocolSet.add(new JoinGroupRequestData.JoinGroupRequestProtocol()
.setName(assignor.name())
.setMetadata(Utils.toArray(metadata)));
}
return protocolSet;
}
public void updatePatternSubscription(Cluster cluster) {
final Set<String> topicsToSubscribe = cluster.topics().stream()
.filter(subscriptions::matchesSubscribedPattern)
.collect(Collectors.toSet());
if (subscriptions.subscribeFromPattern(topicsToSubscribe))
metadata.requestUpdateForNewTopics();
}
private PartitionAssignor lookupAssignor(String name) {
for (PartitionAssignor assignor : this.assignors) {
if (assignor.name().equals(name))
return assignor;
}
return null;
}
private void maybeUpdateJoinedSubscription(Set<TopicPartition> assignedPartitions) {
// Check if the assignment contains some topics that were not in the original
// subscription, if yes we will obey what leader has decided and add these topics
// into the subscriptions as long as they still match the subscribed pattern
Set<String> addedTopics = new HashSet<>();
//this is a copy because its handed to listener below
for (TopicPartition tp : assignedPartitions) {
if (!joinedSubscription.contains(tp.topic()))
addedTopics.add(tp.topic());
}
if (!addedTopics.isEmpty()) {
Set<String> newSubscription = new HashSet<>(subscriptions.subscription());
Set<String> newJoinedSubscription = new HashSet<>(joinedSubscription);
newSubscription.addAll(addedTopics);
newJoinedSubscription.addAll(addedTopics);
if (this.subscriptions.subscribeFromPattern(newSubscription))
metadata.requestUpdateForNewTopics();
this.joinedSubscription = newJoinedSubscription;
}
}
    /**
     * Completes a rebalance: applies the leader's assignment to local subscription
     * state, lets the assignor update its internal state, restarts the auto-commit
     * timer, and finally invokes the user's rebalance listener. The statement order
     * here is deliberate — the listener must run last, after state is consistent.
     *
     * @param generation         generation id of the completed rebalance
     * @param memberId           this consumer's member id within the group
     * @param assignmentStrategy assignor (protocol) name selected by the coordinator
     * @param assignmentBuffer   serialized assignment produced by the group leader
     */
    @Override
    protected void onJoinComplete(int generation,
                                  String memberId,
                                  String assignmentStrategy,
                                  ByteBuffer assignmentBuffer) {
        // only the leader is responsible for monitoring for metadata changes (i.e. partition changes)
        if (!isLeader)
            assignmentSnapshot = null;
        PartitionAssignor assignor = lookupAssignor(assignmentStrategy);
        if (assignor == null)
            throw new IllegalStateException("Coordinator selected invalid assignment protocol: " + assignmentStrategy);
        Assignment assignment = ConsumerProtocol.deserializeAssignment(assignmentBuffer);
        // If the assignment no longer matches our subscription (it changed mid-rebalance),
        // discard it and re-join rather than consuming from unsubscribed topics.
        if (!subscriptions.assignFromSubscribed(assignment.partitions())) {
            log.warn("We received an assignment {} that doesn't match our current subscription {}; it is likely " +
                "that the subscription has changed since we joined the group. Will try re-join the group with current subscription",
                assignment.partitions(), subscriptions.prettyString());
            requestRejoin();
            return;
        }
        Set<TopicPartition> assignedPartitions = subscriptions.assignedPartitions();
        // The leader may have assigned partitions which match our subscription pattern, but which
        // were not explicitly requested, so we update the joined subscription here.
        maybeUpdateJoinedSubscription(assignedPartitions);
        // give the assignor a chance to update internal state based on the received assignment
        assignor.onAssignment(assignment, generation);
        // reschedule the auto commit starting from now
        if (autoCommitEnabled)
            this.nextAutoCommitTimer.updateAndReset(autoCommitIntervalMs);
        // execute the user's callback after rebalance
        ConsumerRebalanceListener listener = subscriptions.rebalanceListener();
        log.info("Setting newly assigned partitions: {}", Utils.join(assignedPartitions, ", "));
        try {
            listener.onPartitionsAssigned(assignedPartitions);
        } catch (WakeupException | InterruptException e) {
            // wakeups/interrupts come from the user and must propagate
            throw e;
        } catch (Exception e) {
            // other listener failures are logged but must not abort the rebalance
            log.error("User provided listener {} failed on partition assignment", listener.getClass().getName(), e);
        }
    }
void maybeUpdateSubscriptionMetadata() {
int version = metadata.updateVersion();
if (version > metadataSnapshot.version) {
Cluster cluster = metadata.fetch();
if (subscriptions.hasPatternSubscription())
updatePatternSubscription(cluster);
// Update the current snapshot, which will be used to check for subscription
// changes that would require a rebalance (e.g. new partitions).
metadataSnapshot = new MetadataSnapshot(subscriptions, cluster, version);
}
}
    /**
     * Poll for coordinator events. This ensures that the coordinator is known and that the consumer
     * has joined the group (if it is using group management). This also handles periodic offset commits
     * if they are enabled.
     * <p>
     * Returns early if the timeout expires
     *
     * @param timer Timer bounding how long this method can block
     * @return true iff the operation succeeded
     */
    public boolean poll(Timer timer) {
        maybeUpdateSubscriptionMetadata();
        // Fire any commit callbacks queued by the heartbeat/network threads before doing new work.
        invokeCompletedOffsetCommitCallbacks();
        if (subscriptions.partitionsAutoAssigned()) {
            // Always update the heartbeat last poll time so that the heartbeat thread does not leave the
            // group proactively due to application inactivity even if (say) the coordinator cannot be found.
            pollHeartbeat(timer.currentTimeMs());
            if (coordinatorUnknown() && !ensureCoordinatorReady(timer)) {
                return false;
            }
            if (rejoinNeededOrPending()) {
                // due to a race condition between the initial metadata fetch and the initial rebalance,
                // we need to ensure that the metadata is fresh before joining initially. This ensures
                // that we have matched the pattern against the cluster's topics at least once before joining.
                if (subscriptions.hasPatternSubscription()) {
                    // For consumer group that uses pattern-based subscription, after a topic is created,
                    // any consumer that discovers the topic after metadata refresh can trigger rebalance
                    // across the entire consumer group. Multiple rebalances can be triggered after one topic
                    // creation if consumers refresh metadata at vastly different times. We can significantly
                    // reduce the number of rebalances caused by single topic creation by asking consumer to
                    // refresh metadata before re-joining the group as long as the refresh backoff time has
                    // passed.
                    if (this.metadata.timeToAllowUpdate(timer.currentTimeMs()) == 0) {
                        this.metadata.requestUpdate();
                    }
                    if (!client.ensureFreshMetadata(timer)) {
                        return false;
                    }
                    maybeUpdateSubscriptionMetadata();
                }
                if (!ensureActiveGroup(timer)) {
                    return false;
                }
            }
        } else {
            // For manually assigned partitions, if there are no ready nodes, await metadata.
            // If connections to all nodes fail, wakeups triggered while attempting to send fetch
            // requests result in polls returning immediately, causing a tight loop of polls. Without
            // the wakeup, poll() with no channels would block for the timeout, delaying re-connection.
            // awaitMetadataUpdate() initiates new connections with configured backoff and avoids the busy loop.
            // When group management is used, metadata wait is already performed for this scenario as
            // coordinator is unknown, hence this check is not required.
            if (metadata.updateRequested() && !client.hasReadyNodes(timer.currentTimeMs())) {
                client.awaitMetadataUpdate(timer);
            }
        }
        // Trigger an async auto-commit if the interval has elapsed (no-op when disabled).
        maybeAutoCommitOffsetsAsync(timer.currentTimeMs());
        return true;
    }
/**
* Return the time to the next needed invocation of {@link #poll(Timer)}.
* @param now current time in milliseconds
* @return the maximum time in milliseconds the caller should wait before the next invocation of poll()
*/
public long timeToNextPoll(long now) {
if (!autoCommitEnabled)
return timeToNextHeartbeat(now);
return Math.min(nextAutoCommitTimer.remainingMs(), timeToNextHeartbeat(now));
}
private void updateGroupSubscription(Set<String> topics) {
// the leader will begin watching for changes to any of the topics the group is interested in,
// which ensures that all metadata changes will eventually be seen
if (this.subscriptions.groupSubscribe(topics))
metadata.requestUpdateForNewTopics();
// update metadata (if needed) and keep track of the metadata used for assignment so that
// we can check after rebalance completion whether anything has changed
if (!client.ensureFreshMetadata(time.timer(Long.MAX_VALUE)))
throw new TimeoutException();
maybeUpdateSubscriptionMetadata();
}
    /**
     * Runs on the group leader after a successful JoinGroup: deserializes every
     * member's subscription, invokes the selected assignor, and serializes the
     * resulting per-member assignments for the SyncGroup request.
     *
     * @param leaderId           member id of the elected leader (this consumer)
     * @param assignmentStrategy assignor (protocol) name selected by the coordinator
     * @param allSubscriptions   raw subscription metadata from every group member
     * @return map from member id to its serialized assignment
     */
    @Override
    protected Map<String, ByteBuffer> performAssignment(String leaderId,
                                                        String assignmentStrategy,
                                                        List<JoinGroupResponseData.JoinGroupResponseMember> allSubscriptions) {
        PartitionAssignor assignor = lookupAssignor(assignmentStrategy);
        if (assignor == null)
            throw new IllegalStateException("Coordinator selected invalid assignment protocol: " + assignmentStrategy);
        // Decode each member's subscription and collect the union of subscribed topics.
        Set<String> allSubscribedTopics = new HashSet<>();
        Map<String, Subscription> subscriptions = new HashMap<>();
        for (JoinGroupResponseData.JoinGroupResponseMember memberSubscription : allSubscriptions) {
            Subscription subscription = ConsumerProtocol.buildSubscription(ByteBuffer.wrap(memberSubscription.metadata()),
                    Optional.ofNullable(memberSubscription.groupInstanceId()));
            subscriptions.put(memberSubscription.memberId(), subscription);
            allSubscribedTopics.addAll(subscription.topics());
        }
        // the leader will begin watching for changes to any of the topics the group is interested in,
        // which ensures that all metadata changes will eventually be seen
        updateGroupSubscription(allSubscribedTopics);
        isLeader = true;
        log.debug("Performing assignment using strategy {} with subscriptions {}", assignor.name(), subscriptions);
        Map<String, Assignment> assignment = assignor.assign(metadata.fetch(), subscriptions);
        // user-customized assignor may have created some topics that are not in the subscription list
        // and assign their partitions to the members; in this case we would like to update the leader's
        // own metadata with the newly added topics so that it will not trigger a subsequent rebalance
        // when these topics gets updated from metadata refresh.
        //
        // TODO: this is a hack and not something we want to support long-term unless we push regex into the protocol
        //       we may need to modify the PartitionAssignor API to better support this case.
        Set<String> assignedTopics = new HashSet<>();
        for (Assignment assigned : assignment.values()) {
            for (TopicPartition tp : assigned.partitions())
                assignedTopics.add(tp.topic());
        }
        // Warn about subscribed topics the assignor left unassigned (possible assignor bug
        // or topics with no partitions in the current metadata).
        if (!assignedTopics.containsAll(allSubscribedTopics)) {
            Set<String> notAssignedTopics = new HashSet<>(allSubscribedTopics);
            notAssignedTopics.removeAll(assignedTopics);
            log.warn("The following subscribed topics are not assigned to any members: {} ", notAssignedTopics);
        }
        // Conversely, if the assignor handed out topics nobody subscribed to, start tracking
        // them so their metadata is fetched and does not trigger an immediate rebalance.
        if (!allSubscribedTopics.containsAll(assignedTopics)) {
            Set<String> newlyAddedTopics = new HashSet<>(assignedTopics);
            newlyAddedTopics.removeAll(allSubscribedTopics);
            log.info("The following not-subscribed topics are assigned, and their metadata will be " +
                    "fetched from the brokers: {}", newlyAddedTopics);
            allSubscribedTopics.addAll(assignedTopics);
            updateGroupSubscription(allSubscribedTopics);
        }
        // Remember the metadata used for this assignment; a later mismatch forces a rejoin.
        assignmentSnapshot = metadataSnapshot;
        log.debug("Finished assignment for group: {}", assignment);
        Map<String, ByteBuffer> groupAssignment = new HashMap<>();
        for (Map.Entry<String, Assignment> assignmentEntry : assignment.entrySet()) {
            ByteBuffer buffer = ConsumerProtocol.serializeAssignment(assignmentEntry.getValue());
            groupAssignment.put(assignmentEntry.getKey(), buffer);
        }
        return groupAssignment;
    }
    /**
     * Invoked before (re)joining the group: commits offsets (if auto-commit is on)
     * BEFORE notifying the user's listener of revocation, then clears leader state.
     * The commit-first ordering matters — revoked partitions' positions must be
     * persisted while they are still owned.
     *
     * @param generation generation id of the rebalance being prepared
     * @param memberId   this consumer's member id within the group
     */
    @Override
    protected void onJoinPrepare(int generation, String memberId) {
        // commit offsets prior to rebalance if auto-commit enabled
        maybeAutoCommitOffsetsSync(time.timer(rebalanceConfig.rebalanceTimeoutMs));
        // execute the user's callback before rebalance
        ConsumerRebalanceListener listener = subscriptions.rebalanceListener();
        Set<TopicPartition> revoked = subscriptions.assignedPartitions();
        log.info("Revoking previously assigned partitions {}", revoked);
        try {
            listener.onPartitionsRevoked(revoked);
        } catch (WakeupException | InterruptException e) {
            // user-triggered interruptions must propagate
            throw e;
        } catch (Exception e) {
            // listener failures are logged but must not prevent the rejoin
            log.error("User provided listener {} failed on partition revocation", listener.getClass().getName(), e);
        }
        isLeader = false;
        subscriptions.resetGroupSubscription();
    }
@Override
public boolean rejoinNeededOrPending() {
if (!subscriptions.partitionsAutoAssigned())
return false;
// we need to rejoin if we performed the assignment and metadata has changed
if (assignmentSnapshot != null && !assignmentSnapshot.matches(metadataSnapshot))
return true;
// we need to join if our subscription has changed since the last join
if (joinedSubscription != null && !joinedSubscription.equals(subscriptions.subscription()))
return true;
return super.rejoinNeededOrPending();
}
/**
* Refresh the committed offsets for provided partitions.
*
* @param timer Timer bounding how long this method can block
* @return true iff the operation completed within the timeout
*/
public boolean refreshCommittedOffsetsIfNeeded(Timer timer) {
final Set<TopicPartition> missingFetchPositions = subscriptions.missingFetchPositions();
final Map<TopicPartition, OffsetAndMetadata> offsets = fetchCommittedOffsets(missingFetchPositions, timer);
if (offsets == null) return false;
for (final Map.Entry<TopicPartition, OffsetAndMetadata> entry : offsets.entrySet()) {
final TopicPartition tp = entry.getKey();
final OffsetAndMetadata offsetAndMetadata = entry.getValue();
final ConsumerMetadata.LeaderAndEpoch leaderAndEpoch = metadata.leaderAndEpoch(tp);
final SubscriptionState.FetchPosition position = new SubscriptionState.FetchPosition(
offsetAndMetadata.offset(), offsetAndMetadata.leaderEpoch(),
leaderAndEpoch);
log.info("Setting offset for partition {} to the committed offset {}", tp, position);
entry.getValue().leaderEpoch().ifPresent(epoch -> this.metadata.updateLastSeenEpochIfNewer(entry.getKey(), epoch));
this.subscriptions.seekUnvalidated(tp, position);
}
return true;
}
    /**
     * Fetch the current committed offsets from the coordinator for a set of partitions.
     * A single in-flight OffsetFetch request is cached across invocations (the
     * pendingCommittedOffsetRequest field) so repeated calls with the same partitions
     * and generation reuse it rather than issuing duplicates.
     *
     * @param partitions The partitions to fetch offsets for
     * @param timer      Timer bounding how long this method can block
     * @return A map from partition to the committed offset or null if the operation timed out
     */
    public Map<TopicPartition, OffsetAndMetadata> fetchCommittedOffsets(final Set<TopicPartition> partitions,
                                                                        final Timer timer) {
        if (partitions.isEmpty()) return Collections.emptyMap();
        final Generation generation = generation();
        if (pendingCommittedOffsetRequest != null && !pendingCommittedOffsetRequest.sameRequest(partitions, generation)) {
            // if we were waiting for a different request, then just clear it.
            pendingCommittedOffsetRequest = null;
        }
        do {
            if (!ensureCoordinatorReady(timer)) return null;
            // contact coordinator to fetch committed offsets
            final RequestFuture<Map<TopicPartition, OffsetAndMetadata>> future;
            if (pendingCommittedOffsetRequest != null) {
                future = pendingCommittedOffsetRequest.response;
            } else {
                future = sendOffsetFetchRequest(partitions);
                pendingCommittedOffsetRequest = new PendingCommittedOffsetRequest(partitions, generation, future);
            }
            client.poll(future, timer);
            if (future.isDone()) {
                pendingCommittedOffsetRequest = null;
                if (future.succeeded()) {
                    return future.value();
                } else if (!future.isRetriable()) {
                    throw future.exception();
                } else {
                    // retriable failure: back off before retrying within the timer's budget
                    timer.sleep(rebalanceConfig.retryBackoffMs);
                }
            } else {
                // not done yet: leave the pending request cached for the next call
                return null;
            }
        } while (timer.notExpired());
        return null;
    }
    /**
     * Shuts the coordinator down within the timer's budget: performs a final
     * auto-commit (if enabled), drains outstanding async commits so their callbacks
     * fire, then delegates to the base coordinator for group departure.
     *
     * @param timer Timer bounding how long shutdown may block
     */
    public void close(final Timer timer) {
        // we do not need to re-enable wakeups since we are closing already
        client.disableWakeups();
        try {
            maybeAutoCommitOffsetsSync(timer);
            while (pendingAsyncCommits.get() > 0 && timer.notExpired()) {
                ensureCoordinatorReady(timer);
                client.poll(timer);
                invokeCompletedOffsetCommitCallbacks();
            }
        } finally {
            // always release base-coordinator resources, even if draining timed out
            super.close(timer);
        }
    }
// visible for testing
void invokeCompletedOffsetCommitCallbacks() {
if (asyncCommitFenced.get()) {
throw new FencedInstanceIdException("Get fenced exception for group.instance.id "
+ rebalanceConfig.groupInstanceId.orElse("unset_instance_id")
+ ", current member.id is " + memberId());
}
while (true) {
OffsetCommitCompletion completion = completedOffsetCommits.poll();
if (completion == null) {
break;
}
completion.invoke();
}
}
    /**
     * Commits offsets asynchronously. If the coordinator is known the commit is sent
     * immediately; otherwise it is chained onto a coordinator lookup so commits are
     * never retried recursively (which could reorder them). Callbacks are delivered
     * via the completed-commit queue on the caller's thread.
     *
     * @param offsets  offsets to commit, per partition
     * @param callback invoked on completion; null means use the default (log-only) callback
     */
    public void commitOffsetsAsync(final Map<TopicPartition, OffsetAndMetadata> offsets, final OffsetCommitCallback callback) {
        invokeCompletedOffsetCommitCallbacks();
        if (!coordinatorUnknown()) {
            doCommitOffsetsAsync(offsets, callback);
        } else {
            // we don't know the current coordinator, so try to find it and then send the commit
            // or fail (we don't want recursive retries which can cause offset commits to arrive
            // out of order). Note that there may be multiple offset commits chained to the same
            // coordinator lookup request. This is fine because the listeners will be invoked in
            // the same order that they were added. Note also that AbstractCoordinator prevents
            // multiple concurrent coordinator lookup requests.
            pendingAsyncCommits.incrementAndGet();
            lookupCoordinator().addListener(new RequestFutureListener<Void>() {
                @Override
                public void onSuccess(Void value) {
                    pendingAsyncCommits.decrementAndGet();
                    doCommitOffsetsAsync(offsets, callback);
                    client.pollNoWakeup();
                }
                @Override
                public void onFailure(RuntimeException e) {
                    pendingAsyncCommits.decrementAndGet();
                    // surface the lookup failure through the commit callback as retriable
                    completedOffsetCommits.add(new OffsetCommitCompletion(callback, offsets,
                            new RetriableCommitFailedException(e)));
                }
            });
        }
        // ensure the commit has a chance to be transmitted (without blocking on its completion).
        // Note that commits are treated as heartbeats by the coordinator, so there is no need to
        // explicitly allow heartbeats through delayed task execution.
        client.pollNoWakeup();
    }
    /**
     * Sends the OffsetCommit request and wires its outcome into the completed-commit
     * queue, so the user callback (or the default one) runs on the polling thread.
     * A retriable failure is wrapped so callers know a retry may succeed; a fenced
     * instance id additionally poisons future callback invocations.
     */
    private void doCommitOffsetsAsync(final Map<TopicPartition, OffsetAndMetadata> offsets, final OffsetCommitCallback callback) {
        RequestFuture<Void> future = sendOffsetCommitRequest(offsets);
        final OffsetCommitCallback cb = callback == null ? defaultOffsetCommitCallback : callback;
        future.addListener(new RequestFutureListener<Void>() {
            @Override
            public void onSuccess(Void value) {
                if (interceptors != null)
                    interceptors.onCommit(offsets);
                completedOffsetCommits.add(new OffsetCommitCompletion(cb, offsets, null));
            }
            @Override
            public void onFailure(RuntimeException e) {
                Exception commitException = e;
                if (e instanceof RetriableException) {
                    commitException = new RetriableCommitFailedException(e);
                }
                completedOffsetCommits.add(new OffsetCommitCompletion(cb, offsets, commitException));
                if (commitException instanceof FencedInstanceIdException) {
                    // flag checked by invokeCompletedOffsetCommitCallbacks(): a fenced
                    // static member must fail fast on its next callback drain
                    asyncCommitFenced.set(true);
                }
            }
        });
    }
    /**
     * Commit offsets synchronously. This method will retry until the commit completes successfully
     * or an unrecoverable error is encountered.
     * @param offsets The offsets to be committed
     * @param timer   Timer bounding how long this method can block
     * @throws org.apache.kafka.common.errors.AuthorizationException if the consumer is not authorized to the group
     *             or to any of the specified partitions. See the exception for more details
     * @throws CommitFailedException if an unrecoverable error occurs before the commit can be completed
     * @throws FencedInstanceIdException if a static member gets fenced
     * @return If the offset commit was successfully sent and a successful response was received from
     *         the coordinator
     */
    public boolean commitOffsetsSync(Map<TopicPartition, OffsetAndMetadata> offsets, Timer timer) {
        invokeCompletedOffsetCommitCallbacks();
        if (offsets.isEmpty())
            return true;
        do {
            if (coordinatorUnknown() && !ensureCoordinatorReady(timer)) {
                return false;
            }
            RequestFuture<Void> future = sendOffsetCommitRequest(offsets);
            client.poll(future, timer);
            // We may have had in-flight offset commits when the synchronous commit began. If so, ensure that
            // the corresponding callbacks are invoked prior to returning in order to preserve the order that
            // the offset commits were applied.
            invokeCompletedOffsetCommitCallbacks();
            if (future.succeeded()) {
                if (interceptors != null)
                    interceptors.onCommit(offsets);
                return true;
            }
            if (future.failed() && !future.isRetriable())
                throw future.exception();
            // retriable failure (or future not done): back off before the next attempt
            timer.sleep(rebalanceConfig.retryBackoffMs);
        } while (timer.notExpired());
        return false;
    }
public void maybeAutoCommitOffsetsAsync(long now) {
if (autoCommitEnabled) {
nextAutoCommitTimer.update(now);
if (nextAutoCommitTimer.isExpired()) {
nextAutoCommitTimer.reset(autoCommitIntervalMs);
doAutoCommitOffsetsAsync();
}
}
}
    /**
     * Asynchronously commits all currently-consumed offsets. On a retriable failure
     * the auto-commit timer is shortened to the retry backoff so the next poll retries
     * promptly; non-retriable failures are only logged (consistent with auto-commit
     * being best-effort).
     */
    private void doAutoCommitOffsetsAsync() {
        Map<TopicPartition, OffsetAndMetadata> allConsumedOffsets = subscriptions.allConsumed();
        log.debug("Sending asynchronous auto-commit of offsets {}", allConsumedOffsets);
        commitOffsetsAsync(allConsumedOffsets, (offsets, exception) -> {
            if (exception != null) {
                if (exception instanceof RetriableException) {
                    log.debug("Asynchronous auto-commit of offsets {} failed due to retriable error: {}", offsets,
                        exception);
                    nextAutoCommitTimer.updateAndReset(rebalanceConfig.retryBackoffMs);
                } else {
                    log.warn("Asynchronous auto-commit of offsets {} failed: {}", offsets, exception.getMessage());
                }
            } else {
                log.debug("Completed asynchronous auto-commit of offsets {}", offsets);
            }
        });
    }
    /**
     * Synchronously commits all consumed offsets if auto-commit is enabled (used
     * before rebalances and on close). Wakeups/interrupts propagate; any other
     * failure is logged and swallowed, matching async auto-commit semantics.
     *
     * @param timer Timer bounding how long the commit may block
     */
    private void maybeAutoCommitOffsetsSync(Timer timer) {
        if (autoCommitEnabled) {
            Map<TopicPartition, OffsetAndMetadata> allConsumedOffsets = subscriptions.allConsumed();
            try {
                log.debug("Sending synchronous auto-commit of offsets {}", allConsumedOffsets);
                if (!commitOffsetsSync(allConsumedOffsets, timer))
                    log.debug("Auto-commit of offsets {} timed out before completion", allConsumedOffsets);
            } catch (WakeupException | InterruptException e) {
                log.debug("Auto-commit of offsets {} was interrupted before completion", allConsumedOffsets);
                // rethrow wakeups since they are triggered by the user
                throw e;
            } catch (Exception e) {
                // consistent with async auto-commit failures, we do not propagate the exception
                log.warn("Synchronous auto-commit of offsets {} failed: {}", allConsumedOffsets, e.getMessage());
            }
        }
    }
    /**
     * Fallback callback for async commits when the caller supplies none: logs
     * failures and is otherwise a no-op.
     */
    private class DefaultOffsetCommitCallback implements OffsetCommitCallback {
        @Override
        public void onComplete(Map<TopicPartition, OffsetAndMetadata> offsets, Exception exception) {
            if (exception != null)
                log.error("Offset commit with offsets {} failed", offsets, exception);
        }
    }
/**
* Commit offsets for the specified list of topics and partitions. This is a non-blocking call
* which returns a request future that can be polled in the case of a synchronous commit or ignored in the
* asynchronous case.
*
* @param offsets The list of offsets per partition that should be committed.
* @return A request future whose value indicates whether the commit was successful or not
*/
private RequestFuture<Void> sendOffsetCommitRequest(final Map<TopicPartition, OffsetAndMetadata> offsets) {
if (offsets.isEmpty())
return RequestFuture.voidSuccess();
Node coordinator = checkAndGetCoordinator();
if (coordinator == null)
return RequestFuture.coordinatorNotAvailable();
// create the offset commit request
Map<String, OffsetCommitRequestData.OffsetCommitRequestTopic> requestTopicDataMap = new HashMap<>();
for (Map.Entry<TopicPartition, OffsetAndMetadata> entry : offsets.entrySet()) {
TopicPartition topicPartition = entry.getKey();
OffsetAndMetadata offsetAndMetadata = entry.getValue();
if (offsetAndMetadata.offset() < 0) {
return RequestFuture.failure(new IllegalArgumentException("Invalid offset: " + offsetAndMetadata.offset()));
}
OffsetCommitRequestData.OffsetCommitRequestTopic topic = requestTopicDataMap
.getOrDefault(topicPartition.topic(),
new OffsetCommitRequestData.OffsetCommitRequestTopic()
.setName(topicPartition.topic())
);
topic.partitions().add(new OffsetCommitRequestData.OffsetCommitRequestPartition()
.setPartitionIndex(topicPartition.partition())
.setCommittedOffset(offsetAndMetadata.offset())
.setCommittedLeaderEpoch(offsetAndMetadata.leaderEpoch().orElse(RecordBatch.NO_PARTITION_LEADER_EPOCH))
.setCommittedMetadata(offsetAndMetadata.metadata())
);
requestTopicDataMap.put(topicPartition.topic(), topic);
}
final Generation generation;
if (subscriptions.partitionsAutoAssigned()) {
generation = generation();
// if the generation is null, we are not part of an active group (and we expect to be).
// the only thing we can do is fail the commit and let the user rejoin the group in poll()
if (generation == null) {
log.info("Failing OffsetCommit request since the consumer is not part of an active group");
return RequestFuture.failure(new CommitFailedException());
}
} else
generation = Generation.NO_GENERATION;
OffsetCommitRequest.Builder builder = new OffsetCommitRequest.Builder(
new OffsetCommitRequestData()
.setGroupId(this.rebalanceConfig.groupId)
.setGenerationId(generation.generationId)
.setMemberId(generation.memberId)
.setGroupInstanceId(rebalanceConfig.groupInstanceId.orElse(null))
.setTopics(new ArrayList<>(requestTopicDataMap.values()))
);
log.trace("Sending OffsetCommit request with {} to coordinator {}", offsets, coordinator);
return client.send(coordinator, builder)
.compose(new OffsetCommitResponseHandler(offsets));
}
    /**
     * Translates an OffsetCommitResponse into the commit future's outcome. Per-partition
     * errors are classified into: fatal user-facing errors, retriable errors, coordinator
     * re-discovery, and group-membership errors that force a rejoin.
     */
    private class OffsetCommitResponseHandler extends CoordinatorResponseHandler<OffsetCommitResponse, Void> {
        private final Map<TopicPartition, OffsetAndMetadata> offsets;
        private OffsetCommitResponseHandler(Map<TopicPartition, OffsetAndMetadata> offsets) {
            this.offsets = offsets;
        }
        @Override
        public void handle(OffsetCommitResponse commitResponse, RequestFuture<Void> future) {
            // NOTE(review): 'response' is presumably an inherited field of
            // CoordinatorResponseHandler holding the raw client response (latency source),
            // distinct from the typed 'commitResponse' parameter — confirm in the base class.
            sensors.commitLatency.record(response.requestLatencyMs());
            Set<String> unauthorizedTopics = new HashSet<>();
            for (OffsetCommitResponseData.OffsetCommitResponseTopic topic : commitResponse.data().topics()) {
                for (OffsetCommitResponseData.OffsetCommitResponsePartition partition : topic.partitions()) {
                    TopicPartition tp = new TopicPartition(topic.name(), partition.partitionIndex());
                    OffsetAndMetadata offsetAndMetadata = this.offsets.get(tp);
                    long offset = offsetAndMetadata.offset();
                    Errors error = Errors.forCode(partition.errorCode());
                    if (error == Errors.NONE) {
                        log.debug("Committed offset {} for partition {}", offset, tp);
                    } else {
                        // retriable errors log at warn, fatal ones at error
                        if (error.exception() instanceof RetriableException) {
                            log.warn("Offset commit failed on partition {} at offset {}: {}", tp, offset, error.message());
                        } else {
                            log.error("Offset commit failed on partition {} at offset {}: {}", tp, offset, error.message());
                        }
                        if (error == Errors.GROUP_AUTHORIZATION_FAILED) {
                            future.raise(new GroupAuthorizationException(rebalanceConfig.groupId));
                            return;
                        } else if (error == Errors.TOPIC_AUTHORIZATION_FAILED) {
                            // collect and report all unauthorized topics together below
                            unauthorizedTopics.add(tp.topic());
                        } else if (error == Errors.OFFSET_METADATA_TOO_LARGE
                                || error == Errors.INVALID_COMMIT_OFFSET_SIZE) {
                            // raise the error to the user
                            future.raise(error);
                            return;
                        } else if (error == Errors.COORDINATOR_LOAD_IN_PROGRESS
                                || error == Errors.UNKNOWN_TOPIC_OR_PARTITION) {
                            // just retry
                            future.raise(error);
                            return;
                        } else if (error == Errors.COORDINATOR_NOT_AVAILABLE
                                || error == Errors.NOT_COORDINATOR
                                || error == Errors.REQUEST_TIMED_OUT) {
                            // coordinator may have moved: re-discover it before retrying
                            markCoordinatorUnknown();
                            future.raise(error);
                            return;
                        } else if (error == Errors.FENCED_INSTANCE_ID) {
                            log.error("Received fatal exception: group.instance.id gets fenced");
                            future.raise(error);
                            return;
                        } else if (error == Errors.REBALANCE_IN_PROGRESS) {
                            /* Consumer never tries to commit offset in between join-group and sync-group,
                             * and hence on broker-side it is not expected to see a commit offset request
                             * during CompletingRebalance phase; if it ever happens then broker would return
                             * this error. In this case we should just treat as a fatal CommitFailed exception.
                             * However, we do not need to reset generations and just request re-join, such that
                             * if the caller decides to proceed and poll, it would still try to proceed and re-join normally.
                             */
                            requestRejoin();
                            future.raise(new CommitFailedException());
                            return;
                        } else if (error == Errors.UNKNOWN_MEMBER_ID
                                || error == Errors.ILLEGAL_GENERATION) {
                            // need to reset generation and re-join group
                            resetGeneration();
                            future.raise(new CommitFailedException());
                            return;
                        } else {
                            future.raise(new KafkaException("Unexpected error in commit: " + error.message()));
                            return;
                        }
                    }
                }
            }
            if (!unauthorizedTopics.isEmpty()) {
                log.error("Not authorized to commit to topics {}", unauthorizedTopics);
                future.raise(new TopicAuthorizationException(unauthorizedTopics));
            } else {
                future.complete(null);
            }
        }
    }
/**
* Fetch the committed offsets for a set of partitions. This is a non-blocking call. The
* returned future can be polled to get the actual offsets returned from the broker.
*
* @param partitions The set of partitions to get offsets for.
* @return A request future containing the committed offsets.
*/
private RequestFuture<Map<TopicPartition, OffsetAndMetadata>> sendOffsetFetchRequest(Set<TopicPartition> partitions) {
Node coordinator = checkAndGetCoordinator();
if (coordinator == null)
return RequestFuture.coordinatorNotAvailable();
log.debug("Fetching committed offsets for partitions: {}", partitions);
// construct the request
OffsetFetchRequest.Builder requestBuilder = new OffsetFetchRequest.Builder(this.rebalanceConfig.groupId,
new ArrayList<>(partitions));
// send the request with a callback
return client.send(coordinator, requestBuilder)
.compose(new OffsetFetchResponseHandler());
}
    /**
     * Translates an OffsetFetchResponse into a map of committed offsets. Top-level
     * errors fail the whole future; per-partition errors either fail it, accumulate
     * into an authorization failure, or (offset == -1) mean no committed offset exists.
     */
    private class OffsetFetchResponseHandler extends CoordinatorResponseHandler<OffsetFetchResponse, Map<TopicPartition, OffsetAndMetadata>> {
        @Override
        public void handle(OffsetFetchResponse response, RequestFuture<Map<TopicPartition, OffsetAndMetadata>> future) {
            if (response.hasError()) {
                Errors error = response.error();
                log.debug("Offset fetch failed: {}", error.message());
                if (error == Errors.COORDINATOR_LOAD_IN_PROGRESS) {
                    // just retry
                    future.raise(error);
                } else if (error == Errors.NOT_COORDINATOR) {
                    // re-discover the coordinator and retry
                    markCoordinatorUnknown();
                    future.raise(error);
                } else if (error == Errors.GROUP_AUTHORIZATION_FAILED) {
                    future.raise(new GroupAuthorizationException(rebalanceConfig.groupId));
                } else {
                    future.raise(new KafkaException("Unexpected error in fetch offset response: " + error.message()));
                }
                return;
            }
            // allocated lazily: most responses contain no authorization failures
            Set<String> unauthorizedTopics = null;
            Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(response.responseData().size());
            for (Map.Entry<TopicPartition, OffsetFetchResponse.PartitionData> entry : response.responseData().entrySet()) {
                TopicPartition tp = entry.getKey();
                OffsetFetchResponse.PartitionData data = entry.getValue();
                if (data.hasError()) {
                    Errors error = data.error;
                    log.debug("Failed to fetch offset for partition {}: {}", tp, error.message());
                    if (error == Errors.UNKNOWN_TOPIC_OR_PARTITION) {
                        future.raise(new KafkaException("Topic or Partition " + tp + " does not exist"));
                        return;
                    } else if (error == Errors.TOPIC_AUTHORIZATION_FAILED) {
                        if (unauthorizedTopics == null) {
                            unauthorizedTopics = new HashSet<>();
                        }
                        unauthorizedTopics.add(tp.topic());
                    } else {
                        future.raise(new KafkaException("Unexpected error in fetch offset response for partition " +
                            tp + ": " + error.message()));
                        return;
                    }
                } else if (data.offset >= 0) {
                    // record the position with the offset (-1 indicates no committed offset to fetch)
                    offsets.put(tp, new OffsetAndMetadata(data.offset, data.leaderEpoch, data.metadata));
                } else {
                    log.info("Found no committed offset for partition {}", tp);
                }
            }
            if (unauthorizedTopics != null) {
                future.raise(new TopicAuthorizationException(unauthorizedTopics));
            } else {
                future.complete(offsets);
            }
        }
    }
    /**
     * Registers the coordinator's metrics: commit-latency avg/max and rate, plus a
     * gauge-style measurable for the current number of assigned partitions.
     */
    private class ConsumerCoordinatorMetrics {
        private final String metricGrpName;
        private final Sensor commitLatency;
        private ConsumerCoordinatorMetrics(Metrics metrics, String metricGrpPrefix) {
            this.metricGrpName = metricGrpPrefix + "-coordinator-metrics";
            this.commitLatency = metrics.sensor("commit-latency");
            this.commitLatency.add(metrics.metricName("commit-latency-avg",
                this.metricGrpName,
                "The average time taken for a commit request"), new Avg());
            this.commitLatency.add(metrics.metricName("commit-latency-max",
                this.metricGrpName,
                "The max time taken for a commit request"), new Max());
            this.commitLatency.add(createMeter(metrics, metricGrpName, "commit", "commit calls"));
            // sampled on read rather than recorded, so it always reflects the live assignment
            Measurable numParts = (config, now) -> subscriptions.numAssignedPartitions();
            metrics.addMetric(metrics.metricName("assigned-partitions",
                this.metricGrpName,
                "The number of partitions currently assigned to this consumer"), numParts);
        }
    }
private static class MetadataSnapshot {
private final int version;
private final Map<String, Integer> partitionsPerTopic;
private MetadataSnapshot(SubscriptionState subscription, Cluster cluster, int version) {
Map<String, Integer> partitionsPerTopic = new HashMap<>();
for (String topic : subscription.groupSubscription())
partitionsPerTopic.put(topic, cluster.partitionCountForTopic(topic));
this.partitionsPerTopic = partitionsPerTopic;
this.version = version;
}
boolean matches(MetadataSnapshot other) {
return version == other.version || partitionsPerTopic.equals(other.partitionsPerTopic);
}
}
/**
 * Pairs the outcome of an offset commit (the committed offsets, or the
 * exception that failed it) with the user callback to be notified;
 * {@link #invoke()} fires that callback.
 */
private static class OffsetCommitCompletion {
    private final OffsetCommitCallback callback;
    private final Map<TopicPartition, OffsetAndMetadata> offsets;
    private final Exception exception;

    private OffsetCommitCompletion(OffsetCommitCallback callback,
                                   Map<TopicPartition, OffsetAndMetadata> offsets,
                                   Exception exception) {
        this.callback = callback;
        this.offsets = offsets;
        this.exception = exception;
    }

    /** Notifies the callback, if any, with the offsets and the error ({@code null} on success). */
    public void invoke() {
        if (callback == null) {
            return;
        }
        callback.onComplete(offsets, exception);
    }
}
}
| |
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.hash;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.common.math.LongMath;
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import java.math.RoundingMode;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicLongArray;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
/**
* Collections of strategies of generating the k * log(M) bits required for an element to be mapped
* to a BloomFilter of M bits and k hash functions. These strategies are part of the serialized form
* of the Bloom filters that use them, thus they must be preserved as is (no updates allowed, only
* introduction of new versions).
*
* <p>Important: the order of the constants cannot change, and they cannot be deleted - we depend on
* their ordinal for BloomFilter serialization.
*
* @author Dimitris Andreou
* @author Kurt Alfred Kluever
*/
enum BloomFilterStrategies implements BloomFilter.Strategy {
  /**
   * See "Less Hashing, Same Performance: Building a Better Bloom Filter" by Adam Kirsch and Michael
   * Mitzenmacher. The paper argues that this trick doesn't significantly deteriorate the
   * performance of a Bloom filter (yet only needs two 32bit hash functions).
   */
  MURMUR128_MITZ_32() {
    @Override
    public <T> boolean put(
        T object, Funnel<? super T> funnel, int numHashFunctions, LockFreeBitArray bits) {
      long bitSize = bits.bitSize();
      long hash64 = Hashing.murmur3_128().hashObject(object, funnel).asLong();
      // Kirsch-Mitzenmacher: all k probe indices are derived from the two 32-bit halves
      // of one 64-bit hash as hash1 + i * hash2.
      int hash1 = (int) hash64;
      int hash2 = (int) (hash64 >>> 32);
      boolean bitsChanged = false;
      // The loop runs i = 1..k (not 0..k-1). This is part of the serialized format of
      // existing filters and must not be changed.
      for (int i = 1; i <= numHashFunctions; i++) {
        int combinedHash = hash1 + (i * hash2);
        // Flip all the bits if it's negative (guaranteed positive number)
        if (combinedHash < 0) {
          combinedHash = ~combinedHash;
        }
        // int % long is evaluated in long arithmetic, so the index is valid for any bitSize.
        bitsChanged |= bits.set(combinedHash % bitSize);
      }
      return bitsChanged;
    }

    @Override
    public <T> boolean mightContain(
        T object, Funnel<? super T> funnel, int numHashFunctions, LockFreeBitArray bits) {
      long bitSize = bits.bitSize();
      long hash64 = Hashing.murmur3_128().hashObject(object, funnel).asLong();
      int hash1 = (int) hash64;
      int hash2 = (int) (hash64 >>> 32);
      // Mirrors put(): probes the same k positions; one missing bit proves the element
      // was never inserted.
      for (int i = 1; i <= numHashFunctions; i++) {
        int combinedHash = hash1 + (i * hash2);
        // Flip all the bits if it's negative (guaranteed positive number)
        if (combinedHash < 0) {
          combinedHash = ~combinedHash;
        }
        if (!bits.get(combinedHash % bitSize)) {
          return false;
        }
      }
      return true;
    }
  },
  /**
   * This strategy uses all 128 bits of {@link Hashing#murmur3_128} when hashing. It looks different
   * than the implementation in MURMUR128_MITZ_32 because we're avoiding the multiplication in the
   * loop and doing a (much simpler) += hash2. We're also changing the index to a positive number by
   * AND'ing with Long.MAX_VALUE instead of flipping the bits.
   */
  MURMUR128_MITZ_64() {
    @Override
    public <T> boolean put(
        T object, Funnel<? super T> funnel, int numHashFunctions, LockFreeBitArray bits) {
      long bitSize = bits.bitSize();
      byte[] bytes = Hashing.murmur3_128().hashObject(object, funnel).getBytesInternal();
      long hash1 = lowerEight(bytes);
      long hash2 = upperEight(bytes);
      boolean bitsChanged = false;
      long combinedHash = hash1;
      // i-th index is hash1 + i*hash2, computed incrementally to avoid the multiply.
      for (int i = 0; i < numHashFunctions; i++) {
        // Make the combined hash positive and indexable
        bitsChanged |= bits.set((combinedHash & Long.MAX_VALUE) % bitSize);
        combinedHash += hash2;
      }
      return bitsChanged;
    }

    @Override
    public <T> boolean mightContain(
        T object, Funnel<? super T> funnel, int numHashFunctions, LockFreeBitArray bits) {
      long bitSize = bits.bitSize();
      byte[] bytes = Hashing.murmur3_128().hashObject(object, funnel).getBytesInternal();
      long hash1 = lowerEight(bytes);
      long hash2 = upperEight(bytes);
      long combinedHash = hash1;
      for (int i = 0; i < numHashFunctions; i++) {
        // Make the combined hash positive and indexable
        if (!bits.get((combinedHash & Long.MAX_VALUE) % bitSize)) {
          return false;
        }
        combinedHash += hash2;
      }
      return true;
    }

    // Reads bytes[0..7] as a long, least-significant byte first (the low half of the 128-bit hash).
    private /* static */ long lowerEight(byte[] bytes) {
      return Longs.fromBytes(
          bytes[7], bytes[6], bytes[5], bytes[4], bytes[3], bytes[2], bytes[1], bytes[0]);
    }

    // Reads bytes[8..15] as a long, least-significant byte first (the high half of the 128-bit hash).
    private /* static */ long upperEight(byte[] bytes) {
      return Longs.fromBytes(
          bytes[15], bytes[14], bytes[13], bytes[12], bytes[11], bytes[10], bytes[9], bytes[8]);
    }
  };

  /**
   * Models a lock-free array of bits.
   *
   * <p>We use this instead of java.util.BitSet because we need access to the array of longs and we
   * need compare-and-swap.
   */
  static final class LockFreeBitArray {
    // log2(Long.SIZE): shifting a bit index right by this yields the index of the containing long.
    private static final int LONG_ADDRESSABLE_BITS = 6;
    final AtomicLongArray data;
    // Running count of set bits; see bitCount() for its accuracy caveats under concurrency.
    private final LongAddable bitCount;

    LockFreeBitArray(long bits) {
      checkArgument(bits > 0, "data length is zero!");
      // Avoid delegating to this(long[]), since AtomicLongArray(long[]) will clone its input and
      // thus double memory usage.
      this.data =
          new AtomicLongArray(Ints.checkedCast(LongMath.divide(bits, 64, RoundingMode.CEILING)));
      this.bitCount = LongAddables.create();
    }

    // Used by serialization
    LockFreeBitArray(long[] data) {
      checkArgument(data.length > 0, "data length is zero!");
      this.data = new AtomicLongArray(data);
      this.bitCount = LongAddables.create();
      // Recompute the popcount of the deserialized words.
      long bitCount = 0;
      for (long value : data) {
        bitCount += Long.bitCount(value);
      }
      this.bitCount.add(bitCount);
    }

    /** Returns true if the bit changed value. */
    boolean set(long bitIndex) {
      // Fast path: a plain read showing the bit already set lets us skip the CAS loop.
      if (get(bitIndex)) {
        return false;
      }
      int longIndex = (int) (bitIndex >>> LONG_ADDRESSABLE_BITS);
      long mask = 1L << bitIndex; // only cares about low 6 bits of bitIndex
      long oldValue;
      long newValue;
      do {
        oldValue = data.get(longIndex);
        newValue = oldValue | mask;
        if (oldValue == newValue) {
          // Another thread set this bit between our get() above and this read.
          return false;
        }
      } while (!data.compareAndSet(longIndex, oldValue, newValue));
      // We turned the bit on, so increment bitCount.
      bitCount.increment();
      return true;
    }

    boolean get(long bitIndex) {
      // 1L << bitIndex uses only the low 6 bits of bitIndex, matching set() above.
      return (data.get((int) (bitIndex >>> LONG_ADDRESSABLE_BITS)) & (1L << bitIndex)) != 0;
    }

    /**
     * Careful here: if threads are mutating the atomicLongArray while this method is executing, the
     * final long[] will be a "rolling snapshot" of the state of the bit array. This is usually good
     * enough, but should be kept in mind.
     */
    public static long[] toPlainArray(AtomicLongArray atomicLongArray) {
      long[] array = new long[atomicLongArray.length()];
      for (int i = 0; i < array.length; ++i) {
        array[i] = atomicLongArray.get(i);
      }
      return array;
    }

    /** Number of bits */
    long bitSize() {
      return (long) data.length() * Long.SIZE;
    }

    /**
     * Number of set bits (1s).
     *
     * <p>Note that because of concurrent set calls and uses of atomics, this bitCount is a (very)
     * close *estimate* of the actual number of bits set. It's not possible to do better than an
     * estimate without locking. Note that the number, if not exactly accurate, is *always*
     * underestimating, never overestimating.
     */
    long bitCount() {
      return bitCount.sum();
    }

    /** Returns a new LockFreeBitArray backed by a rolling snapshot of the current bits. */
    LockFreeBitArray copy() {
      return new LockFreeBitArray(toPlainArray(data));
    }

    /**
     * Combines the two BitArrays using bitwise OR.
     *
     * <p>NOTE: Because of the use of atomics, if the other LockFreeBitArray is being mutated while
     * this operation is executing, not all of those new 1's may be set in the final state of this
     * LockFreeBitArray. The ONLY guarantee provided is that all the bits that were set in the other
     * LockFreeBitArray at the start of this method will be set in this LockFreeBitArray at the end
     * of this method.
     */
    void putAll(LockFreeBitArray other) {
      checkArgument(
          data.length() == other.data.length(),
          "BitArrays must be of equal length (%s != %s)",
          data.length(),
          other.data.length());
      for (int i = 0; i < data.length(); i++) {
        long otherLong = other.data.get(i);
        long ourLongOld;
        long ourLongNew;
        boolean changedAnyBits = true;
        do {
          ourLongOld = data.get(i);
          ourLongNew = ourLongOld | otherLong;
          if (ourLongOld == ourLongNew) {
            changedAnyBits = false;
            break;
          }
        } while (!data.compareAndSet(i, ourLongOld, ourLongNew));
        if (changedAnyBits) {
          // Credit exactly the bits this thread flipped on in this word.
          int bitsAdded = Long.bitCount(ourLongNew) - Long.bitCount(ourLongOld);
          bitCount.add(bitsAdded);
        }
      }
    }

    @Override
    public boolean equals(@NullableDecl Object o) {
      if (o instanceof LockFreeBitArray) {
        LockFreeBitArray lockFreeBitArray = (LockFreeBitArray) o;
        // TODO(lowasser): avoid allocation here
        return Arrays.equals(toPlainArray(data), toPlainArray(lockFreeBitArray.data));
      }
      return false;
    }

    @Override
    public int hashCode() {
      // TODO(lowasser): avoid allocation here
      return Arrays.hashCode(toPlainArray(data));
    }
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.actionSystem;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.PossiblyDumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.util.SmartList;
import com.intellij.util.ui.UIUtil;
import org.intellij.lang.annotations.JdkConstants;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.List;
/**
* Represents an entity that has a state, a presentation and can be performed.
*
* For an action to be useful, you need to implement {@link AnAction#actionPerformed}
* and optionally to override {@link AnAction#update}. By overriding the
* {@link AnAction#update} method you can dynamically change action's presentation
 * depending on the place (for more information on places see {@link com.intellij.openapi.actionSystem.ActionPlaces}).
*
* The same action can have various presentations.
*
* <pre>
* public class MyAction extends AnAction {
* public MyAction() {
* // ...
* }
*
* public void update(AnActionEvent e) {
* Presentation presentation = e.getPresentation();
* if (e.getPlace().equals(ActionPlaces.MAIN_MENU)) {
* presentation.setText("My Menu item name");
* } else if (e.getPlace().equals(ActionPlaces.MAIN_TOOLBAR)) {
* presentation.setText("My Toolbar item name");
* }
* }
*
* public void actionPerformed(AnActionEvent e) { ... }
* }
* </pre>
*
* @see AnActionEvent
* @see Presentation
* @see com.intellij.openapi.actionSystem.ActionPlaces
*/
public abstract class AnAction implements PossiblyDumbAware {
  private static final Logger LOG = Logger.getInstance(AnAction.class);

  /**
   * Client-property key under which a component stores the list of actions whose custom
   * shortcut sets were registered on it via {@link #registerCustomShortcutSet}.
   */
  public static final Key<List<AnAction>> ACTIONS_KEY = Key.create("AnAction.shortcutSet");
  public static final AnAction[] EMPTY_ARRAY = new AnAction[0];

  // Created lazily by getTemplatePresentation() so constructing an action stays cheap.
  private Presentation myTemplatePresentation;
  @NotNull
  private ShortcutSet myShortcutSet = CustomShortcutSet.EMPTY;
  private boolean myEnabledInModalContext;
  private boolean myIsDefaultIcon = true;
  private boolean myWorksInInjected;
  private boolean myIsGlobal; // action is registered in ActionManager

  /**
   * Creates a new action with its text, description and icon set to {@code null}.
   */
  public AnAction(){
    // avoid eagerly creating template presentation
  }

  /**
   * Creates a new action with {@code icon} provided. Its text, description set to {@code null}.
   *
   * @param icon Default icon to appear in toolbars and menus (note that some platforms don't have icons in menus).
   */
  public AnAction(Icon icon){
    this(null, null, icon);
  }

  /**
   * Creates a new action with the specified text. Description and icon are
   * set to {@code null}.
   *
   * @param text Serves as a tooltip when the presentation is a button and the name of the
   *             menu item when the presentation is a menu item.
   */
  public AnAction(@Nullable String text){
    this(text, null, null);
  }

  /**
   * Constructs a new action with the specified text, description and icon.
   *
   * @param text Serves as a tooltip when the presentation is a button and the name of the
   *             menu item when the presentation is a menu item
   *
   * @param description Describes current action, this description will appear on
   *                    the status bar when presentation has focus
   *
   * @param icon Action's icon
   */
  public AnAction(@Nullable String text, @Nullable String description, @Nullable Icon icon) {
    Presentation presentation = getTemplatePresentation();
    presentation.setText(text);
    presentation.setDescription(description);
    presentation.setIcon(icon);
  }

  /**
   * Returns the shortcut set associated with this action.
   *
   * @return shortcut set associated with this action
   */
  @NotNull
  public final ShortcutSet getShortcutSet(){
    return myShortcutSet;
  }

  /**
   * Registers a set of shortcuts that will be processed when the specified component
   * is the ancestor of focused component. Note that the action doesn't have
   * to be registered in action manager in order for that shortcut to work.
   *
   * @param shortcutSet the shortcuts for the action.
   * @param component   the component for which the shortcuts will be active.
   */
  public final void registerCustomShortcutSet(@NotNull ShortcutSet shortcutSet, @Nullable JComponent component) {
    registerCustomShortcutSet(shortcutSet, component, null);
  }

  /**
   * Convenience overload of {@link #registerCustomShortcutSet(ShortcutSet, JComponent)} that
   * builds a single-keystroke shortcut set from the given key code and modifier mask.
   */
  public final void registerCustomShortcutSet(int keyCode, @JdkConstants.InputEventMask int modifiers, @Nullable JComponent component) {
    registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(keyCode, modifiers)), component);
  }

  /**
   * Replaces this action's shortcut set with {@code shortcutSet} and activates it on
   * {@code component}; the registration is removed when {@code parentDisposable} is disposed.
   */
  public final void registerCustomShortcutSet(@NotNull ShortcutSet shortcutSet, @Nullable JComponent component, @Nullable Disposable parentDisposable) {
    setShortcutSet(shortcutSet);
    registerCustomShortcutSet(component, parentDisposable);
  }

  /**
   * Activates this action's current shortcut set on {@code component} by adding the action
   * to the component's {@link #ACTIONS_KEY} client property. No-op when {@code component} is null.
   */
  public final void registerCustomShortcutSet(@Nullable JComponent component, @Nullable Disposable parentDisposable) {
    if (component == null) return;
    List<AnAction> actionList = UIUtil.getClientProperty(component, ACTIONS_KEY);
    if (actionList == null) {
      UIUtil.putClientProperty(component, ACTIONS_KEY, actionList = new SmartList<>());
    }
    if (!actionList.contains(this)) {
      actionList.add(this);
    }
    if (parentDisposable != null) {
      // Auto-unregister when the owning disposable goes away.
      Disposer.register(parentDisposable, () -> unregisterCustomShortcutSet(component));
    }
  }

  /** Removes this action from the component's {@link #ACTIONS_KEY} list, deactivating its custom shortcuts there. */
  public final void unregisterCustomShortcutSet(@Nullable JComponent component) {
    List<AnAction> actionList = UIUtil.getClientProperty(component, ACTIONS_KEY);
    if (actionList != null) {
      actionList.remove(this);
    }
  }

  /**
   * Copies template presentation and shortcuts set from {@code sourceAction}.
   *
   * @param sourceAction cannot be {@code null}
   */
  public final void copyFrom(@NotNull AnAction sourceAction){
    Presentation sourcePresentation = sourceAction.getTemplatePresentation();
    Presentation presentation = getTemplatePresentation();
    presentation.copyFrom(sourcePresentation);
    copyShortcutFrom(sourceAction);
  }

  /** Copies only the shortcut set from {@code sourceAction}; see {@link #copyFrom} for the full copy. */
  public final void copyShortcutFrom(@NotNull AnAction sourceAction) {
    setShortcutSet(sourceAction.getShortcutSet());
  }

  /** @return whether this action stays enabled while a modal dialog is showing */
  public final boolean isEnabledInModalContext() {
    return myEnabledInModalContext;
  }

  protected final void setEnabledInModalContext(boolean enabledInModalContext) {
    myEnabledInModalContext = enabledInModalContext;
  }

  /**
   * Override with true returned if your action has to display its text along with the icon when placed in the toolbar
   */
  public boolean displayTextInToolbar() {
    return false;
  }

  /**
   * Override with true returned if your action displays text in a smaller font (same as toolbar combobox font) when placed in the toolbar
   */
  public boolean useSmallerFontForTextInToolbar() {
    return false;
  }

  /**
   * Updates the state of the action. Default implementation does nothing.
   * Override this method to provide the ability to dynamically change action's
   * state and(or) presentation depending on the context (For example
   * when your action state depends on the selection you can check for
   * selection and change the state accordingly).
   * This method can be called frequently, for instance, if an action is added to a toolbar,
   * it will be updated twice a second. This means that this method is supposed to work really fast,
   * no real work should be done at this phase. For example, checking selection in a tree or a list,
   * is considered valid, but working with a file system is not. If you cannot understand the state of
   * the action fast you should do it in the {@link #actionPerformed(AnActionEvent)} method and notify
   * the user that action cannot be executed if it's the case.
   *
   * @param e Carries information on the invocation place and data available
   */
  public void update(@NotNull AnActionEvent e) {
  }

  /**
   * Same as {@link #update(AnActionEvent)} but is called immediately before actionPerformed() as a final check guard.
   * Default implementation delegates to {@link #update(AnActionEvent)}.
   *
   * @param e Carries information on the invocation place and data available
   */
  public void beforeActionPerformedUpdate(@NotNull AnActionEvent e) {
    boolean worksInInjected = isInInjectedContext();
    e.setInjectedContext(worksInInjected);
    update(e);
    // If the injected-context update disabled the action, retry once against the
    // top-level (non-injected) context before giving up.
    if (!e.getPresentation().isEnabled() && worksInInjected) {
      e.setInjectedContext(false);
      update(e);
    }
  }

  /**
   * Returns a template presentation that will be used
   * as a template for created presentations.
   *
   * @return template presentation
   */
  @NotNull
  public final Presentation getTemplatePresentation() {
    Presentation presentation = myTemplatePresentation;
    if (presentation == null){
      // Created on first access; see the no-arg constructor.
      myTemplatePresentation = presentation = new Presentation();
    }
    return presentation;
  }

  /**
   * Implement this method to provide your action handler.
   *
   * @param e Carries information on the invocation place
   */
  public abstract void actionPerformed(@NotNull AnActionEvent e);

  protected void setShortcutSet(@NotNull ShortcutSet shortcutSet) {
    // Shortcut sets of actions registered in the ActionManager belong to the keymap;
    // warn (with a stack trace for diagnostics) when client code overrides them directly.
    if (myIsGlobal && myShortcutSet != shortcutSet) {
      LOG.warn("ShortcutSet of global AnActions should not be changed outside of KeymapManager.\n" +
               "This is likely not what you wanted to do. Consider setting shortcut in keymap defaults, inheriting from other action " +
               "using `use-shortcut-of` or wrapping with EmptyAction.wrap().", new Throwable());
    }
    myShortcutSet = shortcutSet;
  }

  /**
   * Sets the flag indicating whether the action has an internal or a user-customized icon.
   * @param isDefaultIconSet true if the icon is internal, false if the icon is customized by the user.
   */
  public void setDefaultIcon(boolean isDefaultIconSet) {
    myIsDefaultIcon = isDefaultIconSet;
  }

  /**
   * Returns true if the action has an internal, not user-customized icon.
   * @return true if the icon is internal, false if the icon is customized by the user.
   */
  public boolean isDefaultIcon() {
    return myIsDefaultIcon;
  }

  /**
   * Enables automatic detection of injected fragments in editor. Values in DataContext, passed to the action, like EDITOR, PSI_FILE
   * will refer to an injected fragment, if caret is currently positioned on it.
   */
  public void setInjectedContext(boolean worksInInjected) {
    myWorksInInjected = worksInInjected;
  }

  /** @return whether injected-fragment detection is enabled; see {@link #setInjectedContext(boolean)}. */
  public boolean isInInjectedContext() {
    return myWorksInInjected;
  }

  /** @return whether this action implements the {@link TransparentUpdate} marker interface. */
  public boolean isTransparentUpdate() {
    return this instanceof TransparentUpdate;
  }

  /**
   * @return whether this action should be wrapped into a single transaction. PSI/VFS-related actions
   * that can show progresses or modal dialogs should return true. The default value is false, to prevent
   * transaction-related assertions from actions in harmless dialogs like "Enter password" shown inside invokeLater.
   * @see com.intellij.openapi.application.TransactionGuard
   */
  public boolean startInTransaction() {
    return false;
  }

  /** Marker interface checked by {@link #isTransparentUpdate()}. */
  public interface TransparentUpdate {
  }

  /** @return the project from the event's data context, or {@code null} if the event is null or carries no project. */
  @Nullable
  public static Project getEventProject(AnActionEvent e) {
    return e == null ? null : e.getData(CommonDataKeys.PROJECT);
  }

  @Override
  public String toString() {
    return getTemplatePresentation().toString();
  }

  // Called when the action is registered in the ActionManager (see myIsGlobal).
  void markAsGlobal() {
    myIsGlobal = true;
  }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.editor.impl;
import com.intellij.ide.IdeEventQueue;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.impl.ApplicationImpl;
import com.intellij.openapi.editor.VisualPosition;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.ex.util.EditorUIUtil;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.ui.Queryable;
import com.intellij.openapi.ui.TypingTarget;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.wm.impl.IdeBackgroundUtil;
import com.intellij.ui.EditorTextField;
import com.intellij.ui.components.Magnificator;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ComponentEvent;
import java.awt.event.InputMethodEvent;
import java.awt.im.InputMethodRequests;
import java.util.Map;
/**
 * The Swing component hosting an {@link EditorImpl}: delegates painting, input-method events
 * and typing to the editor, exposes editor-related data-context values, and implements
 * {@link Scrollable} so the editor can live inside a {@link JViewport}.
 */
public class EditorComponentImpl extends JComponent implements Scrollable, DataProvider, Queryable, TypingTarget {
  private final EditorImpl myEditor;
  private final ApplicationImpl myApplication;

  public EditorComponentImpl(@NotNull EditorImpl editor) {
    myEditor = editor;
    enableEvents(AWTEvent.KEY_EVENT_MASK | AWTEvent.INPUT_METHOD_EVENT_MASK);
    enableInputMethods(true);
    setFocusCycleRoot(true);
    setOpaque(true);
    // Magnification (pinch-zoom) support: scales the editor font, never below the
    // globally configured size, and keeps the point under the gesture stationary.
    putClientProperty(Magnificator.CLIENT_PROPERTY_KEY, new Magnificator() {
      @Override
      public Point magnify(double scale, Point at) {
        VisualPosition magnificationPosition = myEditor.xyToVisualPosition(at);
        double currentSize = myEditor.getColorsScheme().getEditorFontSize();
        int defaultFontSize = EditorColorsManager.getInstance().getGlobalScheme().getEditorFontSize();
        myEditor.setFontSize(Math.max((int)(currentSize * scale), defaultFontSize));
        return myEditor.visualPositionToXY(magnificationPosition);
      }
    });
    myApplication = (ApplicationImpl)ApplicationManager.getApplication();
  }

  /** @return the editor this component renders */
  @NotNull
  public EditorImpl getEditor() {
    return myEditor;
  }

  @Override
  public Object getData(String dataId) {
    // Renderer-mode editors expose no data at all.
    if (myEditor.isRendererMode()) return null;
    if (PlatformDataKeys.EDITOR.is(dataId)) {
      // for 'big' editors return null to allow injected editors (see com.intellij.openapi.fileEditor.impl.text.TextEditorComponent.getData())
      return myEditor.getVirtualFile() == null ? myEditor : null;
    }
    if (PlatformDataKeys.DELETE_ELEMENT_PROVIDER.is(dataId)) {
      return myEditor.getDeleteProvider();
    }
    if (PlatformDataKeys.CUT_PROVIDER.is(dataId)) {
      return myEditor.getCutProvider();
    }
    if (PlatformDataKeys.COPY_PROVIDER.is(dataId)) {
      return myEditor.getCopyProvider();
    }
    if (PlatformDataKeys.PASTE_PROVIDER.is(dataId)) {
      return myEditor.getPasteProvider();
    }
    return null;
  }

  @Override
  public Color getBackground() {
    // The component's background always mirrors the editor's current scheme.
    return myEditor.getBackgroundColor();
  }

  @Override
  public Dimension getPreferredSize() {
    return myEditor.getPreferredSize();
  }

  // Lets the editor trigger a COMPONENT_RESIZED notification explicitly.
  protected void fireResized() {
    processComponentEvent(new ComponentEvent(this, ComponentEvent.COMPONENT_RESIZED));
  }

  @Override
  protected void processInputMethodEvent(InputMethodEvent e) {
    super.processInputMethodEvent(e);
    if (!e.isConsumed()) {
      switch (e.getID()) {
        case InputMethodEvent.INPUT_METHOD_TEXT_CHANGED:
          myEditor.replaceInputMethodText(e);
          // No breaks over here.
          // A TEXT_CHANGED event also updates the caret position, so deliberately
          // fall through to the CARET_POSITION_CHANGED handling.
          //noinspection fallthrough
        case InputMethodEvent.CARET_POSITION_CHANGED:
          myEditor.inputMethodCaretPositionChanged(e);
          break;
      }
      e.consume();
    }
  }

  @Override
  public ActionCallback type(final String text) {
    // TypingTarget implementation: forwards the text to the editor (on the UI thread
    // via invokeLaterIfNeeded) and completes the callback when typing finishes.
    final ActionCallback result = new ActionCallback();
    UIUtil.invokeLaterIfNeeded(new Runnable() {
      @Override
      public void run() {
        myEditor.type(text).notify(result);
      }
    });
    return result;
  }

  @Override
  public InputMethodRequests getInputMethodRequests() {
    // Input-method support can be globally disabled via IdeEventQueue.
    return IdeEventQueue.getInstance().isInputMethodEnabled() ? myEditor.getInputMethodRequests() : null;
  }

  @Override
  public void paintComponent(Graphics g) {
    // Bracket the paint with editorPaintStart/Finish so the application can track
    // in-progress editor painting; the finally block keeps the counter balanced on errors.
    myApplication.editorPaintStart();
    try {
      // Wrap with the IDE background painter unless this is a supplementary editor.
      Graphics2D gg = !Boolean.TRUE.equals(EditorTextField.SUPPLEMENTARY_KEY.get(myEditor)) ?
                      IdeBackgroundUtil.withEditorBackground(g, this) : (Graphics2D)g;
      UIUtil.setupComposite(gg);
      EditorUIUtil.setupAntialiasing(gg);
      myEditor.paint(gg);
    }
    finally {
      myApplication.editorPaintFinish();
    }
  }

  public void repaintEditorComponent() {
    repaint();
  }

  public void repaintEditorComponent(int x, int y, int width, int height) {
    repaint(x, y, width, height);
  }

  //--implementation of Scrollable interface--------------------------------------
  @Override
  public Dimension getPreferredScrollableViewportSize() {
    return myEditor.getPreferredSize();
  }

  @Override
  public int getScrollableUnitIncrement(Rectangle visibleRect, int orientation, int direction) {
    // Vertical unit = one line; horizontal unit = one plain-font space width.
    if (orientation == SwingConstants.VERTICAL) {
      return myEditor.getLineHeight();
    }
    // if orientation == SwingConstants.HORIZONTAL
    return EditorUtil.getSpaceWidth(Font.PLAIN, myEditor);
  }

  @Override
  public int getScrollableBlockIncrement(Rectangle visibleRect, int orientation, int direction) {
    // Vertical block scrolling snaps to whole-line boundaries.
    if (orientation == SwingConstants.VERTICAL) {
      int lineHeight = myEditor.getLineHeight();
      if (direction > 0) {
        int lineNumber = (visibleRect.y + visibleRect.height) / lineHeight;
        return lineHeight * lineNumber - visibleRect.y;
      }
      else {
        int lineNumber = (visibleRect.y - visibleRect.height) / lineHeight;
        return visibleRect.y - lineHeight * lineNumber;
      }
    }
    // if orientation == SwingConstants.HORIZONTAL
    return visibleRect.width;
  }

  @Override
  public boolean getScrollableTracksViewportWidth() {
    // Stretch to fill the viewport only when it is wider than the editor needs.
    return getParent() instanceof JViewport && getParent().getWidth() > getPreferredSize().width;
  }

  @Override
  public boolean getScrollableTracksViewportHeight() {
    return getParent() instanceof JViewport && getParent().getHeight() > getPreferredSize().height;
  }

  @Override
  public void putInfo(@NotNull Map<String, String> info) {
    myEditor.putInfo(info);
  }

  @NonNls
  @Override
  public String toString() {
    return "EditorComponent file=" + myEditor.getVirtualFile();
  }
}
| |
package com.rey.material.app;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import com.rey.material.demo.R;
import com.rey.material.util.ThemeUtil;
import com.rey.material.widget.CompoundButton;
import com.rey.material.widget.EditText;
import com.rey.material.widget.RadioButton;
import com.rey.material.widget.Spinner;
import com.rey.material.widget.TextView;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
/**
* Created by Rey on 2/2/2015.
*/
public class RecurringPickerDialog extends Dialog implements WeekView.OnDaySelectionChangedListener {
private ModeAdapter mModeAdapter;
private EndAdapter mEndAdapter;
private EditText mPeriodEditText;
private TextView mPeriodUnitTextView;
private Spinner mModeSpinner;
private Spinner mEndSpinner;
private RadioButton mSameDayRadioButton;
private RadioButton mSameWeekdayRadioButton;
private EditText mEndNumEditText;
private TextView mEndNumUnitTextView;
private Button mEndDateButton;
private WeekView mWeekView;
private HeaderDrawable mHeaderBackground;
private Recurring mRecurring;
private int mDatePickerDialogStyleId;
private static int[] MONTH_SAME_WEEKDAY = {R.string.rd_month_last, R.string.rd_month_first, R.string.rd_month_second, R.string.rd_month_third, R.string.rd_month_fourth};
private DateFormat mDateFormat = SimpleDateFormat.getDateInstance();
/**
 * Creates a recurring-picker dialog with the default style.
 *
 * @param context the context used to build the dialog
 */
public RecurringPickerDialog(Context context) {
    super(context);
}

/**
 * Creates a recurring-picker dialog with a custom style.
 *
 * @param context the context used to build the dialog
 * @param style   resource id of the style to apply to this dialog
 */
public RecurringPickerDialog(Context context, int style) {
    super(context, style);
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
@Override
protected void onCreate() {
View v = LayoutInflater.from(getContext()).inflate(R.layout.dialog_recurring, null);
setContentView(v);
FrameLayout fl_mode = (FrameLayout)v.findViewById(R.id.rd_fl_mode);
final ScrollView sv_repeat = (ScrollView)v.findViewById(R.id.rd_sv_repeat);
final LinearLayout ll_repeat = (LinearLayout)v.findViewById(R.id.rd_ll_repeat);
mModeSpinner = (Spinner)fl_mode.findViewById(R.id.rd_spn_mode);
mEndSpinner = (Spinner)v.findViewById(R.id.rd_spn_end);
mPeriodEditText = (EditText)v.findViewById(R.id.rd_et_period);
mPeriodUnitTextView = (TextView)v.findViewById(R.id.rd_tv_period_unit);
mSameDayRadioButton = (RadioButton)v.findViewById(R.id.rd_month_rb_same);
mSameWeekdayRadioButton = (RadioButton)v.findViewById(R.id.rd_month_rb_week);
mEndNumEditText = (EditText)v.findViewById(R.id.rd_et_end_num);
mEndNumUnitTextView = (TextView)v.findViewById(R.id.rd_tv_end_num_unit);
mEndDateButton = (Button)v.findViewById(R.id.rd_bt_end_date);
mWeekView = (WeekView)v.findViewById(R.id.rd_wv_week);
sv_repeat.getViewTreeObserver().addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
showDivider(ll_repeat.getMeasuredHeight() > sv_repeat.getMeasuredHeight());
}
});
mHeaderBackground = new HeaderDrawable(getContext());
fl_mode.setPadding(mContentPadding, 0, mContentPadding, 0);
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN)
fl_mode.setBackground(mHeaderBackground);
else
fl_mode.setBackgroundDrawable(mHeaderBackground);
ll_repeat.setPadding(mContentPadding, mActionOuterPadding, mContentPadding, mActionPadding);
mModeAdapter = new ModeAdapter();
mModeSpinner.setAdapter(mModeAdapter);
mModeSpinner.setOnItemSelectedListener(new Spinner.OnItemSelectedListener() {
@Override
public void onItemSelected(Spinner parent, View view, int position, long id) {
onModeSelected(position);
}
});
mEndAdapter = new EndAdapter();
mEndSpinner.setAdapter(mEndAdapter);
mEndSpinner.setOnItemSelectedListener(new Spinner.OnItemSelectedListener() {
@Override
public void onItemSelected(Spinner parent, View view, int position, long id) {
onEndSelected(position);
}
});
mPeriodEditText.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {}
@Override
public void afterTextChanged(Editable s) {
onPeriodChanged();
}
});
mPeriodEditText.setOnKeyListener(new View.OnKeyListener() {
@Override
public boolean onKey(View v, int keyCode, KeyEvent event) {
if(keyCode == KeyEvent.KEYCODE_ENTER && event.getAction() == KeyEvent.ACTION_UP){
String text = mPeriodEditText.getText().toString();
if(TextUtils.isEmpty(text))
mPeriodEditText.setText(String.valueOf(mRecurring.getPeriod()));
}
return false;
}
});
CompoundButton.OnCheckedChangeListener mCheckChangeListener = new android.widget.CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(android.widget.CompoundButton buttonView, boolean isChecked) {
if(isChecked){
if(buttonView == mSameDayRadioButton)
mSameWeekdayRadioButton.setChecked(false);
else
mSameDayRadioButton.setChecked(false);
onMonthSettingChanged();
}
}
};
mSameDayRadioButton.setOnCheckedChangeListener(mCheckChangeListener);
mSameWeekdayRadioButton.setOnCheckedChangeListener(mCheckChangeListener);
mEndNumEditText.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {}
@Override
public void afterTextChanged(Editable s) {
onEventNumberChanged();
}
});
mEndNumEditText.setOnKeyListener(new View.OnKeyListener() {
@Override
public boolean onKey(View v, int keyCode, KeyEvent event) {
if(keyCode == KeyEvent.KEYCODE_ENTER && event.getAction() == KeyEvent.ACTION_UP){
String text = mEndNumEditText.getText().toString();
if(TextUtils.isEmpty(text))
mEndNumEditText.setText(String.valueOf(mRecurring.getEventNumber()));
}
return false;
}
});
View.OnClickListener mDateClickListener = new View.OnClickListener(){
@Override
public void onClick(View v) {
final DatePickerDialog dialog = new DatePickerDialog(getContext(), mDatePickerDialogStyleId);
long minTime = System.currentTimeMillis();
Calendar cal = dialog.getCalendar();
cal.setTimeInMillis(minTime);
cal.add(Calendar.YEAR, 100);
long maxTime = cal.getTimeInMillis();
dialog.dateRange(minTime, maxTime)
.date((long)mEndDateButton.getTag())
.positiveAction(mPositiveAction.getText())
.positiveActionClickListener(new View.OnClickListener(){
@Override
public void onClick(View v) {
onEndDateChanged(dialog.getDate());
dialog.dismiss();
}
})
.negativeAction(mNegativeAction.getText())
.negativeActionClickListener(new View.OnClickListener(){
@Override
public void onClick(View v) {
dialog.dismiss();
}
})
.show();
}
};
mEndDateButton.setOnClickListener(mDateClickListener);
mWeekView.setOnDaySelectionChangedListener(this);
}
@Override
public Dialog applyStyle(int resId) {
// Delegates styling to the base dialog; this subclass adds no extra style attrs here.
return super.applyStyle(resId);
}
@Override
public Dialog layoutParams(int width, int height) {
// Intentionally ignores the requested size: this dialog is always full-width, wrap-height.
return super.layoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT);
}
@Override
public Dialog cornerRadius(float radius){
// Keep the custom header background's rounded top corners in sync with the dialog frame.
mHeaderBackground.updateCorner(radius);
return super.cornerRadius(radius);
}
/**
 * Sets the {@link Recurring} model edited by this dialog and refreshes every
 * widget from it. The model is mutated in place as the user interacts.
 */
public RecurringPickerDialog recurring(Recurring recurring){
mRecurring = recurring;
updateRecurringData();
return this;
}
/** Style resource applied to the nested end-date DatePickerDialog. */
public RecurringPickerDialog datePickerLayoutStyle(int styleId){
mDatePickerLayoutStyleId = styleId;
return this;
}
/** Returns the recurring model currently being edited (same instance passed to {@link #recurring}). */
public Recurring getRecurring(){
return mRecurring;
}
/**
 * Hides {@code target}, fading it out first when the dialog is visible and the
 * view is currently shown. With {@code skipAnimation}, or when no fade is
 * possible, the visibility change is applied at once.
 *
 * @param target        view to hide
 * @param makeGone      true for GONE, false for INVISIBLE
 * @param skipAnimation apply the change immediately, no fade
 */
private void animOut(final View target, final boolean makeGone, final boolean skipAnimation){
    final int hiddenState = makeGone ? View.GONE : View.INVISIBLE;
    boolean canAnimate = isShowing() && target.getVisibility() == View.VISIBLE && !skipAnimation;
    if(!canAnimate){
        target.setVisibility(hiddenState);
        return;
    }
    AlphaAnimation fade = new AlphaAnimation(1f, 0f);
    fade.setDuration(getContext().getResources().getInteger(android.R.integer.config_mediumAnimTime));
    fade.setAnimationListener(new Animation.AnimationListener() {
        @Override
        public void onAnimationStart(Animation animation) {}
        @Override
        public void onAnimationEnd(Animation animation) {
            // Only hide once the fade has completed.
            target.setVisibility(hiddenState);
        }
        @Override
        public void onAnimationRepeat(Animation animation) {}
    });
    target.startAnimation(fade);
}
/**
 * Shows {@code target}, fading it in when the dialog is visible. Does nothing
 * if the view is already visible; with {@code skipAnimation} (or while the
 * dialog is hidden) the visibility flips without a fade.
 */
private void animIn(final View target, boolean skipAnimation){
    if(target.getVisibility() == View.VISIBLE)
        return;
    if(skipAnimation || !isShowing()){
        target.setVisibility(View.VISIBLE);
        return;
    }
    AlphaAnimation fade = new AlphaAnimation(0f, 1f);
    fade.setDuration(getContext().getResources().getInteger(android.R.integer.config_mediumAnimTime));
    fade.setAnimationListener(new Animation.AnimationListener() {
        @Override
        public void onAnimationStart(Animation animation) {
            // Reveal at fade start so the view is visible for the whole animation.
            target.setVisibility(View.VISIBLE);
        }
        @Override
        public void onAnimationEnd(Animation animation) {}
        @Override
        public void onAnimationRepeat(Animation animation) {}
    });
    target.startAnimation(fade);
}
/**
 * Pushes every field of {@code mRecurring} into the widgets. When a spinner's
 * selection index is already correct, the matching onXxxSelected() handler is
 * invoked directly so dependent widgets still get refreshed.
 */
private void updateRecurringData(){
Calendar cal = Calendar.getInstance();
cal.setTimeInMillis(mRecurring.getStartTime());
// Build the "same weekday" label, e.g. which Nth <weekday> of the month the start falls on.
int order = Recurring.getWeekDayOrderNum(cal);
String dayOfWeek = cal.getDisplayName(Calendar.DAY_OF_WEEK, Calendar.LONG, Locale.getDefault());
int formattedTextId = MONTH_SAME_WEEKDAY[(order + 1) % MONTH_SAME_WEEKDAY.length];
mSameWeekdayRadioButton.setText(getContext().getResources().getString(formattedTextId, dayOfWeek));
mPeriodEditText.setText(String.valueOf(mRecurring.getPeriod()));
if(mRecurring.getRepeatMode() == Recurring.REPEAT_WEEKLY) {
// Weekly: restore the stored weekday mask.
for(int i = Calendar.SUNDAY; i <= Calendar.SATURDAY; i++)
mWeekView.setSelected(i, mRecurring.isEnabledWeekday(i), true);
}
else{
// Non-weekly: pre-select only the start day in the week picker.
int day = cal.get(Calendar.DAY_OF_WEEK);
for(int i = Calendar.SUNDAY; i <= Calendar.SATURDAY; i++)
mWeekView.setSelected(i, i == day, true);
if(mRecurring.getRepeatMode() == Recurring.REPEAT_MONTHLY){
mSameDayRadioButton.setCheckedImmediately(mRecurring.getMonthRepeatType() == Recurring.MONTH_SAME_DAY);
mSameWeekdayRadioButton.setCheckedImmediately(mRecurring.getMonthRepeatType() == Recurring.MONTH_SAME_WEEKDAY);
}
else{
// Default month option for modes where it is hidden anyway.
mSameDayRadioButton.setCheckedImmediately(true);
mSameWeekdayRadioButton.setCheckedImmediately(false);
}
}
// Force the mode handler to run even when the spinner index is unchanged.
if(mModeSpinner.getSelectedItemPosition() != mRecurring.getRepeatMode())
mModeSpinner.setSelection(mRecurring.getRepeatMode());
else
onModeSelected(mRecurring.getRepeatMode());
// Defaults when the current end mode does not use them: 10 events / ~a month after the start.
mEndNumEditText.setText(String.valueOf(mRecurring.getEndMode() == Recurring.END_FOR_EVENT ? mRecurring.getEventNumber() : 10));
long date = mRecurring.getEndMode() == Recurring.END_UNTIL_DATE ? mRecurring.getEndDate() : (Math.max(System.currentTimeMillis(), mRecurring.getStartTime()) + 86400000L * 31);
mEndDateButton.setText(mDateFormat.format(new Date(date)));
mEndDateButton.setTag(date);
// Same trick for the end-mode spinner.
if(mEndSpinner.getSelectedItemPosition() != mRecurring.getEndMode())
mEndSpinner.setSelection(mRecurring.getEndMode());
else
onEndSelected(mRecurring.getEndMode());
}
/**
 * Applies a newly selected repeat mode: updates the model and the period unit
 * label, then enables/shows only the widgets relevant to that mode.
 */
private void onModeSelected(int mode){
int oldMode = mRecurring.getRepeatMode();
mRecurring.setRepeatMode(mode);
updatePeriodUnit();
// Mode-specific settings (month repeat type / weekday mask) are rebuilt below.
mRecurring.setRepeatSetting(0);
if(mode == Recurring.REPEAT_NONE){
// Nothing to configure when the event does not repeat.
mPeriodEditText.setEnabled(false);
mEndSpinner.setEnabled(false);
mEndNumEditText.setEnabled(false);
mEndDateButton.setEnabled(false);
mSameDayRadioButton.setEnabled(false);
mSameWeekdayRadioButton.setEnabled(false);
mWeekView.setEnabled(false);
}
else{
// Re-enable everything when leaving REPEAT_NONE.
if(oldMode == Recurring.REPEAT_NONE){
mPeriodEditText.setEnabled(true);
mEndSpinner.setEnabled(true);
mEndNumEditText.setEnabled(true);
mEndDateButton.setEnabled(true);
mSameDayRadioButton.setEnabled(true);
mSameWeekdayRadioButton.setEnabled(true);
mWeekView.setEnabled(true);
}
switch (mode){
case Recurring.REPEAT_DAILY:
case Recurring.REPEAT_YEARLY:
// Daily/yearly need neither the month options nor the weekday picker.
animOut(mSameDayRadioButton, true, true);
animOut(mSameWeekdayRadioButton, true, true);
animOut(mWeekView, true, true);
break;
case Recurring.REPEAT_MONTHLY:
// Monthly shows the same-day / same-weekday choice.
animIn(mSameDayRadioButton, false);
animIn(mSameWeekdayRadioButton, false);
animOut(mWeekView, true, true);
mRecurring.setMonthRepeatType(mSameDayRadioButton.isChecked() ? Recurring.MONTH_SAME_DAY : Recurring.MONTH_SAME_WEEKDAY);
break;
case Recurring.REPEAT_WEEKLY:
// Weekly shows the weekday picker and copies its current selection into the model.
animOut(mSameDayRadioButton, true, true);
animOut(mSameWeekdayRadioButton, true, true);
animIn(mWeekView, false);
for(int i = Calendar.SUNDAY; i <= Calendar.SATURDAY; i++)
mRecurring.setEnabledWeekday(i, mWeekView.isSelected(i));
break;
}
}
}
/**
 * Applies a newly selected end mode: updates the model and shows/hides the
 * "for N events" field and the end-date button as appropriate.
 */
private void onEndSelected(int endMode){
    mRecurring.setEndMode(endMode);
    // End-mode specific settings (date / event count) are rebuilt below.
    mRecurring.setEndSetting(0);
    switch (endMode){
        case Recurring.END_FOREVER:
            animOut(mEndNumEditText, false, false);
            animOut(mEndNumUnitTextView, false, false);
            animOut(mEndDateButton, false, false);
            break;
        case Recurring.END_UNTIL_DATE:
            animOut(mEndNumEditText, false, true);
            animOut(mEndNumUnitTextView, false, true);
            animIn(mEndDateButton, false);
            // The button's tag always carries the currently chosen end date.
            mRecurring.setEndDate((Long)mEndDateButton.getTag());
            break;
        case Recurring.END_FOR_EVENT:
            animIn(mEndNumEditText, false);
            animIn(mEndNumUnitTextView, false);
            animOut(mEndDateButton, false, true);
            mRecurring.setEventNumber(safeEventNumber());
            break;
    }
}
/**
 * Parses the event-number field, falling back to the model's current value
 * (at least 1) when the field is empty or overflows int. The previous
 * unguarded parseInt crashed when the user had cleared the field before
 * switching the end mode.
 */
private int safeEventNumber(){
    try {
        return Integer.parseInt(mEndNumEditText.getText().toString());
    } catch (NumberFormatException e) {
        return Math.max(1, mRecurring.getEventNumber());
    }
}
/**
 * TextWatcher callback for the period field: clamps values below 1 and stores
 * valid values into the model. An empty field is ignored (the key listener
 * restores the previous value on ENTER).
 */
private void onPeriodChanged(){
    String text = mPeriodEditText.getText().toString();
    if(TextUtils.isEmpty(text))
        return;
    int period;
    try {
        period = Integer.parseInt(text);
    } catch (NumberFormatException e) {
        // Digits-only input can still overflow int (e.g. "9999999999");
        // restore the last valid value instead of crashing.
        mPeriodEditText.setText(String.valueOf(mRecurring.getPeriod()));
        return;
    }
    if(period < 1)
        mPeriodEditText.setText("1");
    else {
        mRecurring.setPeriod(period);
        updatePeriodUnit();
    }
}
/**
 * Mirrors the checked month-option radio button into the model
 * (repeat on the same calendar day vs. the same weekday-of-month).
 */
private void onMonthSettingChanged(){
mRecurring.setMonthRepeatType(mSameDayRadioButton.isChecked() ? Recurring.MONTH_SAME_DAY : Recurring.MONTH_SAME_WEEKDAY);
}
/**
 * TextWatcher callback for the "for N events" field: clamps values below 1 and
 * stores valid values into the model. An empty field is ignored (the key
 * listener restores the previous value on ENTER).
 */
private void onEventNumberChanged(){
    String text = mEndNumEditText.getText().toString();
    if(TextUtils.isEmpty(text))
        return;
    int num;
    try {
        num = Integer.parseInt(text);
    } catch (NumberFormatException e) {
        // Digits-only input can still overflow int; restore the last valid
        // value instead of crashing.
        mEndNumEditText.setText(String.valueOf(Math.max(1, mRecurring.getEventNumber())));
        return;
    }
    if(num < 1)
        mEndNumEditText.setText("1");
    else {
        mRecurring.setEventNumber(num);
        updateNumberUnit();
    }
}
/** Stores the picked end date on the button (tag + formatted label) and in the model. */
private void onEndDateChanged(long date){
mEndDateButton.setTag(date);
mEndDateButton.setText(mDateFormat.format(new Date(date)));
mRecurring.setEndDate(date);
}
/** WeekView callback: keeps the model's weekday mask in sync with the picker UI. */
@Override
public void onDaySelectionChanged(int dayOfWeek, boolean selected) {
mRecurring.setEnabledWeekday(dayOfWeek, selected);
}
/**
 * Refreshes the singular/plural unit label next to the period field
 * ("day(s)"/"week(s)"/"month(s)"/"year(s)") for the current repeat mode.
 * Leaves the label untouched for REPEAT_NONE.
 */
private void updatePeriodUnit(){
    boolean singular = mRecurring.getPeriod() == 1;
    int unitRes;
    switch (mRecurring.getRepeatMode()){
        case Recurring.REPEAT_DAILY:
            unitRes = singular ? R.string.rd_day : R.string.rd_days;
            break;
        case Recurring.REPEAT_WEEKLY:
            unitRes = singular ? R.string.rd_week : R.string.rd_weeks;
            break;
        case Recurring.REPEAT_MONTHLY:
            unitRes = singular ? R.string.rd_month : R.string.rd_months;
            break;
        case Recurring.REPEAT_YEARLY:
            unitRes = singular ? R.string.rd_year : R.string.rd_years;
            break;
        default:
            return;
    }
    mPeriodUnitTextView.setText(unitRes);
}
/** Refreshes the singular/plural "event(s)" label next to the "for N" field. */
private void updateNumberUnit(){
mEndNumUnitTextView.setText(mRecurring.getEventNumber() == 1 ? R.string.rd_event : R.string.rd_events);
}
/**
 * Adapter for the repeat-mode spinner. Items are fixed string resources, so
 * position doubles as the mode index; getItem()/getItemId() are unused stubs.
 */
private class ModeAdapter extends BaseAdapter{
    private final int[] mItems = {R.string.rd_none, R.string.rd_daily, R.string.rd_weekly, R.string.rd_monthly, R.string.rd_yearly};
    @Override
    public int getCount() {
        return mItems.length;
    }
    @Override
    public Object getItem(int position) {
        return null;
    }
    @Override
    public long getItemId(int position) {
        return 0;
    }
    // Recycles (or inflates from layoutId) a single TextView row and binds the label.
    private View bindLabel(int position, View convertView, ViewGroup parent, int layoutId){
        TextView label = (TextView)convertView;
        if(label == null)
            label = (TextView)LayoutInflater.from(getContext()).inflate(layoutId, parent, false);
        label.setText(mItems[position]);
        return label;
    }
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        return bindLabel(position, convertView, parent, R.layout.rd_item_mode);
    }
    @Override
    public View getDropDownView(int position, View convertView, ViewGroup parent) {
        return bindLabel(position, convertView, parent, R.layout.rd_item_dropdown_mode);
    }
}
/**
 * Adapter for the end-mode spinner. The collapsed view uses short labels
 * ("forever"/"until"/"for"), the drop-down uses the full phrasings; position
 * doubles as the end-mode index.
 */
private class EndAdapter extends BaseAdapter{
    private final int[] mItems = {R.string.rd_forever, R.string.rd_until, R.string.rd_for};
    private final int[] mDropDownItems = {R.string.rd_forever, R.string.rd_until_full, R.string.rd_for_full};
    @Override
    public int getCount() {
        return mItems.length;
    }
    @Override
    public Object getItem(int position) {
        return null;
    }
    @Override
    public long getItemId(int position) {
        return 0;
    }
    // Recycles (or inflates from layoutId) a single TextView row and binds textRes.
    private View bindLabel(View convertView, ViewGroup parent, int layoutId, int textRes){
        TextView label = (TextView)convertView;
        if(label == null)
            label = (TextView)LayoutInflater.from(getContext()).inflate(layoutId, parent, false);
        label.setText(textRes);
        return label;
    }
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        return bindLabel(convertView, parent, R.layout.rd_item_end, mItems[position]);
    }
    @Override
    public View getDropDownView(int position, View convertView, ViewGroup parent) {
        return bindLabel(convertView, parent, R.layout.rd_item_dropdown_end, mDropDownItems[position]);
    }
}
/**
 * Background drawable for the dialog header: a primary-colored rectangle whose
 * top two corners can be rounded to match the dialog frame's corner radius.
 */
private class HeaderDrawable extends Drawable{
private Paint mPaint;
// Current top-corner radius, in pixels.
private float mRadius;
// Cached outline; rebuilt by updateCorner() on radius or bounds changes.
private Path mPath;
public HeaderDrawable(Context context){
mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mPaint.setColor(ThemeUtil.colorPrimary(context, 0));
mPaint.setStyle(Paint.Style.FILL);
mPath = new Path();
}
/** Rebuilds the outline path for the given top-corner radius and schedules a redraw. */
public void updateCorner(float radius){
mRadius = radius;
Rect bounds = getBounds();
mPath.reset();
if(radius == 0)
mPath.addRect(bounds.left, bounds.top, bounds.right, bounds.bottom, Path.Direction.CW);
else {
RectF rect = new RectF();
// NOTE(review): the top-left arc below starts at (left, top + radius), so this start
// point at top - radius looks like it was meant to be bounds.top + radius — confirm
// (the stray segment lies above the bounds, so it is likely clipped in practice).
mPath.moveTo(bounds.left, bounds.top - radius);
rect.set(bounds.left, bounds.top, bounds.left + radius * 2, bounds.top + radius * 2);
// Top-left rounded corner.
mPath.arcTo(rect, 180, 90, false);
mPath.lineTo(bounds.right - radius, bounds.top);
rect.set(bounds.right - radius * 2, bounds.top, bounds.right, bounds.top + radius * 2);
// Top-right rounded corner.
mPath.arcTo(rect, 270, 90, false);
// Square bottom edge.
mPath.lineTo(bounds.right, bounds.bottom);
mPath.lineTo(bounds.left, bounds.bottom);
mPath.close();
}
invalidateSelf();
}
@Override
protected void onBoundsChange(Rect bounds) {
// Re-fit the path whenever the drawable is resized.
updateCorner(mRadius);
}
@Override
public void draw(Canvas canvas) {
canvas.drawPath(mPath, mPaint);
}
@Override
public void setAlpha(int alpha) {
mPaint.setAlpha(alpha);
}
@Override
public void setColorFilter(ColorFilter cf) {
mPaint.setColorFilter(cf);
}
@Override
public int getOpacity() {
return PixelFormat.TRANSLUCENT;
}
}
/**
 * Parcelable builder for {@link RecurringPickerDialog}. The {@link Recurring}
 * model survives configuration changes by being flattened field-by-field into
 * the parcel (see onWriteToParcel/onReadFromParcel, whose orders must match).
 */
public static class Builder extends Dialog.Builder{
private Recurring mRecurring;
private int mDatePickerLayoutStyleId;
public Builder() {
super();
}
public Builder(int styleId){
super(styleId);
}
// NOTE(review): assumes recurring(...) is called before build/parcel — a null
// mRecurring would NPE in onBuild and onWriteToParcel; confirm callers.
public Builder recurring(Recurring recurring){
mRecurring = recurring;
return this;
}
public Builder datePickerLayoutStyle(int styleId){
mDatePickerLayoutStyleId = styleId;
return this;
}
@Override
public Dialog.Builder contentView(int layoutId) {
// Intentionally a no-op: the dialog supplies its own fixed content layout.
return this;
}
@Override
protected Dialog onBuild(Context context, int styleId) {
RecurringPickerDialog dialog = new RecurringPickerDialog(context, styleId);
dialog.recurring(mRecurring)
.datePickerLayoutStyle(mDatePickerLayoutStyleId);
return dialog;
}
protected Builder(Parcel in){
super(in);
}
@Override
protected void onWriteToParcel(Parcel dest, int flags) {
// Field order here must stay in lockstep with onReadFromParcel below.
dest.writeInt(mDatePickerLayoutStyleId);
dest.writeLong(mRecurring.getStartTime());
dest.writeInt(mRecurring.getRepeatMode());
dest.writeInt(mRecurring.getPeriod());
dest.writeInt(mRecurring.getRepeatSetting());
dest.writeInt(mRecurring.getEndMode());
dest.writeLong(mRecurring.getEndSetting());
}
@Override
protected void onReadFromParcel(Parcel in) {
mDatePickerLayoutStyleId = in.readInt();
// Rebuild the model from the flattened fields, in write order.
mRecurring = new Recurring();
mRecurring.setStartTime(in.readLong());
mRecurring.setRepeatMode(in.readInt());
mRecurring.setPeriod(in.readInt());
mRecurring.setRepeatSetting(in.readInt());
mRecurring.setEndMode(in.readInt());
mRecurring.setEndSetting(in.readLong());
}
public static final Parcelable.Creator<Builder> CREATOR = new Parcelable.Creator<Builder>() {
public Builder createFromParcel(Parcel in) {
return new Builder(in);
}
public Builder[] newArray(int size) {
return new Builder[size];
}
};
}
}
| |
package edu.nyu.cs.cs2580;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;
import java.util.Vector;
import edu.nyu.cs.cs2580.QueryHandler.CgiArguments;
import edu.nyu.cs.cs2580.SearchEngine.Options;
/**
 * Collection of ranking signals. Each nested class is a {@link Ranker} that scores
 * every document in the index against a query:
 * <ul>
 * <li>{@link qlRunner} — query likelihood with Jelinek-Mercer smoothing</li>
 * <li>{@link numViewRunner} — query-independent popularity, log(1 + numviews)</li>
 * <li>{@link phraseRunner} — bigram-phrase overlap (unigrams for 1-word queries)</li>
 * <li>{@link linearRunner} — weighted linear mix of the other signals</li>
 * <li>{@link CosineRunner} — TF-IDF cosine similarity</li>
 * </ul>
 */
public class SignalFactory {

  /** Splits {@code query} into whitespace-separated tokens, preserving order. */
  private static Vector<String> tokenize(String query) {
    Vector<String> tokens = new Vector<String>();
    Scanner scanner = new Scanner(query);
    try {
      while (scanner.hasNext()) {
        tokens.add(scanner.next());
      }
    } finally {
      // Scanners wrap a Readable; close for symmetry even over a String source.
      scanner.close();
    }
    return tokens;
  }

  /** Builds a term -> occurrence-count (unigram frequency) map for {@code terms}. */
  private static Map<String, Integer> countTerms(Vector<String> terms) {
    Map<String, Integer> counts = new HashMap<String, Integer>();
    for (String term : terms) {
      Integer seen = counts.get(term);
      counts.put(term, seen == null ? 1 : seen + 1);
    }
    return counts;
  }

  /** Query-likelihood ranker with Jelinek-Mercer smoothing against the corpus model. */
  public static class qlRunner extends Ranker {
    protected qlRunner(Options options, CgiArguments arguments, Indexer indexer) {
      super(options, arguments, indexer);
    }

    // Smoothing weight: (1 - lamda) on the document model, lamda on the corpus model.
    private static final double lamda = 0.5;

    /** Scores every document in the index; {@code numResults} is not applied here. */
    public Vector<ScoredDocument> runQuery(Query query, int numResults) {
      Vector<ScoredDocument> retrieval_results = new Vector<ScoredDocument>();
      for (int i = 0; i < this._indexer.numDocs(); ++i) {
        retrieval_results.add(runquery(query._query, i));
      }
      return retrieval_results;
    }

    /**
     * Log of the smoothed probability of {@code currentWord} given the document:
     * (1 - lamda) * P(w|doc) + lamda * P(w|corpus).
     */
    private double calculateProbability(String currentWord, Map<String, Integer> documentDic,
        int documentSize) {
      double termCountInDoc = 0;
      if (documentDic.containsKey(currentWord)) {
        termCountInDoc = documentDic.get(currentWord);
      }
      // Guard empty documents: 0/0 would poison the whole score with NaN.
      double pwd = documentSize == 0 ? 0.0 : termCountInDoc / documentSize;
      // Background (corpus-level) probability of the term.
      double termF = (double) this._indexer.corpusTermFrequency(currentWord);
      double totalF = (double) this._indexer.totalTermFrequency();
      double pwc = termF / totalF;
      double score = (1 - lamda) * pwd + lamda * pwc;
      return Math.log(score);
    }

    /** Scores one document: product of smoothed term probabilities, computed in log space. */
    public ScoredDocument runquery(String query, int did) {
      DocumentFull d = new DocumentFull(did, (IndexerFullScan) this._indexer);
      Vector<String> bodyTokens = d.getConvertedBodyTokens();
      Map<String, Integer> documentDic = countTerms(bodyTokens);
      int documentSize = bodyTokens.size();
      double logScore = 0.0;
      for (String term : tokenize(query)) {
        logScore += calculateProbability(term, documentDic, documentSize);
      }
      return new ScoredDocument(d, Math.exp(logScore));
    }
  }

  /** Query-independent popularity signal: log(1 + numviews). */
  public static class numViewRunner extends Ranker {
    protected numViewRunner(Options options, CgiArguments arguments, Indexer indexer) {
      super(options, arguments, indexer);
    }

    /** The +1 keeps log() finite for zero-view documents. */
    public ScoredDocument runquery(String query, int did) {
      DocumentFull d = new DocumentFull(did, (IndexerFullScan) this._indexer);
      int numviews = d.getNumViews();
      double score = Math.log(numviews + 1);
      return new ScoredDocument(d, score);
    }

    /** Scores every document in the index; {@code numResults} is not applied here. */
    public Vector<ScoredDocument> runQuery(Query query, int numResults) {
      Vector<ScoredDocument> retrieval_results = new Vector<ScoredDocument>();
      for (int i = 0; i < this._indexer.numDocs(); ++i) {
        retrieval_results.add(runquery(query._query, i));
      }
      return retrieval_results;
    }
  }

  /** Bigram-phrase overlap signal; falls back to unigram overlap for 1-word queries. */
  public static class phraseRunner extends Ranker {
    protected phraseRunner(Options options, CgiArguments arguments, Indexer indexer) {
      super(options, arguments, indexer);
    }

    // Delimiter between the two words of a bigram key. Tokens are whitespace-split
    // and so can never contain a space; without a separator, bigrams such as
    // "ab"+"c" and "a"+"bc" would collide on the key "abc".
    private static final String PHRASE_SEPARATOR = " ";

    /** Builds a bigram -> count map over consecutive token pairs of {@code dv}. */
    private Map<String, Integer> makePhrase(Vector<String> dv) {
      Map<String, Integer> phraseDic = new HashMap<String, Integer>();
      for (int i = 0; i < dv.size() - 1; i++) {
        String phrase = dv.get(i) + PHRASE_SEPARATOR + dv.get(i + 1);
        Integer seen = phraseDic.get(phrase);
        phraseDic.put(phrase, seen == null ? 1 : seen + 1);
      }
      return phraseDic;
    }

    /** Score = total document count of n-grams shared with the query. */
    public ScoredDocument runquery(String query, int did) {
      DocumentFull d = new DocumentFull(did, (IndexerFullScan) this._indexer);
      Vector<String> dv = d.getConvertedBodyTokens();
      Vector<String> qv = tokenize(query);
      Map<String, Integer> dMap;
      Map<String, Integer> qMap;
      if (qv.size() > 1) {
        dMap = makePhrase(dv);
        qMap = makePhrase(qv);
      } else {
        dMap = countTerms(dv);
        qMap = countTerms(qv);
      }
      double score = 0;
      for (String key : qMap.keySet()) {
        if (dMap.containsKey(key)) {
          score = score + dMap.get(key);
        }
      }
      return new ScoredDocument(d, score);
    }

    /** Scores every document in the index; {@code numResults} is not applied here. */
    public Vector<ScoredDocument> runQuery(Query query, int numResults) {
      Vector<ScoredDocument> retrieval_results = new Vector<ScoredDocument>();
      for (int i = 0; i < this._indexer.numDocs(); ++i) {
        retrieval_results.add(runquery(query._query, i));
      }
      return retrieval_results;
    }
  }

  /** Weighted linear combination of the cosine, phrase, numviews and QL signals. */
  public static class linearRunner extends Ranker {
    // Hand-tuned mixing weights: cosine, query-likelihood, phrase, numviews.
    private static double beta1 = 1.0;
    private static double beta2 = 10.0;
    private static double beta3 = 0.001;
    private static double beta4 = 0.01;
    private CosineRunner cosinerunner;
    private phraseRunner phraserunner;
    private numViewRunner numviewrunner;
    private qlRunner qlrunner;

    public linearRunner(Options options, CgiArguments arguments, Indexer indexer) {
      super(options, arguments, indexer);
      cosinerunner = new CosineRunner(options, arguments, this._indexer);
      phraserunner = new phraseRunner(options, arguments, this._indexer);
      numviewrunner = new numViewRunner(options, arguments, this._indexer);
      qlrunner = new qlRunner(options, arguments, this._indexer);
    }

    /** Weighted sum of the four component scores for one document. */
    public ScoredDocument runquery(String query, int did) {
      DocumentFull d = new DocumentFull(did, (IndexerFullScan) this._indexer);
      ScoredDocument co = cosinerunner.runquery(query, did);
      ScoredDocument phrase = phraserunner.runquery(query, did);
      ScoredDocument view = numviewrunner.runquery(query, did);
      ScoredDocument ql = qlrunner.runquery(query, did);
      double score = 0;
      score += co._score * beta1;
      score += phrase._score * beta3;
      score += view._score * beta4;
      score += ql._score * beta2;
      return new ScoredDocument(d, score);
    }

    /** Scores every document in the index; {@code numResults} is not applied here. */
    public Vector<ScoredDocument> runQuery(Query query, int numResults) {
      Vector<ScoredDocument> retrieval_results = new Vector<ScoredDocument>();
      for (int i = 0; i < this._indexer.numDocs(); ++i) {
        retrieval_results.add(runquery(query._query, i));
      }
      return retrieval_results;
    }
  }

  /** TF-IDF cosine-similarity ranker; the query is treated as one extra "document". */
  public static class CosineRunner extends Ranker {
    // Corpus size plus one: the query itself counts as an additional document
    // when computing document frequencies / idf.
    private int n;

    protected CosineRunner(Options options, CgiArguments arguments, Indexer indexer) {
      super(options, arguments, indexer);
      n = this._indexer.numDocs() + 1;
    }

    /** Scores every document in the index; {@code numResults} is not applied here. */
    public Vector<ScoredDocument> runQuery(Query query, int numResults) {
      Vector<ScoredDocument> retrieval_results = new Vector<ScoredDocument>();
      for (int i = 0; i < this._indexer.numDocs(); ++i) {
        retrieval_results.add(runquery(query._query, i));
      }
      return retrieval_results;
    }

    /** term -> corpus document frequency, for each distinct term of {@code dv}. */
    private Map<String, Integer> dfDic(Vector<String> dv) {
      Map<String, Integer> dfMap = new HashMap<String, Integer>();
      for (String current : dv) {
        if (!dfMap.containsKey(current)) {
          dfMap.put(current, this._indexer.corpusDocFrequencyByTerm(current));
        }
      }
      return dfMap;
    }

    /**
     * Folds the query's tokens into the df map so the query counts as one more
     * document (one increment per token occurrence, matching the original scheme).
     */
    private void updateDf(Map<String, Integer> dfDicforD, Vector<String> qv) {
      for (String current : qv) {
        Integer seen = dfDicforD.get(current);
        dfDicforD.put(current, seen == null ? 1 : seen + 1);
      }
    }

    /** Euclidean norm of the tf-idf vector implied by tfMap/dfMap (idf = ln(n/df) + 1). */
    private double calculateNormalization(Map<String, Integer> tfMap, Map<String, Integer> dfMap) {
      double score = 0.0;
      for (String key : tfMap.keySet()) {
        double currentTf = (double) tfMap.get(key);
        double inverseNum = (double) n / dfMap.get(key);
        double currentIdf = Math.log(inverseNum) + 1;
        double tfIdf = currentTf * currentIdf;
        score += tfIdf * tfIdf;
      }
      return Math.sqrt(score);
    }

    /** Cosine similarity between the query and document tf-idf vectors. */
    public ScoredDocument runquery(String query, int did) {
      DocumentFull d = new DocumentFull(did, (IndexerFullScan) this._indexer);
      Vector<String> dv = d.getConvertedBodyTokens();
      Map<String, Integer> tfDicForD = countTerms(dv);
      Map<String, Integer> dfDicForD = dfDic(dv);
      Vector<String> qv = tokenize(query);
      Map<String, Integer> tfDicForQ = countTerms(qv);
      // Treat the query itself as one extra document for df purposes.
      updateDf(dfDicForD, qv);
      double normalQ = calculateNormalization(tfDicForQ, dfDicForD);
      double normalD = calculateNormalization(tfDicForD, dfDicForD);
      // Dot product over terms shared by query and document.
      double score = 0;
      for (String key : tfDicForQ.keySet()) {
        if (tfDicForD.containsKey(key)) {
          double tfForQ = tfDicForQ.get(key);
          double tfForD = tfDicForD.get(key);
          double idf = Math.log((double) n / dfDicForD.get(key)) + 1;
          score += tfForQ * tfForD * idf * idf;
        }
      }
      // Empty documents/queries yield a zero norm; return 0 instead of NaN/Infinity.
      double denominator = normalQ * normalD;
      score = denominator == 0.0 ? 0.0 : score / denominator;
      return new ScoredDocument(d, score);
    }
  }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.java;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.HasSourceUnderTest;
import com.facebook.buck.model.ImmutableFlavor;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildRuleType;
import com.facebook.buck.rules.BuildRules;
import com.facebook.buck.rules.Description;
import com.facebook.buck.rules.Hint;
import com.facebook.buck.rules.Label;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePaths;
import com.facebook.buck.rules.SymlinkTree;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.infer.annotation.SuppressFieldNotInitialized;
import com.google.common.base.Optional;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
/**
 * {@link Description} for {@code java_test} rules: turns the parsed BUCK-file args
 * into a {@link JavaTest} build rule, optionally wiring in a symlink tree of native
 * libraries (and the matching {@code -Djava.library.path} VM arg) for tests that
 * depend on C/C++ code.
 */
public class JavaTestDescription implements Description<JavaTestDescription.Arg> {
public static final BuildRuleType TYPE = BuildRuleType.of("java_test");
// Flavor used to derive the target name of the native-libs symlink tree rule.
private static final Flavor NATIVE_LIBS_SYMLINKS_FLAVOR =
ImmutableFlavor.of("native_libs_symlinks");
// Default javac options; per-rule args are layered on top of these.
private final JavacOptions templateOptions;
// Optional timeout (ms) applied to every java_test rule built by this description.
private final Optional<Long> testRuleTimeoutMs;
// Platform used to locate native (C/C++) library dependencies.
private final CxxPlatform cxxPlatform;
public JavaTestDescription(
JavacOptions templateOptions,
Optional<Long> testRuleTimeoutMs,
CxxPlatform cxxPlatform) {
this.templateOptions = templateOptions;
this.testRuleTimeoutMs = testRuleTimeoutMs;
this.cxxPlatform = cxxPlatform;
}
@Override
public BuildRuleType getBuildRuleType() {
return TYPE;
}
@Override
public Arg createUnpopulatedConstructorArg() {
return new Arg();
}
@Override
public <A extends Arg> JavaTest createBuildRule(
BuildRuleParams params,
BuildRuleResolver resolver,
A args) {
SourcePathResolver pathResolver = new SourcePathResolver(resolver);
// Merge the rule's javac arguments into the template defaults.
JavacOptions.Builder javacOptionsBuilder =
JavaLibraryDescription.getJavacOptions(
resolver,
args,
templateOptions);
AnnotationProcessingParams annotationParams =
args.buildAnnotationProcessingParams(
params.getBuildTarget(),
params.getProjectFilesystem(),
resolver);
javacOptionsBuilder.setAnnotationProcessingParams(annotationParams);
JavacOptions javacOptions = javacOptionsBuilder.build();
// When the test uses native libraries, this contributes a symlink-tree dep and
// a -Djava.library.path VM arg pointing at it; otherwise it passes through.
CxxLibraryEnhancement cxxLibraryEnhancement = new CxxLibraryEnhancement(
params,
args.useCxxLibraries,
args.vmArgs.or(ImmutableList.<String>of()),
pathResolver,
cxxPlatform);
params = cxxLibraryEnhancement.updatedParams;
ImmutableList<String> vmArgs = cxxLibraryEnhancement.updatedVmArgs;
return new JavaTest(
// Exported rules of declared/provided deps and the javac inputs become extra
// deps so they are built before the test compiles and runs.
params.appendExtraDeps(
Iterables.concat(
BuildRules.getExportedRules(
Iterables.concat(
params.getDeclaredDeps(),
resolver.getAllRules(args.providedDeps.get()))),
pathResolver.filterBuildRuleInputs(javacOptions.getInputs()))),
pathResolver,
args.srcs.get(),
JavaLibraryDescription.validateResources(
pathResolver,
args, params.getProjectFilesystem()),
args.labels.get(),
args.contacts.get(),
args.proguardConfig.transform(SourcePaths.toSourcePath(params.getProjectFilesystem())),
/* additionalClasspathEntries */ ImmutableSet.<Path>of(),
args.testType.or(TestType.JUNIT),
javacOptions,
vmArgs,
validateAndGetSourcesUnderTest(
args.sourceUnderTest.get(),
params.getBuildTarget(),
resolver),
args.resourcesRoot,
testRuleTimeoutMs,
args.getRunTestSeparately());
}
/**
 * Resolves {@code sourceUnderTestTargets} to build rules, requiring each one to be
 * a {@link JavaLibrary}.
 *
 * @param owner the test target; used only in the error message.
 * @throws HumanReadableException if any target is not a Java library rule.
 */
public static ImmutableSet<BuildRule> validateAndGetSourcesUnderTest(
ImmutableSet<BuildTarget> sourceUnderTestTargets,
BuildTarget owner,
BuildRuleResolver resolver) {
ImmutableSet.Builder<BuildRule> sourceUnderTest = ImmutableSet.builder();
for (BuildTarget target : sourceUnderTestTargets) {
BuildRule rule = resolver.getRule(target);
if (!(rule instanceof JavaLibrary)) {
// In this case, the source under test specified in the build file was not a Java library
// rule. Since EMMA requires the sources to be in Java, we will throw this exception and
// not continue with the tests.
throw new HumanReadableException(
"Specified source under test for %s is not a Java library: %s (%s).",
owner,
rule.getFullyQualifiedName(),
rule.getType());
}
sourceUnderTest.add(rule);
}
return sourceUnderTest.build();
}
/** Constructor arg populated from a {@code java_test} entry in a BUCK file. */
@SuppressFieldNotInitialized
public static class Arg extends JavaLibraryDescription.Arg implements HasSourceUnderTest {
public Optional<ImmutableSortedSet<String>> contacts;
public Optional<ImmutableSortedSet<Label>> labels;
// Not treated as a dep: source-under-test is only consulted for validation/coverage.
@Hint(isDep = false) public Optional<ImmutableSortedSet<BuildTarget>> sourceUnderTest;
public Optional<ImmutableList<String>> vmArgs;
public Optional<TestType> testType;
public Optional<Boolean> runTestSeparately;
public Optional<Boolean> useCxxLibraries;
@Override
public ImmutableSortedSet<BuildTarget> getSourceUnderTest() {
return sourceUnderTest.get();
}
public boolean getRunTestSeparately() {
return runTestSeparately.or(false);
}
}
/**
 * Optionally augments a test's params and VM args with a symlink tree of the
 * native libraries found in its deps, so JNI code can be loaded at runtime.
 */
public static class CxxLibraryEnhancement {
public final BuildRuleParams updatedParams;
public final ImmutableList<String> updatedVmArgs;
public CxxLibraryEnhancement(
BuildRuleParams params,
Optional<Boolean> useCxxLibraries,
ImmutableList<String> vmArgs,
SourcePathResolver pathResolver,
CxxPlatform cxxPlatform) {
if (useCxxLibraries.or(false)) {
SymlinkTree nativeLibsSymlinkTree =
buildNativeLibsSymlinkTreeRule(params, pathResolver, cxxPlatform);
updatedParams = params.appendExtraDeps(ImmutableList.<BuildRule>builder()
.add(nativeLibsSymlinkTree)
// Add all the native libraries as first-order dependencies.
// This has two effects:
// (1) They become runtime deps because JavaTest adds all first-order deps.
// (2) They affect the JavaTest's RuleKey, so changing them will invalidate
// the test results cache.
.addAll(nativeLibsSymlinkTree.getDeps())
.build());
updatedVmArgs = ImmutableList.<String>builder()
.addAll(vmArgs)
.add("-Djava.library.path=" + nativeLibsSymlinkTree.getRoot())
.build();
} else {
// No native code involved; pass params and VM args through untouched.
updatedParams = params;
updatedVmArgs = vmArgs;
}
}
/** Creates the symlink-tree rule mapping each native library's soname to its output. */
public static SymlinkTree buildNativeLibsSymlinkTreeRule(
BuildRuleParams buildRuleParams,
SourcePathResolver pathResolver,
CxxPlatform cxxPlatform) {
ImmutableMap<String, SourcePath> nativeLibrariesStringKeys =
JavaLibraryRules.getNativeLibraries(
buildRuleParams.getDeps(),
cxxPlatform);
// Re-key the map by Path, which is what SymlinkTree expects.
ImmutableMap.Builder<Path, SourcePath> nativeLibrariesBuilder = ImmutableMap.builder();
for (Map.Entry<String, SourcePath> entry : nativeLibrariesStringKeys.entrySet()) {
nativeLibrariesBuilder.put(Paths.get(entry.getKey()), entry.getValue());
}
ImmutableMap<Path, SourcePath> nativeLibraries = nativeLibrariesBuilder.build();
// The symlink tree has no declared deps; its extra deps are the rules that
// produce the library files it links to.
BuildRuleParams paramsForNativeLibsSymlinkTree = buildRuleParams.copyWithChanges(
buildRuleParams.getBuildTarget().withFlavors(NATIVE_LIBS_SYMLINKS_FLAVOR),
Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of()),
Suppliers.ofInstance(
ImmutableSortedSet.copyOf(
pathResolver.filterBuildRuleInputs(nativeLibraries.values()))));
Path nativeLibsSymlinkDir = BuildTargets.getScratchPath(
paramsForNativeLibsSymlinkTree.getBuildTarget(),
"_%s_linktree");
return new SymlinkTree(
paramsForNativeLibsSymlinkTree,
pathResolver,
nativeLibsSymlinkDir,
nativeLibraries);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.livy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import org.apache.commons.lang3.StringUtils;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
import org.apache.zeppelin.interpreter.InterpreterUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.security.kerberos.client.KerberosRestTemplate;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.RestTemplate;
import java.nio.charset.Charset;
import java.util.*;
import java.util.Map.Entry;
/**
 * Livy helper class.
 *
 * Wraps the Livy server's REST API for the Zeppelin Livy interpreters:
 * creates sessions, submits code statements, polls statement results and
 * converts Livy's JSON responses into {@link InterpreterResult}s.
 */
public class LivyHelper {
  Logger LOGGER = LoggerFactory.getLogger(LivyHelper.class);
  Gson gson = new GsonBuilder().setPrettyPrinting().create();
  // paragraphId -> last HTTP response; cancelHTTP() stores null here to signal
  // that a running interpret() poll loop for that paragraph should stop.
  HashMap<String, Object> paragraphHttpMap = new HashMap<>();
  Properties property;
  // Maximum number of one-second polls while waiting for a session to become idle.
  Integer MAX_NOS_RETRY = 60;

  LivyHelper(Properties property) {
    this.property = property;
  }

  /**
   * Creates a new Livy session of the given kind (e.g. "spark", "pyspark")
   * and waits until it reaches the "idle" state.
   *
   * @param context interpreter context; supplies the authenticated user and paragraph id
   * @param kind    Livy session kind
   * @return the id of the newly created, idle session
   * @throws Exception if the session reaches "error"/"dead" state, or does not
   *                   become idle within MAX_NOS_RETRY polls
   */
  public Integer createSession(InterpreterContext context, String kind) throws Exception {
    try {
      Map<String, String> conf = new HashMap<String, String>();
      Iterator<Entry<Object, Object>> it = property.entrySet().iterator();
      while (it.hasNext()) {
        Entry<Object, Object> pair = it.next();
        // Forward "livy.spark.*" settings to Livy as "spark.*" (substring(5) strips "livy.").
        if (pair.getKey().toString().startsWith("livy.spark.") &&
            !pair.getValue().toString().isEmpty())
          conf.put(pair.getKey().toString().substring(5), pair.getValue().toString());
      }

      String confData = gson.toJson(conf);
      String user = context.getAuthenticationInfo().getUser();

      String json = executeHTTP(property.getProperty("zeppelin.livy.url") + "/sessions", "POST",
          "{" +
              "\"kind\": \"" + kind + "\", " +
              "\"conf\": " + confData + ", " +
              "\"proxyUser\": " + (StringUtils.isEmpty(user) ? null : "\"" + user + "\"") +
              "}",
          context.getParagraphId()
      );

      Map jsonMap = (Map<Object, Object>) gson.fromJson(json,
          new TypeToken<Map<Object, Object>>() {
          }.getType());
      Integer sessionId = ((Double) jsonMap.get("id")).intValue();

      if (!jsonMap.get("state").equals("idle")) {
        // Poll once a second until the session is idle, dead, or we run out of retries.
        Integer nosRetry = MAX_NOS_RETRY;
        while (nosRetry >= 0) {
          LOGGER.error(String.format("sessionId:%s state is %s",
              jsonMap.get("id"), jsonMap.get("state")));
          Thread.sleep(1000);
          json = executeHTTP(property.getProperty("zeppelin.livy.url") + "/sessions/" +
              sessionId, "GET", null, context.getParagraphId());
          jsonMap = (Map<Object, Object>) gson.fromJson(json,
              new TypeToken<Map<Object, Object>>() {
              }.getType());
          if (jsonMap.get("state").equals("idle")) {
            break;
          } else if (jsonMap.get("state").equals("error") || jsonMap.get("state").equals("dead")) {
            // Session failed to start: fetch the Livy logs and surface them.
            json = executeHTTP(property.getProperty("zeppelin.livy.url") + "/sessions/" +
                    sessionId + "/log",
                "GET", null,
                context.getParagraphId());
            jsonMap = (Map<Object, Object>) gson.fromJson(json,
                new TypeToken<Map<Object, Object>>() {
                }.getType());
            String logs = StringUtils.join((ArrayList<String>) jsonMap.get("log"), '\n');
            LOGGER.error(String.format("Cannot start %s.\n%s", kind, logs));
            throw new Exception(String.format("Cannot start %s.\n%s", kind, logs));
          }
          nosRetry--;
        }
        // BUG FIX: this used to check "nosRetry <= 0", which also threw when the
        // session became idle exactly on the last retry (break with nosRetry == 0).
        // Check the actual session state instead.
        if (!jsonMap.get("state").equals("idle")) {
          LOGGER.error("Error getting session for user within 60Sec.");
          throw new Exception(String.format("Cannot start %s.", kind));
        }
      }
      return sessionId;
    } catch (Exception e) {
      LOGGER.error("Error getting session for user", e);
      throw e;
    }
  }

  /**
   * Bootstraps a SQLContext (with implicits imported) in the user's Spark session.
   */
  protected void initializeSpark(final InterpreterContext context,
                                 final Map<String, Integer> userSessionMap) throws Exception {
    interpret("val sqlContext = new org.apache.spark.sql.SQLContext(sc)\n" +
        "import sqlContext.implicits._", context, userSessionMap);
  }

  /**
   * Splits the paragraph text into lines, groups lines that belong to a single
   * expression (continuations, comments, leading-dot method chains) and
   * interprets each group, streaming output to {@code out}.
   *
   * @return SUCCESS when all groups ran, the first ERROR result otherwise, or
   *         INCOMPLETE if the final expression never completed
   */
  public InterpreterResult interpretInput(String stringLines,
                                          final InterpreterContext context,
                                          final Map<String, Integer> userSessionMap,
                                          LivyOutputStream out) {
    try {
      String[] lines = stringLines.split("\n");
      // Append a no-op statement so the last real line is always flushed.
      String[] linesToRun = new String[lines.length + 1];
      System.arraycopy(lines, 0, linesToRun, 0, lines.length);
      linesToRun[lines.length] = "print(\"\")";

      out.setInterpreterOutput(context.out);
      context.out.clear();
      Code r = null;
      String incomplete = "";
      boolean inComment = false;

      for (int l = 0; l < linesToRun.length; l++) {
        String s = linesToRun[l];
        // check if next line starts with "." (but not ".." or "./") it is treated as an invocation
        //for spark
        if (l + 1 < linesToRun.length) {
          String nextLine = linesToRun[l + 1].trim();
          boolean continuation = false;
          if (nextLine.isEmpty()
              || nextLine.startsWith("//") // skip empty line or comment
              || nextLine.startsWith("}")
              || nextLine.startsWith("object")) { // include "} object" for Scala companion object
            continuation = true;
          } else if (!inComment && nextLine.startsWith("/*")) {
            inComment = true;
            continuation = true;
          } else if (inComment && nextLine.lastIndexOf("*/") >= 0) {
            inComment = false;
            continuation = true;
          } else if (nextLine.length() > 1
              && nextLine.charAt(0) == '.'
              && nextLine.charAt(1) != '.' // ".."
              && nextLine.charAt(1) != '/') { // "./"
            continuation = true;
          } else if (inComment) {
            continuation = true;
          }
          if (continuation) {
            incomplete += s + "\n";
            continue;
          }
        }

        InterpreterResult res;
        try {
          res = interpret(incomplete + s, context, userSessionMap);
        } catch (Exception e) {
          LOGGER.error("Interpreter exception", e);
          return new InterpreterResult(Code.ERROR, InterpreterUtils.getMostRelevantMessage(e));
        }

        r = res.code();
        if (r == Code.ERROR) {
          out.setInterpreterOutput(null);
          return res;
        } else if (r == Code.INCOMPLETE) {
          // Keep accumulating lines until the expression parses.
          incomplete += s + "\n";
        } else {
          out.write((res.message() + "\n").getBytes(Charset.forName("UTF-8")));
          incomplete = "";
        }
      }

      if (r == Code.INCOMPLETE) {
        out.setInterpreterOutput(null);
        return new InterpreterResult(r, "Incomplete expression");
      } else {
        out.setInterpreterOutput(null);
        return new InterpreterResult(Code.SUCCESS);
      }
    } catch (Exception e) {
      LOGGER.error("error in interpretInput", e);
      return new InterpreterResult(Code.ERROR, e.getMessage());
    }
  }

  /**
   * Submits one statement to the user's Livy session and polls once a second
   * until a result is available or the paragraph is cancelled.
   *
   * @throws Exception if the session is gone or an HTTP/JSON error occurs
   */
  public InterpreterResult interpret(String stringLines,
                                     final InterpreterContext context,
                                     final Map<String, Integer> userSessionMap)
      throws Exception {
    // Escape the code so it survives being embedded in a JSON string literal.
    stringLines = stringLines
        //for "\n" present in string
        .replaceAll("\\\\n", "\\\\\\\\n")
        //for new line present in string
        .replaceAll("\\n", "\\\\n")
        // for \" present in string
        .replaceAll("\\\\\"", "\\\\\\\\\"")
        // for " present in string
        .replaceAll("\"", "\\\\\"");
    if (stringLines.trim().equals("")) {
      return new InterpreterResult(Code.SUCCESS, "");
    }
    Map jsonMap = executeCommand(stringLines, context, userSessionMap);
    Integer id = ((Double) jsonMap.get("id")).intValue();
    InterpreterResult res = getResultFromMap(jsonMap);
    if (res != null) {
      return res;
    }
    while (true) {
      Thread.sleep(1000);
      // cancelHTTP() null-ed the entry: stop polling for this paragraph.
      if (paragraphHttpMap.get(context.getParagraphId()) == null) {
        return new InterpreterResult(Code.INCOMPLETE, "");
      }
      jsonMap = getStatusById(context, userSessionMap, id);
      InterpreterResult interpreterResult = getResultFromMap(jsonMap);
      if (interpreterResult != null) {
        return interpreterResult;
      }
    }
  }

  /**
   * Converts a Livy statement-status response into an InterpreterResult.
   *
   * @return ERROR/INCOMPLETE/SUCCESS when the statement is "available",
   *         or null when it is still running and the caller should keep polling
   */
  private InterpreterResult getResultFromMap(Map jsonMap) {
    if (jsonMap.get("state").equals("available")) {
      if (((Map) jsonMap.get("output")).get("status").equals("error")) {
        StringBuilder errorMessage = new StringBuilder((String) ((Map) jsonMap
            .get("output")).get("evalue"));
        // Parser-level errors mean the expression needs more input, not a failure.
        if (errorMessage.toString().equals("incomplete statement")
            || errorMessage.toString().contains("EOF")) {
          return new InterpreterResult(Code.INCOMPLETE, "");
        }
        String traceback = gson.toJson(((Map) jsonMap.get("output")).get("traceback"));
        if (!traceback.equals("[]")) {
          errorMessage
              .append("\n")
              .append("traceback: \n")
              .append(traceback);
        }
        return new InterpreterResult(Code.ERROR, errorMessage.toString());
      }
      if (((Map) jsonMap.get("output")).get("status").equals("ok")) {
        String result = (String) ((Map) ((Map) jsonMap.get("output"))
            .get("data")).get("text/plain");
        if (result != null) {
          result = result.trim();
          // Output that looks like HTML is rendered with Zeppelin's %html display system.
          if (result.startsWith("<link")
              || result.startsWith("<script")
              || result.startsWith("<style")
              || result.startsWith("<div")) {
            result = "%html " + result;
          }
        }
        return new InterpreterResult(Code.SUCCESS, result);
      }
    }
    return null;
  }

  /**
   * POSTs a statement to the user's session and returns the parsed JSON response.
   *
   * @throws Exception if Livy reports the session as missing (e.g. after a restart)
   */
  private Map executeCommand(String lines, InterpreterContext context,
                             Map<String, Integer> userSessionMap) throws Exception {
    String json = executeHTTP(property.get("zeppelin.livy.url") + "/sessions/"
            + userSessionMap.get(context.getAuthenticationInfo().getUser())
            + "/statements",
        "POST",
        "{\"code\": \"" + lines + "\" }",
        context.getParagraphId());
    // Livy answers "Session 'N' not found" (404 body) when the session is gone.
    if (json.matches("^(\")?Session (\'[0-9]\' )?not found(.?\"?)$")) {
      throw new Exception("Exception: Session not found, Livy server would have restarted, " +
          "or lost session.");
    }
    try {
      Map jsonMap = gson.fromJson(json,
          new TypeToken<Map>() {
          }.getType());
      return jsonMap;
    } catch (Exception e) {
      LOGGER.error("Error executeCommand", e);
      throw e;
    }
  }

  /**
   * GETs the status of statement {@code id} in the user's session as parsed JSON.
   */
  private Map getStatusById(InterpreterContext context,
                            Map<String, Integer> userSessionMap, Integer id) throws Exception {
    String json = executeHTTP(property.getProperty("zeppelin.livy.url") + "/sessions/"
            + userSessionMap.get(context.getAuthenticationInfo().getUser())
            + "/statements/" + id,
        "GET", null, context.getParagraphId());
    try {
      Map jsonMap = gson.fromJson(json,
          new TypeToken<Map>() {
          }.getType());
      return jsonMap;
    } catch (Exception e) {
      LOGGER.error("Error getStatusById", e);
      throw e;
    }
  }

  /**
   * Returns a Kerberos-authenticating template when both keytab and principal
   * are configured, otherwise a plain RestTemplate.
   */
  private RestTemplate getRestTemplate() {
    String keytabLocation = property.getProperty("zeppelin.livy.keytab");
    String principal = property.getProperty("zeppelin.livy.principal");
    if (StringUtils.isNotEmpty(keytabLocation) && StringUtils.isNotEmpty(principal)) {
      return new KerberosRestTemplate(keytabLocation, principal);
    }
    return new RestTemplate();
  }

  /**
   * Performs one HTTP call against Livy and records the response per paragraph
   * (so cancelHTTP can interrupt pollers).
   *
   * @param method "POST", "GET" or "DELETE"
   * @return the response body for 200/201, and also for 404 because Livy puts
   *         meaningful messages (e.g. "Session not found") in 404 bodies;
   *         null if no response was obtained
   * @throws Exception for any other status code, unless the body is the known
   *         CreateInteractiveRequest["master"] validation message
   */
  protected String executeHTTP(String targetURL, String method, String jsonData, String paragraphId)
      throws Exception {
    RestTemplate restTemplate = getRestTemplate();
    HttpHeaders headers = new HttpHeaders();
    headers.add("Content-Type", "application/json");
    ResponseEntity<String> response = null;
    try {
      if (method.equals("POST")) {
        HttpEntity<String> entity = new HttpEntity<String>(jsonData, headers);
        response = restTemplate.exchange(targetURL, HttpMethod.POST, entity, String.class);
        paragraphHttpMap.put(paragraphId, response);
      } else if (method.equals("GET")) {
        HttpEntity<String> entity = new HttpEntity<String>(headers);
        response = restTemplate.exchange(targetURL, HttpMethod.GET, entity, String.class);
        paragraphHttpMap.put(paragraphId, response);
      } else if (method.equals("DELETE")) {
        HttpEntity<String> entity = new HttpEntity<String>(headers);
        response = restTemplate.exchange(targetURL, HttpMethod.DELETE, entity, String.class);
      }
    } catch (HttpClientErrorException e) {
      // Convert 4xx errors into a synthetic response so the body can be inspected below.
      response = new ResponseEntity(e.getResponseBodyAsString(), e.getStatusCode());
      // NOTE(review): the format arguments read "Error with <status> StatusCode: <body>";
      // wording and argument order look swapped but are kept as-is (log text only).
      LOGGER.error(String.format("Error with %s StatusCode: %s",
          response.getStatusCode().value(), e.getResponseBodyAsString()));
    }
    if (response == null) {
      return null;
    }
    if (response.getStatusCode().value() == 200
        || response.getStatusCode().value() == 201
        || response.getStatusCode().value() == 404) {
      return response.getBody();
    } else {
      String responseString = response.getBody();
      if (responseString.contains("CreateInteractiveRequest[\\\"master\\\"]")) {
        return responseString;
      }
      LOGGER.error(String.format("Error with %s StatusCode: %s",
          response.getStatusCode().value(), responseString));
      throw new Exception(String.format("Error with %s StatusCode: %s",
          response.getStatusCode().value(), responseString));
    }
  }

  /**
   * Flags the paragraph as cancelled; interpret()'s poll loop checks for the
   * null entry and stops.
   */
  public void cancelHTTP(String paragraphId) {
    paragraphHttpMap.put(paragraphId, null);
  }

  /**
   * Deletes every known Livy session; failures are logged per session and do
   * not abort the remaining deletions (best-effort cleanup).
   */
  public void closeSession(Map<String, Integer> userSessionMap) {
    for (Map.Entry<String, Integer> entry : userSessionMap.entrySet()) {
      try {
        executeHTTP(property.getProperty("zeppelin.livy.url") + "/sessions/"
                + entry.getValue(),
            "DELETE", null, null);
      } catch (Exception e) {
        LOGGER.error(String.format("Error closing session for user with session ID: %s",
            entry.getValue()), e);
      }
    }
  }
}
| |
/**
*
* Copyright 2003-2007 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx;
import java.util.ArrayList;
import org.jivesoftware.smack.*;
import org.jivesoftware.smack.packet.*;
import org.jivesoftware.smack.test.SmackTestCase;
/**
*
* Test the MessageEvent extension using the high level API.
*
* @author Gaston Dombiak
*/
public class MessageEventManagerTest extends SmackTestCase {
public MessageEventManagerTest(String name) {
super(name);
}
/**
* High level API test.
* This is a simple test to use with a XMPP client and check if the client receives the
* message
* 1. User_1 will send a message to user_2 requesting to be notified when any of these events
* occurs: offline, composing, displayed or delivered
*/
public void testSendMessageEventRequest() {
// Create a chat for each connection
Chat chat1 = getConnection(0).getChatManager().createChat(getBareJID(1), null);
// Create the message to send with the roster
Message msg = new Message();
msg.setSubject("Any subject you want");
msg.setBody("An interesting body comes here...");
// Add to the message all the notifications requests (offline, delivered, displayed,
// composing)
MessageEventManager.addNotificationsRequests(msg, true, true, true, true);
// Send the message that contains the notifications request
try {
chat1.sendMessage(msg);
} catch (Exception e) {
fail("An error occured sending the message");
}
}
/**
* High level API test.
* This is a simple test to use with a XMPP client, check if the client receives the
* message and display in the console any notification
* 1. User_1 will send a message to user_2 requesting to be notified when any of these events
* occurs: offline, composing, displayed or delivered
* 2. User_2 will use a XMPP client (like Exodus) to display the message and compose a reply
* 3. User_1 will display any notification that receives
*/
public void testSendMessageEventRequestAndDisplayNotifications() {
// Create a chat for each connection
Chat chat1 = getConnection(0).getChatManager().createChat(getBareJID(1), null);
MessageEventManager messageEventManager = new MessageEventManager(getConnection(0));
messageEventManager
.addMessageEventNotificationListener(new MessageEventNotificationListener() {
public void deliveredNotification(String from, String packetID) {
System.out.println("From: " + from + " PacketID: " + packetID + "(delivered)");
}
public void displayedNotification(String from, String packetID) {
System.out.println("From: " + from + " PacketID: " + packetID + "(displayed)");
}
public void composingNotification(String from, String packetID) {
System.out.println("From: " + from + " PacketID: " + packetID + "(composing)");
}
public void offlineNotification(String from, String packetID) {
System.out.println("From: " + from + " PacketID: " + packetID + "(offline)");
}
public void cancelledNotification(String from, String packetID) {
System.out.println("From: " + from + " PacketID: " + packetID + "(cancelled)");
}
});
// Create the message to send with the roster
Message msg = new Message();
msg.setSubject("Any subject you want");
msg.setBody("An interesting body comes here...");
// Add to the message all the notifications requests (offline, delivered, displayed,
// composing)
MessageEventManager.addNotificationsRequests(msg, true, true, true, true);
// Send the message that contains the notifications request
try {
chat1.sendMessage(msg);
// Wait a few seconds so that the XMPP client can send any event
Thread.sleep(200);
} catch (Exception e) {
fail("An error occured sending the message");
}
}
/**
* High level API test.
* 1. User_1 will send a message to user_2 requesting to be notified when any of these events
* occurs: offline, composing, displayed or delivered
* 2. User_2 will receive the message
* 3. User_2 will simulate that the message was displayed
* 4. User_2 will simulate that he/she is composing a reply
* 5. User_2 will simulate that he/she has cancelled the reply
*/
public void testRequestsAndNotifications() {
final ArrayList<String> results = new ArrayList<String>();
ArrayList<String> resultsExpected = new ArrayList<String>();
resultsExpected.add("deliveredNotificationRequested");
resultsExpected.add("composingNotificationRequested");
resultsExpected.add("displayedNotificationRequested");
resultsExpected.add("offlineNotificationRequested");
resultsExpected.add("deliveredNotification");
resultsExpected.add("displayedNotification");
resultsExpected.add("composingNotification");
resultsExpected.add("cancelledNotification");
// Create a chat for each connection
Chat chat1 = getConnection(0).getChatManager().createChat(getBareJID(1), null);
MessageEventManager messageEventManager1 = new MessageEventManager(getConnection(0));
messageEventManager1
.addMessageEventNotificationListener(new MessageEventNotificationListener() {
public void deliveredNotification(String from, String packetID) {
results.add("deliveredNotification");
}
public void displayedNotification(String from, String packetID) {
results.add("displayedNotification");
}
public void composingNotification(String from, String packetID) {
results.add("composingNotification");
}
public void offlineNotification(String from, String packetID) {
results.add("offlineNotification");
}
public void cancelledNotification(String from, String packetID) {
results.add("cancelledNotification");
}
});
MessageEventManager messageEventManager2 = new MessageEventManager(getConnection(1));
messageEventManager2
.addMessageEventRequestListener(new DefaultMessageEventRequestListener() {
public void deliveredNotificationRequested(
String from,
String packetID,
MessageEventManager messageEventManager) {
super.deliveredNotificationRequested(from, packetID, messageEventManager);
results.add("deliveredNotificationRequested");
}
public void displayedNotificationRequested(
String from,
String packetID,
MessageEventManager messageEventManager) {
super.displayedNotificationRequested(from, packetID, messageEventManager);
results.add("displayedNotificationRequested");
}
public void composingNotificationRequested(
String from,
String packetID,
MessageEventManager messageEventManager) {
super.composingNotificationRequested(from, packetID, messageEventManager);
results.add("composingNotificationRequested");
}
public void offlineNotificationRequested(
String from,
String packetID,
MessageEventManager messageEventManager) {
super.offlineNotificationRequested(from, packetID, messageEventManager);
results.add("offlineNotificationRequested");
}
});
// Create the message to send with the roster
Message msg = new Message();
msg.setSubject("Any subject you want");
msg.setBody("An interesting body comes here...");
// Add to the message all the notifications requests (offline, delivered, displayed,
// composing)
MessageEventManager.addNotificationsRequests(msg, true, true, true, true);
// Send the message that contains the notifications request
try {
chat1.sendMessage(msg);
messageEventManager2.sendDisplayedNotification(getBareJID(0), msg.getPacketID());
messageEventManager2.sendComposingNotification(getBareJID(0), msg.getPacketID());
messageEventManager2.sendCancelledNotification(getBareJID(0), msg.getPacketID());
// Wait up to 2 seconds
long initial = System.currentTimeMillis();
while (System.currentTimeMillis() - initial < 2000 &&
(!results.containsAll(resultsExpected))) {
Thread.sleep(100);
}
assertTrue(
"Test failed due to bad results (1)" + resultsExpected,
resultsExpected.containsAll(results));
assertTrue(
"Test failed due to bad results (2)" + results,
results.containsAll(resultsExpected));
} catch (Exception e) {
fail("An error occured sending the message");
}
}
protected int getMaxConnections() {
return 2;
}
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi.impl.smartPointers;
import com.intellij.JavaTestUtil;
import com.intellij.codeInsight.JavaCodeInsightTestCase;
import com.intellij.ide.highlighter.HtmlFileType;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.ide.highlighter.XmlFileType;
import com.intellij.lang.FileASTNode;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.diagnostic.DefaultLogger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.EditorModificationUtil;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.editor.event.EditorEventMulticaster;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.impl.FrozenDocument;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.PlainTextFileType;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Segment;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.psi.impl.source.PsiJavaFileImpl;
import com.intellij.psi.impl.source.tree.FileElement;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.stubs.StubElement;
import com.intellij.psi.stubs.StubTree;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.PsiUtilBase;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.testFramework.*;
import com.intellij.util.FileContentUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ref.GCUtil;
import com.intellij.util.ref.GCWatcher;
import org.intellij.lang.annotations.Language;
import org.jetbrains.annotations.NotNull;
import org.junit.Assert;
import java.util.*;
@HeavyPlatformTestCase.WrapInCommand
@SkipSlowTestLocally
public class SmartPsiElementPointersTest extends JavaCodeInsightTestCase {
private VirtualFile myRoot;
@Override
protected void setUp() throws Exception {
super.setUp();
String root = JavaTestUtil.getJavaTestDataPath() + "/codeEditor/smartPsiElementPointers";
PsiTestUtil.removeAllRoots(myModule, IdeaTestUtil.getMockJdk17());
myRoot = createTestProjectStructure( root);
}
public void testChangeInDocument() {
PsiClass aClass = myJavaFacade.findClass("AClass", GlobalSearchScope.allScope(getProject()));
assertNotNull(aClass);
SmartPsiElementPointer pointer = createPointer(aClass);
Document document = PsiDocumentManager.getInstance(myProject).getDocument(aClass.getContainingFile());
int offset = aClass.getTextOffset();
insertString(document, offset, "/**/");
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
PsiElement element = pointer.getElement();
assertNotNull(element);
assertTrue(element instanceof PsiClass);
assertTrue(element.isValid());
}
private static void insertString(Document document, int offset, String s) {
ApplicationManager.getApplication().runWriteAction(() -> document.insertString(offset, s));
}
// This test is unfair. If pointer would be asked for getElement() between commits it'll never restore again anyway.
//
public void testChangeInDocumentTwice() {
PsiClass aClass = myJavaFacade.findClass("AClass",GlobalSearchScope.allScope(getProject()));
assertNotNull(aClass);
SmartPsiElementPointer pointer = createPointer(aClass);
Document document = PsiDocumentManager.getInstance(myProject).getDocument(aClass.getContainingFile());
int offset = aClass.getTextOffset();
insertString(document, offset, "/*");
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
insertString(document, offset + 2, "*/");
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
PsiElement element = pointer.getElement();
assertNotNull(element);
assertTrue(element instanceof PsiClass);
assertTrue(element.isValid());
}
public void testGetElementWhenDocumentModified() {
PsiClass aClass = myJavaFacade.findClass("AClass",GlobalSearchScope.allScope(getProject()));
assertNotNull(aClass);
SmartPsiElementPointer pointer = createPointer(aClass);
Document document = PsiDocumentManager.getInstance(myProject).getDocument(aClass.getContainingFile());
int offset = aClass.getTextOffset();
insertString(document, offset, "/**/");
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
insertString(document, offset, "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx");
PsiElement element = pointer.getElement();
assertNotNull(element);
assertTrue(element instanceof PsiClass);
assertTrue(element.isValid());
}
public void testKeepBeltWhenDocumentModified() {
PsiClass aClass = myJavaFacade.findClass("AClass",GlobalSearchScope.allScope(getProject()));
assertNotNull(aClass);
SmartPsiElementPointer pointer = createPointer(aClass);
Document document = PsiDocumentManager.getInstance(myProject).getDocument(aClass.getContainingFile());
int offset = aClass.getTextOffset();
insertString(document, offset, "/******/");
pointer.getElement();
insertString(document, offset, "/**/");
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
PsiElement element = pointer.getElement();
assertNotNull(element);
assertTrue(element instanceof PsiClass);
assertTrue(element.isValid());
}
public void testRetrieveOnUncommittedDocument() {
ApplicationManager.getApplication().runWriteAction(() -> {
PsiClass aClass = myJavaFacade.findClass("AClass", GlobalSearchScope.allScope(getProject()));
assertNotNull(aClass);
PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject);
Document document = documentManager.getDocument(aClass.getContainingFile());
document.insertString(0, "/******/");
SmartPointerEx pointer = createPointer(aClass.getNameIdentifier());
document.insertString(0, "/**/");
documentManager.commitAllDocuments();
PsiElement element = pointer.getElement();
assertNotNull(element);
assertTrue(element.getParent() instanceof PsiClass);
assertTrue(element.isValid());
});
}
public void testNoAstLoadingWithoutDocumentChanges() {
PsiClass aClass = myJavaFacade.findClass("Test",GlobalSearchScope.allScope(getProject()));
assertNotNull(aClass);
PsiFileImpl file = (PsiFileImpl)aClass.getContainingFile();
createEditor(file.getVirtualFile());
assertFalse(file.isContentsLoaded());
SmartPointerEx pointer = createPointer(aClass);
assertFalse(file.isContentsLoaded());
//noinspection UnusedAssignment
aClass = null;
gcPointerCache(pointer);
assertNotNull(pointer.getElement());
assertFalse(file.isContentsLoaded());
}
private static void gcPointerCache(SmartPsiElementPointer<?>... pointers) {
GCWatcher.tracking(ContainerUtil.map(pointers, p -> ((SmartPointerEx<?>) p).getCachedElement())).ensureCollected();
for (SmartPsiElementPointer<?> pointer : pointers) {
assertNull(((SmartPointerEx<?>)pointer).getCachedElement());
}
}
public void testTextFileClearingDoesNotCrash() {
configureByText(PlainTextFileType.INSTANCE, "foo bar goo\n");
SmartPsiElementPointer pointer = createPointer(myFile.getFirstChild());
GCWatcher.tracking(myFile.getNode()).ensureCollected();
assertEquals(myFile.getFirstChild(), pointer.getElement());
Document document = myFile.getViewProvider().getDocument();
ApplicationManager.getApplication().runWriteAction(() -> {
document.deleteString(0, document.getTextLength());
GCWatcher.tracking(myFile.getNode()).ensureCollected();
assertEquals(myFile.getFirstChild(), pointer.getElement());
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
});
GCWatcher.tracking(myFile.getNode()).ensureCollected();
assertEquals(myFile.getFirstChild(), pointer.getElement());
}
public void testChangeInPsi() {
PsiClass aClass = myJavaFacade.findClass("AClass",GlobalSearchScope.allScope(getProject()));
assertNotNull(aClass);
SmartPsiElementPointer pointer = createPointer(aClass);
Document document = PsiDocumentManager.getInstance(myProject).getDocument(aClass.getContainingFile());
int offset = aClass.getTextOffset();
insertString(document, offset, "/**/");
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
PsiElement element = pointer.getElement();
assertNotNull(element);
assertTrue(element instanceof PsiClass);
assertTrue(element.isValid());
}
public void testPsiChangesWithLazyPointers() {
PsiClass aClass = myJavaFacade.findClass("AClass", GlobalSearchScope.allScope(getProject()));
assertNotNull(aClass);
final SmartPsiElementPointer<PsiIdentifier> pointer =
createPointer(aClass.getNameIdentifier());
final PsiComment javadoc =
JavaPsiFacade.getElementFactory(aClass.getProject()).createCommentFromText("/** javadoc */", aClass);
ApplicationManager.getApplication().runWriteAction(() -> {
aClass.getParent().addBefore(javadoc, aClass);
});
final PsiIdentifier elt = pointer.getElement();
assertNotNull(elt);
assertSame(elt, aClass.getNameIdentifier());
}
// Pointers in two different files (a type element in "Test" and class "AClass") must both
// survive concurrent edits to both documents, and the restored type must still resolve
// to the restored class.
public void testTypePointer() {
PsiClass aClass = myJavaFacade.findClass("AClass",GlobalSearchScope.allScope(getProject()));
final PsiTypeElement typeElement = myJavaFacade.findClass("Test",GlobalSearchScope.allScope(getProject())).getFields()[0].getTypeElement();
SmartPsiElementPointer typePointer = createPointer(typeElement);
SmartPsiElementPointer classPointer = createPointer(aClass);
Document aClassDocument = PsiDocumentManager.getInstance(myProject).getDocument(aClass.getContainingFile());
Document testDocument = PsiDocumentManager.getInstance(myProject).getDocument(typeElement.getContainingFile());
// Sanity: the two elements really live in different documents.
assertNotSame(aClassDocument, testDocument);
insertString(aClassDocument, aClass.getTextOffset(), "/**/");
insertString(testDocument, typeElement.getTextOffset(), "/**/");
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
PsiElement element = typePointer.getElement();
assertNotNull(element);
assertTrue(element instanceof PsiTypeElement);
assertTrue(element.isValid());
// Cross-file consistency after restoration.
assertEquals(classPointer.getElement(), PsiUtil.resolveClassInType(((PsiTypeElement)element).getType()));
}
// A pointer created inside beforeDocumentChange (i.e. while the document is about to become
// uncommitted) must still restore correctly after subsequent edits and commit.
public void testCreatePointerInBeforeDocumentChange() {
final PsiClass aClass = myJavaFacade.findClass("AClass",GlobalSearchScope.allScope(getProject()));
assertNotNull(aClass);
Document document = PsiDocumentManager.getInstance(myProject).getDocument(aClass.getContainingFile());
final SmartPsiElementPointer[] pointer = new SmartPsiElementPointer[1];
int offset = aClass.getTextOffset();
DocumentListener listener = new DocumentListener() {
@Override
public void beforeDocumentChange(@NotNull DocumentEvent event) {
// Creation happens in the narrow window before the change is applied.
pointer[0] = createPointer(aClass);
}
};
EditorEventMulticaster multicaster = EditorFactory.getInstance().getEventMulticaster();
multicaster.addDocumentListener(listener, getTestRootDisposable());
try {
insertString(document, offset, "/******/");
}
finally {
// NOTE(review): listener is both disposable-registered and manually removed —
// presumably intentional to stop it firing for the edits below; confirm no double-removal issue.
multicaster.removeDocumentListener(listener);
}
pointer[0].getElement();
insertString(document, 0, "/**/");
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
PsiElement element = pointer[0].getElement();
assertNotNull(element);
assertTrue(element instanceof PsiClass);
assertTrue(element.isValid());
}
// A pointer must survive document edits made while the PSI file may have been evicted
// (batch-files mode uses weak references, so the cached PsiFile can be GC-ed).
public void testCreatePointerWhenNoPsiFile() {
// to use weak refs
myPsiManager.runInBatchFilesMode(() -> {
final PsiClass aClass = myJavaFacade.findClass("AClass",GlobalSearchScope.allScope(getProject()));
assertNotNull(aClass);
VirtualFile vFile = myRoot.findChild("AClass.java");
assertNotNull(vFile);
PsiDocumentManager psiDocumentManager = PsiDocumentManager.getInstance(myProject);
Document document = FileDocumentManager.getInstance().getDocument(vFile);
final SmartPsiElementPointer pointer = createPointer(aClass);
// Give the weakly-referenced PsiFile a chance to be collected before editing.
System.gc();
/*
PsiFile psiFile = myPsiManager.getFileManager().getCachedPsiFile(vFile);
assertNull(psiFile);
*/
insertString(document, 0, "class Foo{}\n");
PsiElement element = pointer.getElement();
assertEquals(aClass, element);
insertString(document, 0, "/**/");
psiDocumentManager.commitAllDocuments();
if (aClass.isValid()) {
aClass.getChildren();
}
element = pointer.getElement();
assertNotNull(element);
assertTrue(element instanceof PsiClass);
assertTrue(element.isValid());
return null;
});
}
// After a forced reparse invalidates the whole PSI tree, both a class pointer and a file
// pointer must restore to new, valid elements in the same virtual file.
public void testReplaceFile() {
VirtualFile vfile = createChildData(myRoot, "X.java");
@Language("JAVA")
String text = "public class X { public int X; }";
setFileText(vfile, text);
PsiClass aClass = myJavaFacade.findClass("X", GlobalSearchScope.allScope(getProject()));
assertNotNull(aClass);
assertTrue(aClass.isValid());
SmartPsiElementPointer classp = createPointer(aClass);
SmartPsiElementPointer filep = createPointer(aClass.getContainingFile());
// Forces a full reparse, invalidating the old PSI.
FileContentUtil.reparseFiles(myProject, Collections.singleton(vfile), true);
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
assertFalse(aClass.isValid());
PsiElement element = classp.getElement();
assertNotNull(element);
assertTrue(element instanceof PsiClass);
assertTrue(element.isValid());
assertEquals(vfile, element.getContainingFile().getVirtualFile());
element = filep.getElement();
assertNotNull(element);
assertTrue(element instanceof PsiFile);
assertTrue(element.isValid());
assertEquals(vfile, element.getContainingFile().getVirtualFile());
}
// Creating and dereferencing a pointer to a stub-based element must not force loading
// of the file's AST; only a document edit + commit should load it.
public void testCreatePointerDoesNotLoadPsiTree() {
VirtualFile vfile = createChildData(myRoot, "X.java");
@Language("JAVA")
String text = "public class X { public int X; }";
setFileText(vfile, text);
PsiClass aClass = myJavaFacade.findClass("X", GlobalSearchScope.allScope(getProject()));
assertNotNull(aClass);
assertTrue(aClass.isValid());
PsiFileImpl file = (PsiFileImpl)aClass.getContainingFile();
assertTreeLoaded(file, false);
SmartPsiElementPointer p = createPointer(aClass);
assertNotNull(p);
// Neither pointer creation...
assertTreeLoaded(file, false);
assertInstanceOf(p.getElement(), PsiClass.class);
// ...nor pointer dereference may load the tree.
assertTreeLoaded(file, false);
PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject);
Document document = documentManager.getDocument(file);
insertString(document, 0, "/** asdasd */");
documentManager.commitAllDocuments();
// loaded tree
assertTreeLoaded(file, true);
assertInstanceOf(p.getElement(), PsiClass.class);
assertTreeLoaded(file, true);
}
/**
 * Asserts whether the file's AST is loaded. A loaded file has a non-null tree element
 * and a discarded (null) stub tree; an unloaded file is the reverse.
 */
private static void assertTreeLoaded(PsiFileImpl file, boolean loaded) {
  assertEquals(loaded, file.getTreeElement() != null);
  assertEquals(loaded, file.getStubTree() == null);
}
// After duplicating the line containing "foo();" (ctrl-D), pointers to the original and the
// duplicated reference must resolve to distinct elements.
public void testPointerDisambiguationAfterDupLine() {
String text = "class XXX{ void foo() { \n" +
" <caret>foo();\n" +
"}}";
PsiJavaFile file = (PsiJavaFile)configureByText(JavaFileType.INSTANCE, text);
PsiClass aClass = file.getClasses()[0];
assertNotNull(aClass);
PsiReferenceExpression ref1 = PsiTreeUtil.getParentOfType(PsiUtilBase.getElementAtCaret(getEditor()), PsiReferenceExpression.class);
SmartPsiElementPointer pointer1 = createPointer(ref1);
// Duplicate the caret line, producing a second identical "foo();" call.
ctrlD();
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
// Collect every "foo();" reference in the duplicated text.
Set<PsiReferenceExpression> refs = new HashSet<>();
int offset=0;
while (true) {
offset = getEditor().getDocument().getText().indexOf("foo();", offset+1);
if (offset == -1) break;
PsiReferenceExpression ref2 = PsiTreeUtil.getParentOfType(getFile().findElementAt(offset), PsiReferenceExpression.class);
refs.add(ref2);
}
refs.remove(ref1);
assertEquals(1, refs.size());
PsiReferenceExpression ref2 = refs.iterator().next();
assertNotSame(ref1, ref2);
SmartPsiElementPointer pointer2 = createPointer(ref2);
assertNotSame(pointer1, pointer2);
PsiElement element1 = pointer1.getElement();
PsiElement element2 = pointer2.getElement();
assertNotNull(element1);
assertNotNull(element2);
// The pointers must disambiguate the two textually identical references.
assertNotSame(element1, element2);
assertFalse(getPointerManager().pointToTheSameElement(pointer1, pointer2));
}
// Pointers to the same element are shared and reference-counted: the range stays available
// until the last removePointer call.
public void testPointersRefCount() {
@Language("JAVA")
String text = "class X{}";
PsiFile file = configureByText(JavaFileType.INSTANCE, text);
PsiClass aClass = ((PsiClassOwner)file).getClasses()[0];
SmartPointerManagerImpl smartPointerManager = getPointerManager();
SmartPsiElementPointer pointer1 = createPointer(aClass);
SmartPsiElementPointer pointer2 = createPointer(aClass);
// Same element => same (cached) pointer instance.
assertSame(pointer1, pointer2);
assertNotNull(pointer1.getRange());
smartPointerManager.removePointer(pointer2);
// One reference still held; range survives.
assertNotNull(pointer1.getRange());
smartPointerManager.removePointer(pointer1);
// Last reference released; range tracking is dropped.
assertNull(pointer1.getRange());
}
/** Returns the project's smart-pointer manager, downcast to its implementation class. */
private SmartPointerManagerImpl getPointerManager() {
  SmartPointerManager manager = SmartPointerManager.getInstance(myProject);
  return (SmartPointerManagerImpl)manager;
}
// Once the reference count saturates at Byte.MAX_VALUE, further removePointer calls
// must never drop the pointer's range (removal becomes a no-op past saturation).
public void testPointersRefCountSaturated() {
@Language("JAVA")
String text = "class X{}";
PsiFile file = configureByText(JavaFileType.INSTANCE, text);
PsiClass aClass = ((PsiClassOwner)file).getClasses()[0];
SmartPointerManagerImpl smartPointerManager = getPointerManager();
SmartPsiElementPointerImpl pointer1 = (SmartPsiElementPointerImpl)createPointer(aClass);
// Drive the ref count well past the byte-sized maximum.
for (int i=0; i<1000; i++) {
SmartPsiElementPointer<PsiClass> pointer2 = createPointer(aClass);
assertSame(pointer1, pointer2);
}
assertNotNull(pointer1.getRange());
assertEquals(Byte.MAX_VALUE, pointer1.incrementAndGetReferenceCount(0));
// More removals than additions: the saturated pointer must still keep its range.
for (int i=0; i<1100; i++) {
smartPointerManager.removePointer(pointer1);
Assert.assertNotNull(pointer1.getRange());
}
}
// Creating a pointer for a class in a library file must not cause its document to be loaded.
public void testSmartPointerCreationDoesNotLoadDocument() {
PsiPackage aPackage = myJavaFacade.findPackage("java.io");
for (PsiClass aClass : aPackage.getClasses()) {
PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject);
PsiFile file = aClass.getContainingFile();
Document document = documentManager.getCachedDocument(file);
if (document == null) { //ignore already loaded documents
createPointer(aClass);
// Still unloaded: pointer creation alone must not pull the document in.
assertNull(documentManager.getCachedDocument(file));
//System.out.println("file = " + file);
}
else {
LOG.debug("already loaded file = " + file);
}
}
}
// File-range pointers must survive both the PsiFile and its Document being garbage-collected,
// and must still track subsequent document edits.
public void testSmartPointersSurvivePsiFileUnload() {
final VirtualFile vfile = createChildData(myRoot, "X.txt");
String xxx = "xxx";
String text = xxx + " " + xxx + " " + xxx;
setFileText(vfile, text);
PsiFile psiFile = PsiManager.getInstance(getProject()).findFile(vfile);
assertTrue(String.valueOf(psiFile), psiFile instanceof PsiPlainTextFile);
SmartPointerManagerImpl manager = getPointerManager();
TextRange range1 = TextRange.from(text.indexOf(xxx), xxx.length());
SmartPsiFileRange pointer1 = manager.createSmartPsiFileRangePointer(psiFile, range1);
TextRange range2 = TextRange.from(text.lastIndexOf(xxx), xxx.length());
SmartPsiFileRange pointer2 = manager.createSmartPsiFileRangePointer(psiFile, range2);
assertNotNull(FileDocumentManager.getInstance().getCachedDocument(vfile));
GCWatcher watcher = GCWatcher.tracking(psiFile, FileDocumentManager.getInstance().getDocument(vfile));
//noinspection UnusedAssignment
psiFile = null;
watcher.ensureCollected();
// Both the PsiFile and the Document are now gone from the caches.
assertNull(FileDocumentManager.getInstance().getCachedDocument(vfile));
assertEquals(pointer1.getRange(), range1);
WriteCommandAction.runWriteCommandAction(getProject(), () -> insertString(FileDocumentManager.getInstance().getDocument(vfile), 0, " "));
// Ranges shift with the freshly reloaded document's edit.
assertEquals(range1.shiftRight(1), pointer1.getRange());
assertEquals(range2.shiftRight(1), pointer2.getRange());
}
// Pointers to HTML tags must survive an insertion elsewhere in the document
// (regression-style check for tag pointers in HTML files).
public void testInXml() {
@Language("HTML")
String text = "<!doctype html>\n" +
"<html>\n" +
" <fieldset></fieldset>\n" +
" <select></select>\n" +
"\n" +
" <caret>\n" +
"</html>";
final PsiFile file = configureByText(HtmlFileType.INSTANCE,
text
);
final XmlTag fieldSet = PsiTreeUtil.getParentOfType(file.findElementAt(file.getText().indexOf("fieldset")), XmlTag.class);
assertNotNull(fieldSet);
assertEquals("fieldset", fieldSet.getName());
final XmlTag select = PsiTreeUtil.getParentOfType(file.findElementAt(file.getText().indexOf("select")), XmlTag.class);
assertNotNull(select);
assertEquals("select", select.getName());
final SmartPsiElementPointer<XmlTag> fieldSetPointer = createPointer(fieldSet);
final SmartPsiElementPointer<XmlTag> selectPointer = createPointer(select);
// Insert a new tag at the caret, after both tracked tags.
WriteCommandAction.runWriteCommandAction(getProject(), () -> insertString(getEditor().getDocument(), getEditor().getCaretModel().getOffset(), "<a></a>"));
PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
final XmlTag newFieldSet = fieldSetPointer.getElement();
assertNotNull(newFieldSet);
assertEquals("fieldset", newFieldSet.getName());
final XmlTag newSelect = selectPointer.getElement();
assertNotNull(newSelect);
assertEquals("select", newSelect.getName());
}
// A pointer to a <ul> tag must survive insertion of another, similar <ul> tag nearby
// and must restore the ORIGINAL tag (verified via its class attribute).
public void testInXml2() {
String text = "<html>\n" +
" <ul class=\"dropdown-menu\">\n" +
" <li><a href=\"#\">One more separated link</a></li>\n" +
" </ul>\n" +
"<caret>\n" +
"</html>";
final PsiFile file = configureByText(XmlFileType.INSTANCE,
text
);
final XmlTag ul = PsiTreeUtil.getParentOfType(file.findElementAt(file.getText().indexOf("ul")), XmlTag.class);
assertNotNull(ul);
assertEquals("ul", ul.getName());
assertEquals("dropdown-menu", ul.getAttributeValue("class"));
SmartPsiElementPointer<XmlTag> ulPointer = createPointer(ul);
// Insert a second <ul> with a different class to tempt the pointer into mis-restoring.
WriteCommandAction.runWriteCommandAction(getProject(), () -> {
int offset = getEditor().getCaretModel().getOffset();
insertString(getEditor().getDocument(), offset, " <ul class=\"nav navbar-nav navbar-right\">\n" +
" </ul>\n");
});
PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
final XmlTag newUl = ulPointer.getElement();
assertNotNull(newUl);
assertEquals("ul", newUl.getName());
// Must be the original dropdown-menu tag, not the newly inserted one.
assertEquals("dropdown-menu", newUl.getAttributeValue("class"));
}
// Inserting an import before a class must not invalidate/replace the class's PSI instance.
public void testInsertImport() {
@Language("JAVA")
String text = "class S {\n" +
"}";
final PsiFile file = configureByText(JavaFileType.INSTANCE,
text);
PsiClass aClass = ((PsiJavaFile)file).getClasses()[0];
WriteCommandAction.runWriteCommandAction(getProject(), () -> insertString(getEditor().getDocument(), 0, "import java.util.Map;\n"));
PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
PsiClass aClass2 = ((PsiJavaFile)file).getClasses()[0];
// Identity preserved: incremental reparse kept the same PsiClass instance.
assertSame(aClass, aClass2);
}
// A pointer created while the file is stub-based must report the same range as a pointer
// created after the AST has been loaded.
public void testEqualPointerRangesWhenCreatedFromStubAndAST() {
@Language("JAVA")
String text = "class S {\n" +
"}";
final PsiFile file = configureByText(JavaFileType.INSTANCE,
text);
PsiClass aClass = ((PsiJavaFile)file).getClasses()[0];
// Precondition: file is still stub-based at this point.
assertNotNull(((PsiFileImpl)file).getStubTree());
final SmartPointerManager manager = getPointerManager();
final SmartPsiElementPointer<PsiClass> pointer1 = createPointer(aClass);
Segment range1 = pointer1.getRange();
manager.removePointer(pointer1);
// Loading the AST switches the file away from stubs.
final FileASTNode node = file.getNode();
final SmartPsiElementPointer<PsiClass> pointer2 = createPointer(aClass);
assertEquals(range1, pointer2.getRange());
// Keep the AST alive until the comparison is done.
assertNotNull(node);
}
// A pointer created from a stub-based class must equal a pointer created from the AST-based
// class instance that replaces it after the stub tree is collected.
public void testEqualPointersWhenCreatedFromStubAndAST() {
PsiJavaFile file = (PsiJavaFile)myJavaFacade.findClass("AClass", GlobalSearchScope.allScope(getProject())).getContainingFile();
int hash1 = file.getClasses()[0].hashCode();
final SmartPsiElementPointer<PsiClass> pointer1 = createPointer(file.getClasses()[0]);
assertNotNull(((PsiFileImpl)file).getStubTree());
// Collect the stub tree and the stub-based PsiClass so a fresh instance is created.
GCWatcher.tracking(((PsiFileImpl)file).getStubTree(), file.getClasses()[0]).ensureCollected();
final FileASTNode node = file.getNode();
final SmartPsiElementPointer<PsiClass> pointer2 = createPointer(file.getClasses()[0]);
// A genuinely new PsiClass instance must be in play...
assertFalse(hash1 == file.getClasses()[0].hashCode());
// ...yet the pointers compare equal and agree on the range.
assertEquals(pointer1, pointer2);
assertEquals(pointer1.getRange(), pointer2.getRange());
assertNotNull(node);
}
// Performance: range-pointer updates over 10000 uncommitted insertions into a large file
// must stay within budget; the tracked range (before all insertions) must never move.
public void testLargeFileWithManyChangesPerformance() throws Exception {
String text = StringUtil.repeat("foo foo \n", 50000);
PsiFile file = createFile("a.txt", text);
final TextRange range = TextRange.from(10, 10);
final SmartPsiFileRange pointer = getPointerManager().createSmartPsiFileRangePointer(file, range);
final Document document = file.getViewProvider().getDocument();
assertNotNull(document);
WriteAction.run(() -> PlatformTestUtil.startPerformanceTest("smart pointer range update", 10_000, () -> {
for (int i = 0; i < 10000; i++) {
// All insertions are after the tracked range, so it must be unaffected.
document.insertString(i * 20 + 100, "x\n");
assertFalse(PsiDocumentManager.getInstance(myProject).isCommitted(document));
assertEquals(range, pointer.getRange());
}
}).setup(() -> {
// Reset the document between measured attempts.
document.setText(text);
assertEquals(range, pointer.getRange());
}).assertTiming());
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
assertEquals(range, pointer.getRange());
}
// Two overlapping range pointers that collapse onto the same 1-char range after deletions
// must both keep tracking correctly through commits and further insertions.
public void testConvergingRanges() {
configureByText(PlainTextFileType.INSTANCE, "aba");
final Document document = myFile.getViewProvider().getDocument();
assertNotNull(document);
SmartPsiFileRange range1 = getPointerManager().createSmartPsiFileRangePointer(myFile, TextRange.create(0, 2));
SmartPsiFileRange range2 = getPointerManager().createSmartPsiFileRangePointer(myFile, TextRange.create(1, 3));
ApplicationManager.getApplication().runWriteAction(() -> {
// Delete first and last chars; both ranges converge on the middle character.
document.deleteString(0, 1);
document.deleteString(1, 2);
});
assertEquals(TextRange.create(0, 1), range1.getRange());
assertEquals(TextRange.create(0, 1), range2.getRange());
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
assertEquals(TextRange.create(0, 1), range1.getRange());
assertEquals(TextRange.create(0, 1), range2.getRange());
insertString(document, 0, "a");
// Both converged ranges shift together.
assertEquals(TextRange.create(1, 2), range1.getRange());
assertEquals(TextRange.create(1, 2), range2.getRange());
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
assertEquals(TextRange.create(1, 2), range1.getRange());
assertEquals(TextRange.create(1, 2), range2.getRange());
}
// Pointers to two classes must survive Document.moveText reordering the classes in the file.
public void testMoveText() {
@Language("JAVA")
String text = "class C1{}\nclass C2 {}";
PsiJavaFile file = (PsiJavaFile)configureByText(JavaFileType.INSTANCE, text);
DocumentEx document = (DocumentEx)file.getViewProvider().getDocument();
SmartPsiElementPointer<PsiClass> pointer1 =
createPointer(file.getClasses()[0]);
SmartPsiElementPointer<PsiClass> pointer2 =
createPointer(file.getClasses()[1]);
assertEquals("C1", pointer1.getElement().getName());
assertEquals("C2", pointer2.getElement().getName());
// Drop cached elements so restoration goes through range/anchor info.
gcPointerCache(pointer1, pointer2);
TextRange range = file.getClasses()[1].getTextRange();
// Move C2's text to the front of the document.
ApplicationManager.getApplication().runWriteAction(() -> document.moveText(range.getStartOffset(), range.getEndOffset(), 0));
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
assertEquals("C1", pointer1.getElement().getName());
assertEquals("C2", pointer2.getElement().getName());
}
// Range pointers in a non-physical (copied) file must track edits just like physical ones.
public void testNonPhysicalFile() {
PsiJavaFile file = (PsiJavaFile)myJavaFacade.findClass("AClass", GlobalSearchScope.allScope(getProject())).getContainingFile().copy();
SmartPsiFileRange pointer = getPointerManager().createSmartPsiFileRangePointer(file, TextRange.create(1, 2));
insertString(file.getViewProvider().getDocument(), 0, " ");
assertEquals(TextRange.create(2, 3), pointer.getRange());
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
assertEquals(TextRange.create(2, 3), pointer.getRange());
}
// Inserting text identical to the tracked method right after it must not confuse the
// pointer: it must keep pointing at the original method's range.
public void testUpdateAfterInsertingIdenticalText() {
String text = "class Foo {\n" +
" void m() {\n" +
" }\n" +
"<caret>}\n";
PsiJavaFile file = (PsiJavaFile)configureByText(JavaFileType.INSTANCE, text);
PsiMethod method = file.getClasses()[0].getMethods()[0];
TextRange originalRange = method.getTextRange();
SmartPsiElementPointer pointer = createPointer(method);
// Insert an identical copy of the method body at the caret (just before the closing brace).
ApplicationManager.getApplication().runWriteAction(() -> EditorModificationUtil.insertStringAtCaret(myEditor, " void m() {\n" +
" }\n"));
PsiDocumentManager.getInstance(myProject).commitDocument(myEditor.getDocument());
PsiElement element = pointer.getElement();
assertNotNull(element);
TextRange newRange = element.getTextRange();
assertEquals(originalRange, newRange);
}
// A pointer restored via anchor info (cached element GC-ed) must survive deletion of
// a sibling class via a PSI operation.
public void testAnchorInfoSurvivesPsiChange() {
@Language("JAVA")
String text = "class C1{}\nclass C2 {}";
PsiJavaFile file = (PsiJavaFile)configureByText(JavaFileType.INSTANCE, text);
SmartPsiElementPointer<PsiClass> pointer = createPointer(file.getClasses()[1]);
// Drop the cached element so restoration relies on anchor info alone.
gcPointerCache(pointer);
ApplicationManager.getApplication().runWriteAction(() -> file.getClasses()[0].delete());
assertNotNull(pointer.getElement());
}
// A pointer to a zero-length element (the implicit type argument inside a diamond "<>")
// must restore after its cached element is collected.
public void testPointerToEmptyElement() {
@Language("JAVA")
String text = "class Foo {\n" +
" Test<String> test = new Test<>();\n" +
"}";
PsiFile file = configureByText(JavaFileType.INSTANCE, text);
PsiJavaCodeReferenceElement ref = PsiTreeUtil.findElementOfClassAtOffset(file, file.getText().indexOf("<>"), PsiJavaCodeReferenceElement.class, false);
SmartPointerEx pointer = createPointer(ref.getParameterList().getTypeParameterElements()[0]);
//noinspection UnusedAssignment
ref = null;
gcPointerCache(pointer);
assertInstanceOf(pointer.getElement(), PsiTypeElement.class);
}
// Pointers to empty elements of the same method (empty modifier list, absent type parameter
// list) must each restore to the correct element type after GC of the cached elements.
public void testPointerToEmptyElement2() {
@Language("JAVA")
String text = "class Foo {\n" +
" void foo() {}\n" +
"}";
PsiFile file = configureByText(JavaFileType.INSTANCE, text);
PsiMethod method = PsiTreeUtil.findElementOfClassAtOffset(file, file.getText().indexOf("void"), PsiMethod.class, false);
SmartPointerEx pointer1 = createPointer(method.getModifierList());
SmartPointerEx pointer2 = createPointer(method.getTypeParameterList());
//noinspection UnusedAssignment
method = null;
gcPointerCache(pointer1, pointer2);
assertInstanceOf(pointer1.getElement(), PsiModifierList.class);
assertInstanceOf(pointer2.getElement(), PsiTypeParameterList.class);
}
// A GC-flushed pointer to a reference element must restore after the reference is renamed.
public void testPointerToReferenceSurvivesRename() {
@Language("JAVA")
String text = "class Foo extends Bar {}";
PsiFile file = configureByText(JavaFileType.INSTANCE, text);
PsiJavaCodeReferenceElement ref = PsiTreeUtil.findElementOfClassAtOffset(file, file.getText().indexOf("Bar"), PsiJavaCodeReferenceElement.class, false);
SmartPointerEx pointer = createPointer(ref);
//noinspection UnusedAssignment
ref = null;
gcPointerCache(pointer);
// Re-locate the reference (the old local was nulled for GC) and rename it in place.
ref = PsiTreeUtil.findElementOfClassAtOffset(file, file.getText().indexOf("Bar"), PsiJavaCodeReferenceElement.class, false);
final PsiJavaCodeReferenceElement finalRef = ref;
ApplicationManager.getApplication().runWriteAction(() -> {
finalRef.handleElementRename("BarImpl");
});
assertNotNull(pointer.getElement());
}
// A pointer to a stubbed element with no dedicated anchor (an annotation inside an
// initializer) must survive GC of the stub tree plus a document edit.
public void testNonAnchoredStubbedElement() {
@Language("JAVA")
String text = "class Foo { { @NotNull String foo; } }";
PsiFile file = configureByText(JavaFileType.INSTANCE, text);
StubTree stubTree = ((PsiFileImpl)file).getStubTree();
assertNotNull(stubTree);
PsiElement anno = stubTree.getPlainList().stream().map(StubElement::getPsi).filter(psiElement -> psiElement instanceof PsiAnnotation).findFirst().get();
SmartPsiElementPointer<PsiElement> pointer = createPointer(anno);
assertNotNull(((PsiFileImpl)file).getStubTree());
// Null the strong references so the stub tree and element can actually be collected.
//noinspection UnusedAssignment
stubTree = null;
//noinspection UnusedAssignment
anno = null;
gcPointerCache(pointer);
insertString(file.getViewProvider().getDocument(), 0, " ");
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
assertNotNull(pointer.getElement());
}
// Performance: with ~5000 tag pointers alive, repeated PSI renames must keep every
// pointer's range/psiRange in sync within the time budget.
public void testManyPsiChangesWithManySmartPointersPerformance() throws Exception {
String eachTag = "<a>\n" + StringUtil.repeat(" <a> </a>\n", 9) + "</a>\n";
XmlFile file = (XmlFile)createFile("a.xml", "<root>\n" + StringUtil.repeat(eachTag, 500) + "</root>");
List<XmlTag> tags = new ArrayList<>(PsiTreeUtil.findChildrenOfType(file.getDocument(), XmlTag.class));
List<SmartPsiElementPointer> pointers = ContainerUtil.map(tags, this::createPointer);
ApplicationManager.getApplication().runWriteAction(() -> PlatformTestUtil.startPerformanceTest("smart pointer range update after PSI change", 21000, () -> {
for (int i = 0; i < tags.size(); i++) {
XmlTag tag = tags.get(i);
SmartPsiElementPointer pointer = pointers.get(i);
// Pointer ranges must match the tag name length before and after each rename.
assertEquals(tag.getName().length(), TextRange.create(pointer.getRange()).getLength());
assertEquals(tag.getName().length(), TextRange.create(pointer.getPsiRange()).getLength());
tag.setName("bar1" + (i % 10));
assertEquals(tag.getName().length(), TextRange.create(pointer.getRange()).getLength());
assertEquals(tag.getName().length(), TextRange.create(pointer.getPsiRange()).getLength());
}
PostprocessReformattingAspect.getInstance(myProject).doPostponedFormatting();
}).useLegacyScaling().assertTiming());
}
/** Creates a smart pointer for {@code element} via the project's pointer manager. */
@NotNull
private <T extends PsiElement> SmartPointerEx<T> createPointer(T element) {
  SmartPsiElementPointer<T> pointer = getPointerManager().createSmartPsiElementPointer(element);
  return (SmartPointerEx<T>)pointer;
}
// Commenting out a field merges it into the comment: its pointer must die while the
// pointer to the following (now merged-name) field survives.
public void testCommentingField() throws Exception {
@Language("JAVA")
String text = "class A {\n" +
" int x;\n" +
" int y;\n" +
"}";
PsiJavaFile file = (PsiJavaFile)createFile("a.java", text);
PsiField[] fields = file.getClasses()[0].getFields();
SmartPointerEx<PsiField> pointer0 = createPointer(fields[0]);
SmartPointerEx<PsiField> pointer1 = createPointer(fields[1]);
WriteCommandAction.runWriteCommandAction(myProject, () -> {
Document document = file.getViewProvider().getDocument();
assert document != null;
// Turn " int x;" into a line comment.
document.insertString(file.getText().indexOf("int"), "//");
commitDocument(document);
});
assertNull(pointer0.getElement());
assertEquals("y", pointer1.getElement().getName());
}
// Pointers created for stub-based classes must expose a range/psiRange equal to the class's
// name identifier range (the anchor element).
public void testAnchorInfoHasRange() throws Exception {
@Language("JAVA")
String text = "class C1{}";
PsiJavaFile file = (PsiJavaFile)createFile("a.java", text);
// Precondition: still stub-based, so the anchor path is exercised.
assertNotNull(((PsiFileImpl) file).getStubTree());
PsiClass psiClass = file.getClasses()[0];
Segment range = createPointer(psiClass).getRange();
assertNotNull(range);
assertEquals(psiClass.getNameIdentifier().getTextRange(), TextRange.create(range));
// Same check for getPsiRange() on a fresh file.
file = (PsiJavaFile)createFile("b.java", "class C2{}");
assertNotNull(((PsiFileImpl) file).getStubTree());
psiClass = file.getClasses()[0];
range = createPointer(psiClass).getPsiRange();
assertNotNull(range);
assertEquals(psiClass.getNameIdentifier().getTextRange(), TextRange.create(range));
}
// Performance: creating ~100k file-range pointers and removing them in shuffled order
// must finish within the budget.
public void testManySmartPointersCreationDeletionPerformance() throws Exception {
String text = StringUtil.repeatSymbol(' ', 100000);
PsiFile file = createFile("a.txt", text);
PlatformTestUtil.startPerformanceTest(getTestName(false), 650, () -> {
List<SmartPsiFileRange> pointers = new ArrayList<>();
for (int i = 0; i < text.length() - 1; i++) {
pointers.add(getPointerManager().createSmartPsiFileRangePointer(file, new TextRange(i, i + 1)));
}
// Shuffled removal stresses the pointer tracker's internal ordering.
Collections.shuffle(pointers);
for (SmartPsiFileRange pointer : pointers) {
getPointerManager().removePointer(pointer);
}
}).assertTiming();
}
// Pointers to different elements of the same file must not collide on hashCode.
public void testDifferentHashCodesForDifferentElementsInOneFile() throws Exception {
@Language("JAVA")
String text = "class Foo { void foo(); }";
PsiClass clazz = ((PsiJavaFile)createFile("a.java", text)).getClasses()[0];
assertFalse(createPointer(clazz).hashCode() == createPointer(clazz.getMethods()[0]).hashCode());
}
// A pointer to the import list must survive both growing it (adding an import) and
// shrinking it back via whitespace-shuffling edits, each committed separately.
public void testImportListPointerSurvivesImportAddition() throws Exception {
@Language("JAVA")
String text = "import foo.Bar;\nclass Foo {}";
PsiJavaFile file = (PsiJavaFile)createFile("a.java", text);
SmartPointerEx<PsiImportList> pointer = createPointer(file.getImportList());
Document document = file.getViewProvider().getDocument();
WriteCommandAction.runWriteCommandAction(myProject, () -> {
document.insertString(document.getText().indexOf("class"), "import foo.Goo;\n");
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
});
assertEquals(file.getImportList(), pointer.getElement());
assertSize(2, file.getImportList().getImportStatements());
WriteCommandAction.runWriteCommandAction(myProject, () -> {
// Two separate edits with a commit in between: prepend a space, then cut
// everything up to the second import (removing the first one).
document.insertString(0, " ");
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
document.deleteString(0, document.getText().indexOf("\nimport"));
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
});
assertEquals(file.getImportList(), pointer.getElement());
assertSize(1, file.getImportList().getImportStatements());
}
// Renaming a file must neither load its AST nor break a pointer to a class inside it.
public void testNoAstLoadingOnFileRename() throws Exception {
@Language("JAVA")
String text = "class A {}";
PsiFile psiFile = createFile("a.java", text);
SmartPointerEx<PsiClass> pointer = createPointer(((PsiJavaFile)psiFile).getClasses()[0]);
// Contents not loaded before the rename...
assertFalse(((PsiFileImpl)psiFile).isContentsLoaded());
VirtualFile file = psiFile.getVirtualFile();
WriteAction.run(() -> file.rename(this, "b.java"));
assertTrue(psiFile.isValid());
assertEquals(((PsiJavaFile)psiFile).getClasses()[0], pointer.getElement());
// ...and still not loaded after rename + pointer dereference.
assertFalse(((PsiFileImpl)psiFile).isContentsLoaded());
}
// Removing the same pointer twice is a programming error and must trigger the
// "Double smart pointer removal" assertion.
public void testDoubleRemoveIsAnError() throws Exception {
// Keep the expected assertion out of stderr so the test run stays clean.
DefaultLogger.disableStderrDumping(getTestRootDisposable());
@Language("JAVA")
String text = "class A {}";
SmartPointerEx<PsiFile> pointer = createPointer(createFile("a.java", text));
getPointerManager().removePointer(pointer);
try {
getPointerManager().removePointer(pointer);
fail("Should have failed");
}
catch (AssertionError e) {
assertTrue(e.getMessage(), e.getMessage().contains("Double smart pointer removal"));
}
}
// Even when the AST is loaded at creation time, the pointer must be stub-based: restoring
// it after the AST is collected must not reload the tree.
public void testStubSmartPointersAreCreatedEvenInAstPresence() throws Exception {
@Language("JAVA")
String text = "class A {}";
PsiJavaFileImpl file = (PsiJavaFileImpl)createFile("a.java", text);
// Load the AST before creating the pointer.
assertNotNull(file.getNode());
SmartPointerEx<PsiClass> pointer = createPointer(file.getClasses()[0]);
GCWatcher.tracking(file.getNode()).ensureCollected();
assertNull(file.getTreeElement());
assertNotNull(pointer.getElement());
// Dereferencing the pointer must not have reloaded the tree.
assertNull(file.getTreeElement());
}
// A pointer to a method must survive Document.setText replacing the whole file content
// with a slightly longer variant (comment expanded to multiple lines).
public void testSurviveAfterWholeTextReplace() throws Exception {
@Language("JAVA")
String text = "class A {" +
"void foo() {\n" +
" //comment\n" +
"\n}" +
"\n" +
"void bar() {}\n" +
"void bar2() {}\n" +
"void bar3() {}\n" +
"void bar4() {}\n" +
"}";
PsiJavaFileImpl file = (PsiJavaFileImpl)createFile("a.java", text);
SmartPointerEx<PsiMethod> pointer = createPointer(file.getClasses()[0].getMethods()[1]);
WriteCommandAction.runWriteCommandAction(myProject, () -> {
file.getViewProvider().getDocument().setText(text.replace("//comment", "//a\n//multiline\n// comment"));
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
});
// Still points at bar() (method index 1) after the full-text replace.
assertEquals(file.getClasses()[0].getMethods()[1], pointer.getElement());
}
// A file-range pointer must survive the file changing language (rename .java -> .txt)
// combined with a document edit, tracking the shifted range in the new file type.
public void testRangePointerSurvivesLanguageChange() throws Exception {
PsiFile file = createFile("a.java", "abc");
SmartPsiFileRange pointer = getPointerManager().createSmartPsiFileRangePointer(file, TextRange.from(0, 1));
assertInstanceOf(pointer.getElement(), PsiJavaFile.class);
WriteCommandAction.runWriteCommandAction(myProject, () -> {
Document document = file.getViewProvider().getDocument();
// Rename switches the file's language from Java to plain text.
file.setName("a.txt");
document.insertString(0, "ddd");
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
});
assertInstanceOf(pointer.getElement(), PsiPlainTextFile.class);
assertEquals(TextRange.from(3, 1), TextRange.create(pointer.getRange()));
}
// A range pointer over a non-ASCII character (Cyrillic \u042b) must report correct
// 1-char ranges — no byte/char length confusion.
public void testFileRangeWithUnicode() throws Exception {
PsiFile file = createFile("a.java", "\u042b");
assertEquals(1, file.getTextLength());
SmartPsiFileRange pointer = getPointerManager().createSmartPsiFileRangePointer(file, TextRange.from(0, file.getTextLength()));
assertEquals(TextRange.from(0, 1), TextRange.create(pointer.getPsiRange()));
assertEquals(TextRange.from(0, 1), TextRange.create(pointer.getRange()));
}
// A class pointer must survive a whole-document setText that only adds surrounding
// whitespace around the same class text.
public void testSurviveOnDocumentChangeAround() throws Exception {
@Language("JAVA")
String text = "class Foo {}";
PsiFile file = createFile("a.java", text);
SmartPointerEx<PsiClass> pointer = createPointer(((PsiJavaFile)file).getClasses()[0]);
WriteCommandAction.runWriteCommandAction(myProject, () -> {
@Language("JAVA")
String text2 = " class Foo {} ";
file.getViewProvider().getDocument().setText(text2);
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
});
assertNotNull(pointer.getElement());
}
// if the assertion is to be removed, please ensure that the test in IDEA-182567 passes
// Creating a smart pointer from inside a PsiTreeChangeListener callback is prohibited
// (PSI is mid-change there); the platform must raise a "must not be created" assertion.
public void testCreatingPointerInsidePsiListenerProhibited() throws Exception {
// Keep the expected assertion out of stderr so the test run stays clean.
DefaultLogger.disableStderrDumping(getTestRootDisposable());
@Language("JAVA")
String text = "class Foo { { int a; } }";
PsiFile file = createFile("a.java", text);
try {
WriteCommandAction.runWriteCommandAction(myProject, () -> {
PsiLocalVariable var = PsiTreeUtil.findElementOfClassAtOffset(file, file.getText().indexOf("int"), PsiLocalVariable.class, false);
PsiTreeChangeAdapter listener = new PsiTreeChangeAdapter() {
@Override
public void childAdded(@NotNull PsiTreeChangeEvent event) {
// Illegal: pointer creation during a PSI change notification.
createPointer(var);
}
};
PsiManager.getInstance(getProject()).addPsiTreeChangeListener(listener);
try {
// Triggers childAdded via the modifier change.
var.getModifierList().setModifierProperty(PsiModifier.FINAL, true);
}
finally {
PsiManager.getInstance(getProject()).removePsiTreeChangeListener(listener);
}
});
fail();
}
catch (AssertionError e) {
assertTrue(e.getMessage(), e.getMessage().contains("must not be created"));
}
}
// In a file with syntax errors (several error elements can share an offset), a pointer to
// EVERY element in the tree must restore to exactly that element.
public void testCanRestoreErrorElementsAtSameOffset() throws Exception {
// Deliberately malformed source producing multiple error elements.
String text = "class Foo { void foo() { if(IdeaLogger.ourErrorsOccurred != null) try(hrow IdeaLogger.ourErrorsOccurred; } }";
PsiFile file = createFile("a.java",
text);
for (PsiElement element : SyntaxTraverser.psiTraverser(file)) {
SmartPointerEx<PsiElement> pointer = createPointer(element);
assertEquals(element, pointer.getElement());
}
}
  public void testDoNotLeakLightVirtualFileSmartPointersReachableViaDocument() {
    Key<SmartPointerEx<PsiClass>> key = Key.create("smart pointer test");
    // Create a non-physical file and stash a pointer to its class in the document's
    // user data; the Runnable scope ensures no direct strong references survive.
    Runnable createFileAndPointer = () -> {
      PsiFile file = PsiFileFactory.getInstance(myProject).createFileFromText("a.java", JavaLanguage.INSTANCE, "class Foo {}", true, false);
      SmartPointerEx<PsiClass> pointer = createPointer(((PsiJavaFile)file).getClasses()[0]);
      file.getViewProvider().getDocument().putUserData(key, pointer);
    };
    createFileAndPointer.run();
    // Encourage collection of softly-reachable objects and flush the pointer queue,
    // then assert that no live (non-frozen) document still holds our pointer.
    GCUtil.tryGcSoftlyReachableObjects();
    SmartPointerTracker.processQueue();
    LeakHunter.checkLeak(LeakHunter.allRoots(), Document.class, d -> !(d instanceof FrozenDocument) && d.getUserData(key) != null);
  }
public void testNonPhysicalPointersSurviveLikePhysical() {
String text = "class Foo { }";
PsiFile file = PsiFileFactory.getInstance(myProject).createFileFromText("a.java", JavaLanguage.INSTANCE, text, false, false);
Document document = file.getViewProvider().getDocument();
PsiWhiteSpace whiteSpace = assertInstanceOf(file.findElementAt(text.indexOf('{') + 1), PsiWhiteSpace.class);
SmartPointerEx<PsiWhiteSpace> pointer = createPointer(whiteSpace);
whiteSpace.replace(PsiParserFacade.SERVICE.getInstance(myProject).createWhiteSpaceFromText(" "));
assertFalse(whiteSpace.isValid());
assertSame(file.findElementAt(text.indexOf('{') + 1), pointer.getElement());
assertEquals("class Foo { }", document.getText());
}
  public void testPointedBinaryFilesCanBeGcEd() throws Exception {
    VirtualFile vFile = createFile("a.jar", "").getVirtualFile();
    // A .jar maps to a PsiBinaryFile rather than a parsed PSI tree.
    assertInstanceOf(getPsiManager().findFile(vFile), PsiBinaryFile.class);
    SmartPointerEx<PsiFile> pointer = createPointer(getPsiManager().findFile(vFile));
    // Force the cached PsiBinaryFile instance to be garbage-collected; the pointer
    // must still restore a (freshly loaded) binary file afterwards.
    GCWatcher.tracking(getPsiManager().findFile(vFile)).ensureCollected();
    assertInstanceOf(pointer.getElement(), PsiBinaryFile.class);
  }
public void testRangePointersSurviveNonPhysicalTextAddition() {
checkRangePointersSurviveNonPhysicalTextAddition(true);
checkRangePointersSurviveNonPhysicalTextAddition(false);
}
private void checkRangePointersSurviveNonPhysicalTextAddition(boolean eventSystemEnabled) {
String text1 = "import a.Foo1Bar;";
String text2 = "import a.Foo2Bar;";
PsiFileFactory factory = PsiFileFactory.getInstance(myProject);
PsiJavaFile file = (PsiJavaFile)factory
.createFileFromText("a.java", JavaLanguage.INSTANCE, text2, eventSystemEnabled, false);
Document document = file.getViewProvider().getDocument();
PsiImportStatement statement = file.getImportList().getImportStatements()[0];
SmartPsiFileRange pointer = getPointerManager().createSmartPsiFileRangePointer(file, statement.getImportReference().getTextRange());
WriteCommandAction.runWriteCommandAction(myProject, () -> {
file.getImportList().add(createImportFromText(text1));
});
assertEquals(text2, statement.getText());
assertEquals(statement.getImportReference().getTextRange(), pointer.getRange());
assertTrue(document.getText(), document.getText().startsWith(text1));
}
private PsiImportStatement createImportFromText(String text) {
return ((PsiJavaFile)PsiFileFactory.getInstance(myProject).createFileFromText("a.java", JavaLanguage.INSTANCE, text))
.getImportList().getImportStatements()[0];
}
}
| |
/*
* Copyright (c) 1998, 2006, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.jmx.snmp.daemon;
// java imports
//
import java.util.Vector;
import java.util.Enumeration;
// jmx imports
//
import com.sun.jmx.snmp.SnmpOid;
// SNMP Runtime imports
//
import com.sun.jmx.snmp.agent.SnmpMibAgent;
/**
* The class is used for building a tree representation of the different
* root oids of the supported MIBs. Each node is associated to a specific MIB.
*/
final class SnmpMibTree {
    /** Creates an empty tree with a sentinel root (node value -1) and no default agent. */
    public SnmpMibTree() {
        defaultAgent= null;
        root= new TreeNode(-1, null, null);
    }
    /**
     * Sets the agent returned when no registered subtree matches an OID.
     * Also installs it on the root so lookups that stop at the root resolve to it.
     */
    public void setDefaultAgent(SnmpMibAgent def) {
        defaultAgent= def;
        root.agent= def;
    }
    /** Returns the fallback agent, or null if none was set. */
    public SnmpMibAgent getDefaultAgent() {
        return defaultAgent;
    }
    /** Registers the agent under its own root OID (agent.getRootOid()). */
    public void register(SnmpMibAgent agent) {
        root.registerNode(agent);
    }
    /** Registers the agent under an explicit OID, starting from the tree root. */
    public void register(SnmpMibAgent agent, long[] oid) {
        root.registerNode(oid, 0, agent);
    }
    /**
     * Returns the agent owning the deepest registered branch matching {@code oid},
     * falling back to the default agent when no branch (or no agent) matches.
     */
    public SnmpMibAgent getAgentMib(SnmpOid oid) {
        TreeNode node= root.retrieveMatchingBranch(oid.longValue(), 0);
        if (node == null)
            return defaultAgent;
        else
            if(node.getAgentMib() == null)
                return defaultAgent;
            else
                return node.getAgentMib();
    }
    /**
     * Unregisters the agent from each of the branches designated by {@code oids}.
     * OIDs that do not match any branch are silently skipped.
     */
    public void unregister(SnmpMibAgent agent, SnmpOid[] oids) {
        for(int i = 0; i < oids.length; i++) {
            long[] oid = oids[i].longValue();
            TreeNode node = root.retrieveMatchingBranch(oid, 0);
            if (node == null)
                continue;
            node.removeAgent(agent);
        }
    }
    /** Removes the agent from the whole tree, pruning branches left without agents. */
    public void unregister(SnmpMibAgent agent) {
        root.removeAgentFully(agent);
    }
    /** Debug helper: dumps the tree structure to stdout via TreeNode.printTree. */
    public void printTree() {
        root.printTree(">");
    }
    private SnmpMibTree defaultAgent;
    private TreeNode root;
    // A SnmpMibTree object is a tree of TreeNode
    //
    final class TreeNode {
        /** Registers the agent under its own root OID, starting at this node. */
        void registerNode(SnmpMibAgent agent) {
            long[] oid= agent.getRootOid();
            registerNode(oid, 0, agent);
        }
        /**
         * Walks the children matching {@code oid} from index {@code cursor} and
         * returns the deepest matching node; returns {@code this} when no child
         * matches, or the parent when the deeper match carries no agent.
         */
        TreeNode retrieveMatchingBranch(long[] oid, int cursor) {
            TreeNode node= retrieveChild(oid, cursor);
            if (node == null)
                return this;
            if (children.size() == 0) {
                // In this case, the node does not have any children. So no point to
                // continue the search ...
                return node;
            }
            if( cursor + 1 == oid.length) {
                // In this case, the oid does not have any more element. So the search
                // is over.
                return node;
            }
            TreeNode n = node.retrieveMatchingBranch(oid, cursor + 1);
            //If the returned node got a null agent, we have to replace it by
            //the current one (in case it is not null)
            //
            return n.agent == null ? this : n;
        }
        /** Returns the agent bound to this node; may be null until setAgent is called. */
        SnmpMibAgent getAgentMib() {
            return agent;
        }
        /**
         * Debug helper: prints the MIB names registered at this node, then recurses
         * into the children with an increased indent.
         */
        public void printTree(String ident) {
            StringBuffer buff= new StringBuffer();
            if (agents == null) {
                return;
            }
            for(Enumeration e= agents.elements(); e.hasMoreElements(); ) {
                SnmpMibAgent mib= (SnmpMibAgent) e.nextElement();
                if (mib == null)
                    buff.append("empty ");
                else
                    buff.append(mib.getMibName() + " ");
            }
            ident+= " ";
            if (children == null) {
                return;
            }
            for(Enumeration e= children.elements(); e.hasMoreElements(); ) {
                TreeNode node= (TreeNode) e.nextElement();
                node.printTree(ident);
            }
        }
        // PRIVATE STUFF
        //--------------
        /**
         * Only the treeNode class can create an instance of treeNode.
         * The creation occurs when registering a new oid.
         * NOTE(review): the {@code agent} field itself is NOT assigned here — it is
         * only added to the {@code agents} vector (a null agent is added for the
         * sentinel root). The field is set later via {@link #setAgent}.
         */
        private TreeNode(long nodeValue, SnmpMibAgent agent, TreeNode sup) {
            this.nodeValue= nodeValue;
            this.parent= sup;
            agents.addElement(agent);
        }
        /**
         * Recursively removes the agent from this subtree, pruning children whose
         * agent list becomes empty (collected first to avoid mutating the vector
         * while enumerating it).
         */
        private void removeAgentFully(SnmpMibAgent agent) {
            Vector<TreeNode> v = new Vector<TreeNode>();
            for(Enumeration<TreeNode> e= children.elements();
                e.hasMoreElements(); ) {
                TreeNode node= e.nextElement();
                node.removeAgentFully(agent);
                if(node.agents.isEmpty())
                    v.add(node);
            }
            for(Enumeration<TreeNode> e= v.elements(); e.hasMoreElements(); ) {
                children.removeElement(e.nextElement());
            }
            removeAgent(agent);
        }
        /**
         * Drops {@code mib} from this node's agent list and, if other agents remain,
         * rebinds the node to the first of them.
         * NOTE(review): if the list becomes empty, the {@code agent} field keeps its
         * previous (now removed) reference — callers appear to rely on pruning via
         * removeAgentFully; confirm before changing.
         */
        private void removeAgent(SnmpMibAgent mib) {
            if (!agents.contains(mib))
                return;
            agents.removeElement(mib);
            if (!agents.isEmpty())
                agent= agents.firstElement();
        }
        /** Binds this node to the given agent (the owner of the OID ending here). */
        private void setAgent(SnmpMibAgent agent) {
            this.agent = agent;
        }
        /**
         * Creates/extends the branch for {@code oid} from index {@code cursor},
         * binding {@code agent} to the node for the final OID arc.
         */
        private void registerNode(long[] oid, int cursor, SnmpMibAgent agent) {
            if (cursor >= oid.length)
                //That's it !
                //
                return;
            TreeNode child = retrieveChild(oid, cursor);
            if (child == null) {
                // Create a child and register it !
                //
                long theValue= oid[cursor];
                child= new TreeNode(theValue, agent, this);
                children.addElement(child);
            }
            else
                if (agents.contains(agent) == false) {
                    agents.addElement(agent);
                }
            // We have to set the agent attribute
            //
            if(cursor == (oid.length - 1)) {
                child.setAgent(agent);
            }
            else
                child.registerNode(oid, cursor+1, agent);
        }
        /** Returns the child whose value equals oid[current], or null if absent. */
        private TreeNode retrieveChild(long[] oid, int current) {
            long theValue= oid[current];
            for(Enumeration e= children.elements(); e.hasMoreElements(); ) {
                TreeNode node= (TreeNode) e.nextElement();
                if (node.match(theValue))
                    return node;
            }
            return null;
        }
        /** True when this node's arc value equals {@code value}. */
        final private boolean match(long value) {
            return (nodeValue == value) ? true : false;
        }
        private Vector<TreeNode> children= new Vector<TreeNode>();
        private Vector<SnmpMibAgent> agents= new Vector<SnmpMibAgent>();
        private long nodeValue;
        private SnmpMibAgent agent;
        private TreeNode parent;
    }; // end of class TreeNode
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.network.fluent.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.management.Resource;
import com.azure.core.management.SubResource;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.network.models.ManagedRulesDefinition;
import com.azure.resourcemanager.network.models.PolicySettings;
import com.azure.resourcemanager.network.models.ProvisioningState;
import com.azure.resourcemanager.network.models.WebApplicationFirewallCustomRule;
import com.azure.resourcemanager.network.models.WebApplicationFirewallPolicyResourceState;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import java.util.Map;
/** Defines web application firewall policy. */
@Fluent
public final class WebApplicationFirewallPolicyInner extends Resource {
@JsonIgnore private final ClientLogger logger = new ClientLogger(WebApplicationFirewallPolicyInner.class);
/*
* Properties of the web application firewall policy.
*/
@JsonProperty(value = "properties")
private WebApplicationFirewallPolicyPropertiesFormat innerProperties;
/*
* A unique read-only string that changes whenever the resource is updated.
*/
@JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY)
private String etag;
/*
* Resource ID.
*/
@JsonProperty(value = "id")
private String id;
/**
* Get the innerProperties property: Properties of the web application firewall policy.
*
* @return the innerProperties value.
*/
private WebApplicationFirewallPolicyPropertiesFormat innerProperties() {
return this.innerProperties;
}
/**
* Get the etag property: A unique read-only string that changes whenever the resource is updated.
*
* @return the etag value.
*/
public String etag() {
return this.etag;
}
/**
* Get the id property: Resource ID.
*
* @return the id value.
*/
public String id() {
return this.id;
}
/**
* Set the id property: Resource ID.
*
* @param id the id value to set.
* @return the WebApplicationFirewallPolicyInner object itself.
*/
public WebApplicationFirewallPolicyInner withId(String id) {
this.id = id;
return this;
}
/** {@inheritDoc} */
@Override
public WebApplicationFirewallPolicyInner withLocation(String location) {
super.withLocation(location);
return this;
}
/** {@inheritDoc} */
@Override
public WebApplicationFirewallPolicyInner withTags(Map<String, String> tags) {
super.withTags(tags);
return this;
}
/**
* Get the policySettings property: The PolicySettings for policy.
*
* @return the policySettings value.
*/
public PolicySettings policySettings() {
return this.innerProperties() == null ? null : this.innerProperties().policySettings();
}
/**
* Set the policySettings property: The PolicySettings for policy.
*
* @param policySettings the policySettings value to set.
* @return the WebApplicationFirewallPolicyInner object itself.
*/
public WebApplicationFirewallPolicyInner withPolicySettings(PolicySettings policySettings) {
if (this.innerProperties() == null) {
this.innerProperties = new WebApplicationFirewallPolicyPropertiesFormat();
}
this.innerProperties().withPolicySettings(policySettings);
return this;
}
/**
* Get the customRules property: The custom rules inside the policy.
*
* @return the customRules value.
*/
public List<WebApplicationFirewallCustomRule> customRules() {
return this.innerProperties() == null ? null : this.innerProperties().customRules();
}
/**
* Set the customRules property: The custom rules inside the policy.
*
* @param customRules the customRules value to set.
* @return the WebApplicationFirewallPolicyInner object itself.
*/
public WebApplicationFirewallPolicyInner withCustomRules(List<WebApplicationFirewallCustomRule> customRules) {
if (this.innerProperties() == null) {
this.innerProperties = new WebApplicationFirewallPolicyPropertiesFormat();
}
this.innerProperties().withCustomRules(customRules);
return this;
}
/**
* Get the applicationGateways property: A collection of references to application gateways.
*
* @return the applicationGateways value.
*/
public List<ApplicationGatewayInner> applicationGateways() {
return this.innerProperties() == null ? null : this.innerProperties().applicationGateways();
}
/**
* Get the provisioningState property: The provisioning state of the web application firewall policy resource.
*
* @return the provisioningState value.
*/
public ProvisioningState provisioningState() {
return this.innerProperties() == null ? null : this.innerProperties().provisioningState();
}
/**
* Get the resourceState property: Resource status of the policy.
*
* @return the resourceState value.
*/
public WebApplicationFirewallPolicyResourceState resourceState() {
return this.innerProperties() == null ? null : this.innerProperties().resourceState();
}
/**
* Get the managedRules property: Describes the managedRules structure.
*
* @return the managedRules value.
*/
public ManagedRulesDefinition managedRules() {
return this.innerProperties() == null ? null : this.innerProperties().managedRules();
}
/**
* Set the managedRules property: Describes the managedRules structure.
*
* @param managedRules the managedRules value to set.
* @return the WebApplicationFirewallPolicyInner object itself.
*/
public WebApplicationFirewallPolicyInner withManagedRules(ManagedRulesDefinition managedRules) {
if (this.innerProperties() == null) {
this.innerProperties = new WebApplicationFirewallPolicyPropertiesFormat();
}
this.innerProperties().withManagedRules(managedRules);
return this;
}
/**
* Get the httpListeners property: A collection of references to application gateway http listeners.
*
* @return the httpListeners value.
*/
public List<SubResource> httpListeners() {
return this.innerProperties() == null ? null : this.innerProperties().httpListeners();
}
/**
* Get the pathBasedRules property: A collection of references to application gateway path rules.
*
* @return the pathBasedRules value.
*/
public List<SubResource> pathBasedRules() {
return this.innerProperties() == null ? null : this.innerProperties().pathBasedRules();
}
/**
* Validates the instance.
*
* @throws IllegalArgumentException thrown if the instance is not valid.
*/
public void validate() {
if (innerProperties() != null) {
innerProperties().validate();
}
}
}
| |
/*
* Copyright (c) 2010-2014 Evolveum and contributors
*
* This work is dual-licensed under the Apache License 2.0
* and European Union Public License. See LICENSE file for details.
*/
package com.evolveum.midpoint.prism.foo;
import com.evolveum.midpoint.util.xml.DomAwareEqualsStrategy;
import com.evolveum.midpoint.util.xml.DomAwareHashCodeStrategy;
import org.jvnet.jaxb2_commons.lang.Equals;
import org.jvnet.jaxb2_commons.lang.EqualsStrategy;
import org.jvnet.jaxb2_commons.lang.HashCode;
import org.jvnet.jaxb2_commons.lang.HashCodeStrategy;
import org.jvnet.jaxb2_commons.locator.ObjectLocator;
import org.jvnet.jaxb2_commons.locator.util.LocatorUtils;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* <p>Java class for EventOperationFilterType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="EventOperationFilterType">
* <complexContent>
* <extension base="{http://midpoint.evolveum.com/xml/ns/public/common/common-3}EventHandlerType">
* <sequence>
* <element name="operation" type="{http://midpoint.evolveum.com/xml/ns/public/common/common-3}EventOperationType" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "EventOperationFilterType", propOrder = {
    "operation"
})
public class EventOperationFilterType
    extends EventHandlerType
    implements Serializable, Cloneable, Equals, HashCode
{
    private final static long serialVersionUID = 201105211233L;
    // Lazily initialized in getOperation(); may be null until first access.
    protected List<String> operation;
    /**
     * Creates a new {@code EventOperationFilterType} instance.
     *
     */
    public EventOperationFilterType() {
        // CC-XJC Version 2.0 Build 2011-09-16T18:27:24+0000
        super();
    }
    /**
     * Creates a new {@code EventOperationFilterType} instance by deeply copying a given {@code EventOperationFilterType} instance.
     *
     *
     * @param o
     *     The instance to copy.
     * @throws NullPointerException
     *     if {@code o} is {@code null}.
     */
    public EventOperationFilterType(final EventOperationFilterType o) {
        // CC-XJC Version 2.0 Build 2011-09-16T18:27:24+0000
        // Note: super(o) runs before the explicit null check below; a null 'o'
        // will already fail in the superclass copy constructor.
        super(o);
        if (o == null) {
            throw new NullPointerException("Cannot create a copy of 'EventOperationFilterType' from 'null'.");
        }
        // 'Operation' collection.
        if (o.operation!= null) {
            copyOperation(o.getOperation(), this.getOperation());
        }
    }
    /**
     * Gets the value of the operation property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the operation property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getOperation().add(newItem);
     * </pre>
     *
     *
     * <p>
     * The list holds {@link String} values — presumably the string forms of
     * {@code EventOperationType} enum constants (see the schema fragment in the
     * class javadoc); confirm against the XJC enum mapping.
     *
     *
     */
    public List<String> getOperation() {
        if (operation == null) {
            operation = new ArrayList<>();
        }
        return this.operation;
    }
    /**
     * Computes a hash code for this object using the given strategy, combining
     * the superclass hash with the {@code operation} list (treated as null when
     * empty, mirroring {@link #equals}).
     *
     */
    public int hashCode(ObjectLocator locator, HashCodeStrategy strategy) {
        int currentHashCode = super.hashCode(locator, strategy);
        {
            List<String> theOperation;
            theOperation = (((this.operation!= null)&&(!this.operation.isEmpty()))?this.getOperation():null);
            currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "operation", theOperation), currentHashCode, theOperation);
        }
        return currentHashCode;
    }
    // Delegates to the strategy-based hashCode with the DOM-aware default strategy.
    public int hashCode() {
        final HashCodeStrategy strategy = DomAwareHashCodeStrategy.INSTANCE;
        return this.hashCode(null, strategy);
    }
    // Strategy-based equality: superclass state plus the 'operation' list
    // (an empty list compares equal to null, matching hashCode above).
    public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy strategy) {
        if (!(object instanceof EventOperationFilterType)) {
            return false;
        }
        if (this == object) {
            return true;
        }
        if (!super.equals(thisLocator, thatLocator, object, strategy)) {
            return false;
        }
        final EventOperationFilterType that = ((EventOperationFilterType) object);
        {
            List<String> lhsOperation;
            lhsOperation = (((this.operation!= null)&&(!this.operation.isEmpty()))?this.getOperation():null);
            List<String> rhsOperation;
            rhsOperation = (((that.operation!= null)&&(!that.operation.isEmpty()))?that.getOperation():null);
            if (!strategy.equals(LocatorUtils.property(thisLocator, "operation", lhsOperation), LocatorUtils.property(thatLocator, "operation", rhsOperation), lhsOperation, rhsOperation)) {
                return false;
            }
        }
        return true;
    }
    // Delegates to the strategy-based equals with the DOM-aware default strategy.
    public boolean equals(Object object) {
        final EqualsStrategy strategy = DomAwareEqualsStrategy.INSTANCE;
        return equals(null, null, object, strategy);
    }
    /**
     * Copies all values of property {@code Operation} deeply.
     *
     * @param source
     *     The source to copy from.
     * @param target
     *     The target to copy {@code source} to.
     * @throws NullPointerException
     *     if {@code target} is {@code null}.
     */
    @SuppressWarnings("unchecked")
    private static void copyOperation(final List<String> source, final List<String> target) {
        // CC-XJC Version 2.0 Build 2011-09-16T18:27:24+0000
        if ((source!= null)&&(!source.isEmpty())) {
            for (final Iterator<?> it = source.iterator(); it.hasNext(); ) {
                final Object next = it.next();
                if (next instanceof String) {
                    // CEnumLeafInfo: com.evolveum.midpoint.xml.ns._public.common.common_3.EventOperationType
                    // Strings are immutable, so "deep copy" is just adding the reference.
                    target.add(((String) next));
                    continue;
                }
                // Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
                throw new AssertionError((("Unexpected instance '"+ next)+"' for property 'Operation' of class 'com.evolveum.midpoint.xml.ns._public.common.common_3.EventOperationFilterType'."));
            }
        }
    }
    /**
     * Creates and returns a deep copy of this object.
     *
     *
     * @return
     *     A deep copy of this object.
     */
    @Override
    public EventOperationFilterType clone() {
        {
            // CC-XJC Version 2.0 Build 2011-09-16T18:27:24+0000
            final EventOperationFilterType clone = ((EventOperationFilterType) super.clone());
            // 'Operation' collection.
            // Reset the cloned list first so getOperation() allocates a fresh one.
            if (this.operation!= null) {
                clone.operation = null;
                copyOperation(this.getOperation(), clone.getOperation());
            }
            return clone;
        }
    }
    @Override
    public String toString() {
        return "EventOperationFilterType{" +
                "operation=" + operation +
                '}';
    }
}
| |
package org.greencheek.catalina.valves;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import java.util.concurrent.ExecutorService;
import javax.servlet.ServletException;
import com.lmax.disruptor.EventFactory;
import com.lmax.disruptor.EventHandler;
import com.lmax.disruptor.IgnoreExceptionHandler;
import com.lmax.disruptor.dsl.Disruptor;
import com.lmax.disruptor.dsl.ProducerType;
import org.apache.catalina.AccessLog;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.LifecycleState;
import org.apache.catalina.connector.Request;
import org.apache.catalina.connector.Response;
import org.apache.catalina.valves.Constants;
import org.apache.catalina.valves.ValveBase;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.greencheek.util.Util;
import org.greencheek.logging.accesscomponents.*;
import org.greencheek.logging.disruptor.DefaultWaitStrategyFactory;
import org.greencheek.logging.disruptor.LogMessageEventTranslator;
import org.greencheek.logging.disruptor.WaitStrategyFactory;
import org.greencheek.logging.domin.ResizableCharBuffer;
import static java.util.concurrent.Executors.newSingleThreadExecutor;
import org.greencheek.logging.disruptor.io.*;
/**
*
* Modification of the original AccessLogValue from tomcat 7, but changed to use
* a distruptor for writing to the access log. This removes the synchronization blocks
* the original access log implementation has around the write to disk
*
*
* <p>Implementation of the <b>Valve</b> interface that generates a web server
* access log with the detailed line contents matching a configurable pattern.
* The syntax of the available patterns is similar to that supported by the
* <a href="http://httpd.apache.org/">Apache HTTP Server</a>
* <code>mod_log_config</code> module. As an additional feature,
* automatic rollover of log files when the date changes is also supported.</p>
*
* <p>Patterns for the logged message may include constant text or any of the
* following replacement strings, for which the corresponding information
* from the specified Response is substituted:</p>
* <ul>
* <li><b>%a</b> - Remote IP address
* <li><b>%A</b> - Local IP address
* <li><b>%b</b> - Bytes sent, excluding HTTP headers, or '-' if no bytes
* were sent
* <li><b>%B</b> - Bytes sent, excluding HTTP headers
* <li><b>%h</b> - Remote host name (or IP address if
* <code>enableLookups</code> for the connector is false)
* <li><b>%H</b> - Request protocol
* <li><b>%l</b> - Remote logical username from identd (always returns '-')
* <li><b>%m</b> - Request method
* <li><b>%p</b> - Local port
* <li><b>%q</b> - Query string (prepended with a '?' if it exists, otherwise
* an empty string
* <li><b>%r</b> - First line of the request
* <li><b>%s</b> - HTTP status code of the response
* <li><b>%S</b> - User session ID
* <li><b>%t</b> - Date and time, in Common Log Format format
* <li><b>%t{format}</b> - Date and time, in any format supported by SimpleDateFormat
* <li><b>%u</b> - Remote user that was authenticated
* <li><b>%U</b> - Requested URL path
* <li><b>%v</b> - Local server name
* <li><b>%D</b> - Time taken to process the request, in millis
* <li><b>%T</b> - Time taken to process the request, in seconds
* <li><b>%I</b> - current Request thread name (can compare later with stacktraces)
* </ul>
* <p>In addition, the caller can specify one of the following aliases for
* commonly utilized patterns:</p>
* <ul>
* <li><b>common</b> - <code>%h %l %u %t "%r" %s %b</code>
* <li><b>combined</b> -
* <code>%h %l %u %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"</code>
* </ul>
*
* <p>
* There is also support to write information from the cookie, incoming
* header, the Session or something else in the ServletRequest.<br>
* It is modeled after the
* <a href="http://httpd.apache.org/">Apache HTTP Server</a> log configuration
* syntax:</p>
* <ul>
* <li><code>%{xxx}i</code> for incoming headers
* <li><code>%{xxx}o</code> for outgoing response headers
* <li><code>%{xxx}c</code> for a specific cookie
* <li><code>%{xxx}r</code> xxx is an attribute in the ServletRequest
* <li><code>%{xxx}s</code> xxx is an attribute in the HttpSession
* <li><code>%{xxx}t</code> xxx is an enhanced SimpleDateFormat pattern
* (see Configuration Reference document for details on supported time patterns)
* </ul>
*
* <p>
* Log rotation can be on or off. This is dictated by the
* <code>rotatable</code> property.
* </p>
*
* <p>
* For UNIX users, another field called <code>checkExists</code> is also
* available. If set to true, the log file's existence will be checked before
* each logging. This way an external log rotator can move the file
* somewhere and Tomcat will start with a new file.
* </p>
*
* <p>
* For JMX junkies, a public method called <code>rotate</code> has
* been made available to allow you to tell this instance to move
* the existing log file to somewhere else and start writing a new log file.
* </p>
*
* <p>
* Conditional logging is also supported. This can be done with the
* <code>conditionUnless</code> and <code>conditionIf</code> properties.
* If the value returned from ServletRequest.getAttribute(conditionUnless)
* yields a non-null value, the logging will be skipped.
* If the value returned from ServletRequest.getAttribute(conditionIf)
* yields the null value, the logging will be skipped.
* The <code>condition</code> attribute is synonym for
* <code>conditionUnless</code> and is provided for backwards compatibility.
* </p>
*
* <p>
* For extended attributes coming from a getAttribute() call,
* it is you responsibility to ensure there are no newline or
* control characters.
* </p>
*
* @author Craig R. McClanahan
* @author Jason Brittain
* @author Remy Maucherat
* @author Takayuki Kaneko
* @author Peter Rossbach
*
* @version $Id$
*/
public class DisruptorAccessLogValve extends ValveBase implements AccessLog {
    private static final Log log = LogFactory.getLog(DisruptorAccessLogValve.class);
    // Single consumer thread that drains the disruptor ring buffer to disk.
    private final ExecutorService executorService = newSingleThreadExecutor();
    private Disruptor<ResizableCharBuffer> disruptor;
    // Number of slots in the disruptor ring buffer (must be a power of two).
    // At the default message size this is roughly 22mb: a ResizableCharBuffer
    // of 256 chars is about 680 bytes.
    private int bufferSize = 32768;
    // Strategy the consumer uses while waiting for log events; sleeping by default.
    private WaitStrategyFactory waitStrategyFactory = new DefaultWaitStrategyFactory(DefaultWaitStrategyFactory.WAIT_STRATEGY_TYPE.SLEEPING);
    // Raw configuration string last passed to setWaitStrategy (may be null).
    private String waitStrategy;
    LogEventHandler eventHandler;
    // Initial capacity (in chars) of each pre-allocated log message buffer.
    private int messageSize = 256;
    // The page size of the os. The size of the buffer
    // before an io write is committed.
    private int ioPageSize = 4096;
    // Pre-allocate a 100 mb access log file up front.
    private int ioPreallocatedFileSize = 1024*1024*100;
    //------------------------------------------------------ Constructor
    public DisruptorAccessLogValve() {
        // NOTE(review): 'true' is forwarded to the ValveBase(boolean) constructor —
        // presumably enabling async request support; confirm against the Tomcat
        // ValveBase javadoc for the version in use.
        super(true);
    }
    // ----------------------------------------------------- Instance Variables
    /**
     * The as-of date for the currently open log file, or a zero-length
     * string if there is no open log file.
     */
    private volatile String dateStamp = "";
    /**
     * The directory in which log files are created.
     */
    private String directory = "logs";
    /**
     * The descriptive information about this implementation.
     */
    protected static final String info =
        "org.greencheek.catalina.valves.DisruptorAccessLogValve/1.0";
    /**
     * enabled this component
     */
    protected boolean enabled = true;
    /**
     * The pattern used to format our access log lines.
     */
    protected String pattern = null;
    /**
     * The prefix that is added to log file filenames.
     */
    protected String prefix = "access_log.";
    /**
     * Should we rotate our log file? Default is true (like old behavior)
     */
    protected boolean rotatable = true;
    /**
     * Should we defer inclusion of the date stamp in the file
     * name until rotate time? Default is false.
     */
    protected boolean renameOnRotate = false;
    /**
     * Buffered logging.
     */
    private boolean buffered = true;
    /**
     * The suffix that is added to log file filenames.
     */
    protected String suffix = "";
    /**
     * The PrintWriter to which we are currently logging, if any.
     */
    protected PrintWriter writer = null;
    /**
     * A date formatter to format a Date using the format
     * given by <code>fileDateFormat</code>.
     */
    protected SimpleDateFormat fileDateFormatter = null;
    /**
     * The current log file we are writing to. Helpful when checkExists
     * is true.
     */
    protected File currentLogFile = null;
    /**
     * Per-thread reusable {@link Date} instance, avoiding allocation and the
     * thread-safety issues of sharing a single mutable Date across request
     * threads when stamping log lines.
     */
    private static final ThreadLocal<Date> localDate = new ThreadLocal<Date>() {
        @Override
        protected Date initialValue() {
            return new Date();
        }
    };
    /**
     * Per-thread disruptor translator — presumably used to copy a formatted log
     * message into the ResizableCharBuffer slot claimed from the ring buffer;
     * confirm against LogMessageEventTranslator.
     */
    private static final ThreadLocal<LogMessageEventTranslator> logEventTranslator = new ThreadLocal<LogMessageEventTranslator>() {
        @Override
        protected LogMessageEventTranslator initialValue() {
            return new LogMessageEventTranslator();
        }
    };
    /**
     * Resolve hosts.
     */
    private boolean resolveHosts = false;
    /**
     * Instant when the log daily rotation was last checked.
     */
    private volatile long rotationLastChecked = 0L;
    /**
     * Do we check for log file existence? Helpful if an external
     * agent renames the log file so we can automagically recreate it.
     */
    private boolean checkExists = false;
    /**
     * Are we doing conditional logging. default null.
     * It is the value of <code>conditionUnless</code> property.
     */
    protected String condition = null;
    /**
     * Are we doing conditional logging. default null.
     * It is the value of <code>conditionIf</code> property.
     */
    protected String conditionIf = null;
    /**
     * Date format to place in log file name.
     */
    protected String fileDateFormat = "yyyy-MM-dd";
    /**
     * Name of locale used to format timestamps in log entries and in
     * log file name suffix.
     */
    protected String localeName = Locale.getDefault().toString();
    /**
     * Locale used to format timestamps in log entries and in
     * log file name suffix.
     */
    protected Locale locale = Locale.getDefault();
    /**
     * Character set used by the log file. If it is <code>null</code>, the
     * system default character set will be used. An empty string will be
     * treated as <code>null</code> when this property is assigned.
     */
    protected String encoding = null;
    /**
     * Array of AccessLogElement, they will be used to make log message.
     */
    protected AccessLogElement[] logElements = null;
    /**
     * @see #setRequestAttributesEnabled(boolean)
     */
    protected boolean requestAttributesEnabled = false;
    // ------------------------------------------------------------- Properties
public int getIoPreallocatedFileSize() {
return ioPreallocatedFileSize;
}
public void setIoPreallocatedFileSize(int size) {
this.ioPreallocatedFileSize = size;
}
public int getIoPageSize() {
return ioPageSize;
}
public void setIoPageSize(int ioPageSize) {
this.ioPageSize = ioPageSize;
}
public String getWaitStrategy() {
return waitStrategy;
}
public void setWaitStrategy(String waitStrategy) {
this.waitStrategy = waitStrategy;
if(waitStrategy.contains("yield")) {
waitStrategyFactory = new DefaultWaitStrategyFactory(DefaultWaitStrategyFactory.WAIT_STRATEGY_TYPE.YIELDING);
}
else if(waitStrategy.contains("block")) {
waitStrategyFactory = new DefaultWaitStrategyFactory(DefaultWaitStrategyFactory.WAIT_STRATEGY_TYPE.BLOCKING);
}
else if(waitStrategy.contains("sleep")) {
waitStrategyFactory = new DefaultWaitStrategyFactory(DefaultWaitStrategyFactory.WAIT_STRATEGY_TYPE.SLEEPING);
}
else if (waitStrategy.contains("busy")) {
waitStrategyFactory = new DefaultWaitStrategyFactory(DefaultWaitStrategyFactory.WAIT_STRATEGY_TYPE.BUSY);
}
else {
waitStrategyFactory = new DefaultWaitStrategyFactory(DefaultWaitStrategyFactory.WAIT_STRATEGY_TYPE.YIELDING);
}
}
/**
 * Return the requested disruptor ring-buffer size (slots; rounded up to
 * a power of two at start time).
 */
public int getBufferSize() {
    return bufferSize;
}
/**
 * Set the requested disruptor ring-buffer size in slots.
 *
 * @param bufferSize number of ring-buffer slots
 */
public void setBufferSize(int bufferSize) {
    this.bufferSize = bufferSize;
}
/**
 * Return the initial capacity (chars) of each ring-buffer message slot.
 */
public int getMessageSize() {
    return messageSize;
}
/**
 * Set the initial capacity (chars) of each ring-buffer message slot.
 *
 * @param size initial buffer capacity per message
 */
public void setMessageSize(int size) {
    this.messageSize = size;
}
/**
 * @return Returns the enabled.
 */
public boolean getEnabled() {
    return enabled;
}
/**
 * {@inheritDoc}
 */
@Override
public void setRequestAttributesEnabled(boolean requestAttributesEnabled) {
    this.requestAttributesEnabled = requestAttributesEnabled;
}
/**
 * {@inheritDoc}
 */
@Override
public boolean getRequestAttributesEnabled() {
    return requestAttributesEnabled;
}
/**
 * @param enabled
 *            The enabled to set.
 */
public void setEnabled(boolean enabled) {
    this.enabled = enabled;
}
/**
 * Return the directory in which we create log files.
 */
public String getDirectory() {
    return (directory);
}
/**
 * Set the directory in which we create log files.
 *
 * @param directory The new log file directory
 */
public void setDirectory(String directory) {
    this.directory = directory;
}
/**
 * Return descriptive information about this implementation.
 */
@Override
public String getInfo() {
    return (info);
}
/**
 * Return the format pattern.
 */
public String getPattern() {
    return (this.pattern);
}
/**
 * Set the format pattern, first translating any recognized alias
 * ("common"/"combined") into its full pattern string. A null pattern is
 * stored as the empty string. The log-element array is rebuilt
 * immediately to reflect the new pattern.
 *
 * @param pattern The new pattern, or a known alias, or null
 */
public void setPattern(String pattern) {
    String resolved;
    if (pattern == null) {
        resolved = "";
    } else if (pattern.equals(Constants.AccessLog.COMMON_ALIAS)) {
        resolved = Constants.AccessLog.COMMON_PATTERN;
    } else if (pattern.equals(Constants.AccessLog.COMBINED_ALIAS)) {
        resolved = Constants.AccessLog.COMBINED_PATTERN;
    } else {
        resolved = pattern;
    }
    this.pattern = resolved;
    // Re-parse the pattern into its renderer elements right away.
    logElements = createLogElements();
}
/**
 * Check for file existence before logging.
 */
public boolean isCheckExists() {
    return checkExists;
}
/**
 * Set whether to check for log file existence before logging.
 *
 * @param checkExists true meaning to check for file existence.
 */
public void setCheckExists(boolean checkExists) {
    this.checkExists = checkExists;
}
/**
 * Return the log file prefix.
 */
public String getPrefix() {
    return (prefix);
}
/**
 * Set the log file prefix.
 *
 * @param prefix The new log file prefix
 */
public void setPrefix(String prefix) {
    this.prefix = prefix;
}
/**
 * Should we rotate the logs
 */
public boolean isRotatable() {
    return rotatable;
}
/**
 * Set the value is we should we rotate the logs
 *
 * @param rotatable true is we should rotate.
 */
public void setRotatable(boolean rotatable) {
    this.rotatable = rotatable;
}
/**
 * Should we defer inclusion of the date stamp in the file
 * name until rotate time
 */
public boolean isRenameOnRotate() {
    return renameOnRotate;
}
/**
 * Set the value if we should defer inclusion of the date
 * stamp in the file name until rotate time
 *
 * @param renameOnRotate true if defer inclusion of date stamp
 */
public void setRenameOnRotate(boolean renameOnRotate) {
    this.renameOnRotate = renameOnRotate;
}
/**
 * Is the logging buffered
 */
public boolean isBuffered() {
    return buffered;
}
/**
 * Set the value if the logging should be buffered
 *
 * @param buffered true if buffered.
 */
public void setBuffered(boolean buffered) {
    this.buffered = buffered;
}
/**
 * Return the log file suffix.
 */
public String getSuffix() {
    return (suffix);
}
/**
 * Set the log file suffix.
 *
 * @param suffix The new log file suffix
 */
public void setSuffix(String suffix) {
    this.suffix = suffix;
}
/**
 * Set the resolve hosts flag.
 *
 * @param resolveHosts The new resolve hosts value
 * @deprecated Unused, removed in Tomcat 8.
 * See org.apache.catalina.connector.Connector.setEnableLookups(boolean).
 */
@Deprecated
public void setResolveHosts(boolean resolveHosts) {
    this.resolveHosts = resolveHosts;
}
/**
 * Get the value of the resolve hosts flag.
 * @deprecated Unused, removed in Tomcat 8.
 * See org.apache.catalina.connector.Connector.setEnableLookups(boolean).
 */
@Deprecated
public boolean isResolveHosts() {
    return resolveHosts;
}
/**
 * Return the attribute name to look for when performing
 * conditional logging. If null, every request is logged.
 */
public String getCondition() {
    return condition;
}
/**
 * Set the ServletRequest.attribute to look for to perform
 * conditional logging. Set to null to log everything.
 *
 * @param condition Set to null to log everything
 */
public void setCondition(String condition) {
    this.condition = condition;
}
/**
 * Return the attribute name to look for when performing
 * conditional logging. If null, every request is logged.
 * Alias for {@link #getCondition()}.
 */
public String getConditionUnless() {
    return getCondition();
}
/**
 * Set the ServletRequest.attribute to look for to perform
 * conditional logging. Set to null to log everything.
 * Alias for {@link #setCondition(String)}.
 *
 * @param condition Set to null to log everything
 */
public void setConditionUnless(String condition) {
    setCondition(condition);
}
/**
 * Return the attribute name that must be present for a request to be
 * logged. If null, every request is logged.
 */
public String getConditionIf() {
    return conditionIf;
}
/**
 * Set the ServletRequest.attribute that must be present for a request
 * to be logged. Set to null to log everything.
 *
 * @param condition Set to null to log everything
 */
public void setConditionIf(String condition) {
    this.conditionIf = condition;
}
/**
 * Return the date format date based log rotation.
 */
public String getFileDateFormat() {
    return fileDateFormat;
}
/**
 * Set the date format date based log rotation. A null argument is
 * stored as the empty string. The shared SimpleDateFormat is rebuilt
 * under the instance lock because SimpleDateFormat is not thread-safe.
 */
public void setFileDateFormat(String fileDateFormat) {
    String newFormat;
    if (fileDateFormat == null) {
        newFormat = "";
    } else {
        newFormat = fileDateFormat;
    }
    this.fileDateFormat = newFormat;
    synchronized (this) {
        // File-name dates always use Locale.US so rotated file names are
        // stable regardless of the configured log locale.
        fileDateFormatter = new SimpleDateFormat(newFormat, Locale.US);
        fileDateFormatter.setTimeZone(TimeZone.getDefault());
    }
}
/**
 * Return the locale used to format timestamps in log entries and in
 * log file name suffix.
 */
public String getLocale() {
    return localeName;
}
/**
 * Set the locale used to format timestamps in log entries and in
 * log file name suffix. Changing the locale is only supported
 * as long as the AccessLogValve has not logged anything. Changing
 * the locale later can lead to inconsistent formatting.
 *
 * @param localeName The locale to use.
 */
public void setLocale(String localeName) {
    this.localeName = localeName;
    // Keep the previous Locale if the name cannot be resolved.
    locale = findLocale(localeName, locale);
}
/**
 * Return the character set name that is used to write the log file.
 *
 * @return Character set name, or <code>null</code> if the system default
 *         character set is used.
 */
public String getEncoding() {
    return encoding;
}
/**
 * Set the character set that is used to write the log file. An empty
 * string is normalized to <code>null</code> (system default charset).
 *
 * @param encoding The name of the character set.
 */
public void setEncoding(String encoding) {
    if (encoding != null && encoding.length() > 0) {
        this.encoding = encoding;
    } else {
        this.encoding = null;
    }
}
// --------------------------------------------------------- Public Methods
/**
 * Execute a periodic task, such as reloading, etc. This method will be
 * invoked inside the classloading context of this container. Unexpected
 * throwables will be caught and logged.
 * Here it periodically flushes the buffered writer so buffered log data
 * does not sit unwritten indefinitely.
 */
@Override
public synchronized void backgroundProcess() {
    // Only flush while started, enabled, buffering and a writer exists.
    if (getState().isAvailable() && getEnabled() && writer != null &&
            buffered) {
        writer.flush();
    }
}
/**
 * Log a message summarizing the specified request and response, according
 * to the format specified by the <code>pattern</code> property.
 * This valve does no work inline; actual logging happens in
 * {@link #log(Request, Response, long)} after the pipeline completes.
 *
 * @param request Request being processed
 * @param response Response being processed
 *
 * @exception IOException if an input/output error has occurred
 * @exception ServletException if a servlet error has occurred
 */
@Override
public void invoke(Request request, Response response) throws IOException,
        ServletException {
    getNext().invoke(request, response);
}
/**
 * Publish one access-log event to the disruptor ring buffer.
 * The request is skipped when the valve is stopped/disabled, when no
 * pattern elements are configured, when the "unless" condition attribute
 * is present, or when the "if" condition attribute is absent.
 */
@Override
public void log(Request request, Response response, long time) {
    // Note operator precedence: each condition pair binds as
    // (condX != null && attribute check), OR-ed together.
    if (!getState().isAvailable() || !getEnabled() || logElements == null
            || condition != null
            && null != request.getRequest().getAttribute(condition)
            || conditionIf != null
            && null == request.getRequest().getAttribute(conditionIf)) {
        return;
    }
    /**
     * XXX This is a bit silly, but we want to have start and stop time and
     * duration consistent. It would be better to keep start and stop
     * simply in the request and/or response object and remove time
     * (duration) from the interface.
     */
    long start = request.getCoyoteRequest().getStartTime();
    // Reuse the per-thread translator to avoid per-request allocation;
    // publishEvent copies the data into a ring-buffer slot.
    LogMessageEventTranslator translator = logEventTranslator.get();
    translator.setLogElements(logElements);
    translator.setLogDate(start + time);
    translator.setRequest(request);
    translator.setResponse(response);
    translator.setTime(time);
    disruptor.publishEvent(translator);
}
// -------------------------------------------------------- Private Methods
/**
 * This method returns a ThreadLocal Date object that is set to the
 * specified time. This saves creating a new Date object for every request.
 * Callers must not retain the returned Date: it is shared per thread and
 * mutated on every call.
 *
 * @param systime epoch milliseconds to set on the shared Date
 * @return Date the thread-local Date, updated to {@code systime}
 */
private static Date getDate(long systime) {
    Date date = localDate.get();
    date.setTime(systime);
    return date;
}
/**
 * Resolve a Locale from its {@code toString()} name.
 * A null or empty name yields the JVM default locale; an unknown name is
 * logged as an error and the supplied fallback is returned instead.
 *
 * @param name locale name as produced by {@link Locale#toString()}
 * @param fallback locale to return when the name is unrecognized
 * @return the matching Locale, the default, or the fallback
 */
protected static Locale findLocale(String name, Locale fallback) {
    // Blank name: use the platform default rather than the fallback.
    if (name == null || name.isEmpty()) {
        return Locale.getDefault();
    }
    Locale[] available = Locale.getAvailableLocales();
    for (int i = 0; i < available.length; i++) {
        Locale candidate = available[i];
        if (candidate.toString().equals(name)) {
            return candidate;
        }
    }
    // No installed locale matched: report and keep the previous value.
    log.error(sm.getString("accessLogValve.invalidLocale", name));
    return fallback;
}
/**
 * Start this component and implement the requirements
 * of {@link org.apache.catalina.util.LifecycleBase#startInternal()}.
 * Builds the disruptor (ring-buffer size rounded up to a power of two),
 * wires the single file-writing event handler, and starts consumption.
 *
 * @exception LifecycleException if this component detects a fatal error
 *  that prevents this component from being used
 */
@Override
protected synchronized void startInternal() throws LifecycleException {
    // Each ring-buffer slot pre-allocates a char buffer of messageSize.
    disruptor = new Disruptor<ResizableCharBuffer>(
            new EventFactory<ResizableCharBuffer>() {
                @Override
                public ResizableCharBuffer newInstance() {
                    return new ResizableCharBuffer(messageSize);
                }
            },
            Util.ceilingNextPowerOfTwo(bufferSize), executorService,
            ProducerType.MULTI, waitStrategyFactory.createWaitStrategy());
    // The handler owns the actual file I/O: rotation, rename-on-rotate,
    // preallocation and optional existence checks.
    eventHandler = new LogEventHandler(
            sm,encoding,log,getFileDateFormat(),
            directory,rotatable,renameOnRotate,
            !buffered,ioPageSize,ioPreallocatedFileSize,suffix,prefix,checkExists);
    // Exceptions from the handler are swallowed so logging failures never
    // take down request processing.
    disruptor.handleExceptionsWith(new IgnoreExceptionHandler());
    disruptor.handleEventsWith(new EventHandler[] {eventHandler});
    disruptor.start();
    setState(LifecycleState.STARTING);
}
/**
 * Stop this component and implement the requirements
 * of {@link org.apache.catalina.util.LifecycleBase#stopInternal()}.
 * Shuts down the handler, its executor and the disruptor, in that order.
 *
 * @exception LifecycleException if this component detects a fatal error
 *  that prevents this component from being used
 */
@Override
protected synchronized void stopInternal() throws LifecycleException {
    eventHandler.shutdown();
    executorService.shutdownNow();
    disruptor.shutdown();
    setState(LifecycleState.STOPPING);
}
/**
 * parse pattern string and create the array of AccessLogElement.
 * Literal runs between '%' codes become StringElement/CharElement;
 * "%{name}x" forms become header-style elements; a lone "%x" becomes a
 * simple element. An unterminated "%{..." falls back to treating '{' as
 * a plain conversion character.
 */
protected AccessLogElement[] createLogElements() {
    List<AccessLogElement> list = new ArrayList<AccessLogElement>();
    // True when the previous character was '%', i.e. the current
    // character is a conversion code.
    boolean replace = false;
    // Accumulates the current literal run between conversion codes.
    StringBuilder buf = new StringBuilder();
    for (int i = 0; i < pattern.length(); i++) {
        char ch = pattern.charAt(i);
        if (replace) {
            /*
             * For code that processes {, the behavior will be ... if I do
             * not encounter a closing } - then I ignore the {
             */
            if ('{' == ch) {
                StringBuilder name = new StringBuilder();
                int j = i + 1;
                // Collect the {name} up to (not including) '}'.
                for (; j < pattern.length() && '}' != pattern.charAt(j); j++) {
                    name.append(pattern.charAt(j));
                }
                if (j + 1 < pattern.length()) {
                    /* the +1 was to account for } which we increment now */
                    j++;
                    // pattern.charAt(j) is the conversion code after '}'.
                    list.add(createAccessLogElement(name.toString(),
                            pattern.charAt(j)));
                    i = j; /* Since we walked more than one character */
                } else {
                    // D'oh - end of string - pretend we never did this
                    // and do processing the "old way"
                    list.add(createAccessLogElement(ch));
                }
            } else {
                list.add(createAccessLogElement(ch));
            }
            replace = false;
        } else if (ch == '%') {
            replace = true;
            // Flush the accumulated literal before the conversion code;
            // single characters use the cheaper CharElement.
            String characters = buf.toString();
            if(characters.length()>1) {
                list.add(new StringElement(characters));
            } else {
                if(characters.length()==1) {
                    list.add(new CharElement(characters.charAt(0)));
                }
            }
            buf = new StringBuilder();
        } else {
            buf.append(ch);
        }
    }
    // Flush any trailing literal after the last conversion code.
    if (buf.length() > 1) {
        list.add(new StringElement(buf.toString()));
    } else {
        if(buf.length()==1) {
            list.add(new CharElement(buf.charAt(0)));
        }
    }
    return list.toArray(new AccessLogElement[0]);
}
/**
 * create an AccessLogElement implementation which needs header string,
 * i.e. the "%{name}x" pattern forms: request header (i), cookie (c),
 * response header (o), request attribute (r), session attribute (s),
 * or custom-format timestamp (t). Unknown codes render as "???".
 */
protected AccessLogElement createAccessLogElement(String header, char pattern) {
    switch (pattern) {
    case 'i':
        return new HeaderElement(header);
    case 'c':
        return new CookieElement(header);
    case 'o':
        return new ResponseHeaderElement(header);
    case 'r':
        return new RequestAttributeElement(header);
    case 's':
        return new SessionAttributeElement(header);
    case 't':
        return new DateAndTimeElement(header);
    default:
        return new StringElement("???");
    }
}
/**
 * create an AccessLogElement implementation for a single-character "%x"
 * conversion code. Unknown codes render literally as "???x???" so a
 * misconfigured pattern is visible in the log output.
 */
protected AccessLogElement createAccessLogElement(char pattern) {
    switch (pattern) {
    case 'a':
        return new RemoteAddrElement(requestAttributesEnabled);
    case 'A':
        return new LocalAddrElement();
    case 'b':
        // 'b' logs "-" for zero bytes, 'B' logs the number itself.
        return new ByteSentElement(true);
    case 'B':
        return new ByteSentElement(false);
    case 'D':
        // 'D' = elapsed time in millis, 'T' = elapsed time in seconds.
        return new ElapsedTimeElement(true);
    case 'F':
        return new FirstByteTimeElement();
    case 'h':
        return new HostElement(requestAttributesEnabled);
    case 'H':
        return new ProtocolElement(requestAttributesEnabled);
    case 'l':
        return new LogicalUserNameElement();
    case 'm':
        return new MethodElement();
    case 'p':
        return new LocalPortElement(requestAttributesEnabled);
    case 'q':
        return new QueryElement();
    case 'r':
        return new RequestElement();
    case 's':
        return new HttpStatusCodeElement();
    case 'S':
        return new SessionIdElement();
    case 't':
        return new DateAndTimeElement();
    case 'T':
        return new ElapsedTimeElement(false);
    case 'u':
        return new UserElement();
    case 'U':
        return new RequestURIElement();
    case 'v':
        return new LocalServerNameElement();
    case 'I':
        return new ThreadNameElement();
    default:
        return new StringElement("???" + pattern + "???");
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.SecurityUtilTestHelper;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.LogAggregationContext;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.api.ContainerType;
import org.apache.hadoop.yarn.server.resourcemanager.MockAM;
import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl;
import org.apache.hadoop.yarn.server.resourcemanager.RMSecretManagerService;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.NullRMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppReport;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
public class TestContainerAllocation {
private static final Log LOG = LogFactory
.getLog(TestContainerAllocation.class);
private final int GB = 1024;
private YarnConfiguration conf;
RMNodeLabelsManager mgr;
@Before
public void setUp() throws Exception {
conf = new YarnConfiguration();
conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
ResourceScheduler.class);
mgr = new NullRMNodeLabelsManager();
mgr.init(conf);
}
@Test(timeout = 3000000)
public void testExcessReservationThanNodeManagerCapacity() throws Exception {
@SuppressWarnings("resource")
MockRM rm = new MockRM(conf);
rm.start();
// Register node1
MockNM nm1 = rm.registerNode("127.0.0.1:1234", 2 * GB, 4);
MockNM nm2 = rm.registerNode("127.0.0.1:2234", 3 * GB, 4);
nm1.nodeHeartbeat(true);
nm2.nodeHeartbeat(true);
// wait..
int waitCount = 20;
int size = rm.getRMContext().getRMNodes().size();
while ((size = rm.getRMContext().getRMNodes().size()) != 2
&& waitCount-- > 0) {
LOG.info("Waiting for node managers to register : " + size);
Thread.sleep(100);
}
Assert.assertEquals(2, rm.getRMContext().getRMNodes().size());
// Submit an application
RMApp app1 = rm.submitApp(128);
// kick the scheduling
nm1.nodeHeartbeat(true);
RMAppAttempt attempt1 = app1.getCurrentAppAttempt();
MockAM am1 = rm.sendAMLaunched(attempt1.getAppAttemptId());
am1.registerAppAttempt();
LOG.info("sending container requests ");
am1.addRequests(new String[] {"*"}, 2 * GB, 1, 1);
AllocateResponse alloc1Response = am1.schedule(); // send the request
// kick the scheduler
nm1.nodeHeartbeat(true);
int waitCounter = 20;
LOG.info("heartbeating nm1");
while (alloc1Response.getAllocatedContainers().size() < 1
&& waitCounter-- > 0) {
LOG.info("Waiting for containers to be created for app 1...");
Thread.sleep(500);
alloc1Response = am1.schedule();
}
LOG.info("received container : "
+ alloc1Response.getAllocatedContainers().size());
// No container should be allocated.
// Internally it should not been reserved.
Assert.assertTrue(alloc1Response.getAllocatedContainers().size() == 0);
LOG.info("heartbeating nm2");
waitCounter = 20;
nm2.nodeHeartbeat(true);
while (alloc1Response.getAllocatedContainers().size() < 1
&& waitCounter-- > 0) {
LOG.info("Waiting for containers to be created for app 1...");
Thread.sleep(500);
alloc1Response = am1.schedule();
}
LOG.info("received container : "
+ alloc1Response.getAllocatedContainers().size());
Assert.assertTrue(alloc1Response.getAllocatedContainers().size() == 1);
rm.stop();
}
// This is to test container tokens are generated when the containers are
// acquired by the AM, not when the containers are allocated
@Test
public void testContainerTokenGeneratedOnPullRequest() throws Exception {
MockRM rm1 = new MockRM(conf);
rm1.start();
MockNM nm1 = rm1.registerNode("127.0.0.1:1234", 8000);
RMApp app1 = rm1.submitApp(200);
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
// request a container.
am1.allocate("127.0.0.1", 1024, 1, new ArrayList<ContainerId>());
ContainerId containerId2 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2);
rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED);
RMContainer container =
rm1.getResourceScheduler().getRMContainer(containerId2);
// no container token is generated.
Assert.assertEquals(containerId2, container.getContainerId());
Assert.assertNull(container.getContainer().getContainerToken());
// acquire the container.
List<Container> containers =
am1.allocate(new ArrayList<ResourceRequest>(),
new ArrayList<ContainerId>()).getAllocatedContainers();
Assert.assertEquals(containerId2, containers.get(0).getId());
// container token is generated.
Assert.assertNotNull(containers.get(0).getContainerToken());
rm1.stop();
}
@Test
public void testNormalContainerAllocationWhenDNSUnavailable() throws Exception{
MockRM rm1 = new MockRM(conf);
rm1.start();
MockNM nm1 = rm1.registerNode("unknownhost:1234", 8000);
RMApp app1 = rm1.submitApp(200);
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
// request a container.
am1.allocate("127.0.0.1", 1024, 1, new ArrayList<ContainerId>());
ContainerId containerId2 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2);
rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED);
// acquire the container.
SecurityUtilTestHelper.setTokenServiceUseIp(true);
List<Container> containers =
am1.allocate(new ArrayList<ResourceRequest>(),
new ArrayList<ContainerId>()).getAllocatedContainers();
// not able to fetch the container;
Assert.assertEquals(0, containers.size());
SecurityUtilTestHelper.setTokenServiceUseIp(false);
containers =
am1.allocate(new ArrayList<ResourceRequest>(),
new ArrayList<ContainerId>()).getAllocatedContainers();
// should be able to fetch the container;
Assert.assertEquals(1, containers.size());
}
// This is to test whether LogAggregationContext is passed into
// container tokens correctly
@Test
public void testLogAggregationContextPassedIntoContainerToken()
throws Exception {
MockRM rm1 = new MockRM(conf);
rm1.start();
MockNM nm1 = rm1.registerNode("127.0.0.1:1234", 8000);
MockNM nm2 = rm1.registerNode("127.0.0.1:2345", 8000);
// LogAggregationContext is set as null
Assert
.assertNull(getLogAggregationContextFromContainerToken(rm1, nm1, null));
// create a not-null LogAggregationContext
LogAggregationContext logAggregationContext =
LogAggregationContext.newInstance(
"includePattern", "excludePattern",
"rolledLogsIncludePattern",
"rolledLogsExcludePattern");
LogAggregationContext returned =
getLogAggregationContextFromContainerToken(rm1, nm2,
logAggregationContext);
Assert.assertEquals("includePattern", returned.getIncludePattern());
Assert.assertEquals("excludePattern", returned.getExcludePattern());
Assert.assertEquals("rolledLogsIncludePattern",
returned.getRolledLogsIncludePattern());
Assert.assertEquals("rolledLogsExcludePattern",
returned.getRolledLogsExcludePattern());
rm1.stop();
}
private LogAggregationContext getLogAggregationContextFromContainerToken(
MockRM rm1, MockNM nm1, LogAggregationContext logAggregationContext)
throws Exception {
RMApp app2 = rm1.submitApp(200, logAggregationContext);
MockAM am2 = MockRM.launchAndRegisterAM(app2, rm1, nm1);
nm1.nodeHeartbeat(true);
// request a container.
am2.allocate("127.0.0.1", 512, 1, new ArrayList<ContainerId>());
ContainerId containerId =
ContainerId.newContainerId(am2.getApplicationAttemptId(), 2);
rm1.waitForState(nm1, containerId, RMContainerState.ALLOCATED);
// acquire the container.
List<Container> containers =
am2.allocate(new ArrayList<ResourceRequest>(),
new ArrayList<ContainerId>()).getAllocatedContainers();
Assert.assertEquals(containerId, containers.get(0).getId());
// container token is generated.
Assert.assertNotNull(containers.get(0).getContainerToken());
ContainerTokenIdentifier token =
BuilderUtils.newContainerTokenIdentifier(containers.get(0)
.getContainerToken());
return token.getLogAggregationContext();
}
private volatile int numRetries = 0;
private class TestRMSecretManagerService extends RMSecretManagerService {
public TestRMSecretManagerService(Configuration conf,
RMContextImpl rmContext) {
super(conf, rmContext);
}
@Override
protected RMContainerTokenSecretManager createContainerTokenSecretManager(
Configuration conf) {
return new RMContainerTokenSecretManager(conf) {
@Override
public Token createContainerToken(ContainerId containerId,
NodeId nodeId, String appSubmitter, Resource capability,
Priority priority, long createTime,
LogAggregationContext logAggregationContext, String nodeLabelExp, ContainerType containerType) {
numRetries++;
return super.createContainerToken(containerId, nodeId, appSubmitter,
capability, priority, createTime, logAggregationContext,
nodeLabelExp, containerType);
}
};
}
}
// This is to test fetching AM container will be retried, if AM container is
// not fetchable since DNS is unavailable causing container token/NMtoken
// creation failure.
@Test(timeout = 30000)
public void testAMContainerAllocationWhenDNSUnavailable() throws Exception {
MockRM rm1 = new MockRM(conf) {
@Override
protected RMSecretManagerService createRMSecretManagerService() {
return new TestRMSecretManagerService(conf, rmContext);
}
};
rm1.start();
MockNM nm1 = rm1.registerNode("unknownhost:1234", 8000);
SecurityUtilTestHelper.setTokenServiceUseIp(true);
RMApp app1 = rm1.submitApp(200);
RMAppAttempt attempt = app1.getCurrentAppAttempt();
nm1.nodeHeartbeat(true);
// fetching am container will fail, keep retrying 5 times.
while (numRetries <= 5) {
nm1.nodeHeartbeat(true);
Thread.sleep(1000);
Assert.assertEquals(RMAppAttemptState.SCHEDULED,
attempt.getAppAttemptState());
System.out.println("Waiting for am container to be allocated.");
}
SecurityUtilTestHelper.setTokenServiceUseIp(false);
MockRM.launchAndRegisterAM(app1, rm1, nm1);
}
}
| |
package edu.cmu.cs.faceswap;
import android.app.Activity;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentSender;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.design.widget.TabLayout;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.EditText;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GoogleApiAvailability;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.drive.Drive;
import com.google.android.gms.drive.DriveApi;
import com.google.android.gms.drive.DriveId;
import com.google.android.gms.drive.MetadataChangeSet;
import com.google.android.gms.drive.OpenFileActivityBuilder;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.GregorianCalendar;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import edu.cmu.cs.IO.RetrieveDriveFileContentsAsyncTask;
import edu.cmu.cs.gabriel.Const;
import edu.cmu.cs.gabriel.GabrielClientActivity;
import edu.cmu.cs.gabriel.GabrielConfigurationAsyncTask;
import edu.cmu.cs.utils.UIUtils;
import filepickerlibrary.FilePickerActivity;
import static edu.cmu.cs.CustomExceptions.CustomExceptions.notifyError;
import static edu.cmu.cs.utils.NetworkUtils.checkOnline;
import static edu.cmu.cs.utils.NetworkUtils.isOnline;
import static edu.cmu.cs.utils.UIUtils.prepareForResultIntentForFilePickerActivity;
public class CloudletDemoActivity extends AppCompatActivity implements
GabrielConfigurationAsyncTask.AsyncResponse, GoogleApiClient.ConnectionCallbacks,
GoogleApiClient.OnConnectionFailedListener{
// Main action bar for the activity.
private Toolbar toolbar;
private TabLayout tabLayout;
private ViewPager viewPager;
private static final String TAG = "cloudletDemoActivity";
private static final int DLG_EXAMPLE1 = 0;
private static final int TEXT_ID = 999;
// Result of the most recent text-input dialog.
public String inputDialogResult;
// Fragment hosting the demo UI (person table etc.).
private CloudletFragment childFragment;
private EditText dialogInputTextEdit;
public SharedPreferences mSharedPreferences= null;
// Payload returned by the last GabrielConfigurationAsyncTask.
private byte[] asyncResponseExtra=null;
public String currentServerIp=null;
private Activity mActivity=null;
// Guards against a race between restoring saved state and onResume
// both triggering a send.
public boolean onResumeFromLoadState=false;
private GoogleApiClient mGoogleApiClient;
public static final int GDRIVE_RESOLVE_CONNECTION_REQUEST_CODE =32891;
private static final int GDRIVE_REQUEST_CODE_OPENER = 23091;
private static final int GDRIVE_REQUEST_CODE_CREATOR = 20391;
// Pending Google Drive action, set (closed) when a connection is
// requested and cleared (opened) when onConnected fires, when a failed
// connection has no resolution, or when the resolution result returns.
private int pendingGDriveAction=-1;
private static final int GDRIVE_ACTION_LOAD=12;
private static final int GDRIVE_ACTION_SAVE=13;
/**
 * Activity entry point: verifies connectivity, then wires the toolbar,
 * demo fragment and shared preferences. When offline, only an error
 * dialog is shown and the content view is never set.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    if (!isOnline(this)){
        notifyError(Const.CONNECTIVITY_NOT_AVAILABLE, true, this);
    } else {
        setContentView(R.layout.activity_cloudlet_demo);
        toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        childFragment=
                (CloudletFragment) getSupportFragmentManager().findFragmentById(R.id.demofragment);
        mSharedPreferences=getSharedPreferences(getString(R.string.shared_preference_file_key),
                MODE_PRIVATE);
    }
    Log.d(TAG,"on create");
    // Google Drive client wiring is currently disabled; kept for
    // reference if GDrive save/load is re-enabled.
    // mGoogleApiClient = new GoogleApiClient.Builder(this)
    //         .addApi(Drive.API)
    //         .addScope(Drive.SCOPE_FILE)
    //         .addConnectionCallbacks(this)
    //         .addOnConnectionFailedListener(this)
    //         .build();
    mActivity=this;
}
/**
 * Lifecycle hook; the GoogleApiClient connect call is disabled along
 * with the rest of the GDrive wiring in onCreate.
 */
@Override
protected void onStart() {
    super.onStart();
    Log.i(TAG, "on start");
    // mGoogleApiClient.connect();
}
//activity menu
/**
 * Inflate the activity's options menu into the action bar.
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    getMenuInflater().inflate(R.menu.menu_cloudlet_demo, menu);
    return true;
}
// @Override
// public void onDialogEditTextResult(String result) {
// inputDialogResult=result;
// }
/**
 * callback when gabriel configuration async task finished.
 * Dispatches on the action constant: reset-state failures show an error
 * dialog; a successful upload triggers a fetch of the trained-people
 * list; download results are either handed to the file picker or saved
 * to Google Drive; a get-person result repopulates the person table.
 *
 * @param action  which Const.GABRIEL_CONFIGURATION_* operation finished
 * @param success whether the operation succeeded
 * @param extra   operation-specific payload (may be null)
 */
@Override
public void onGabrielConfigurationAsyncTaskFinish(String action,
                                                  boolean success,
                                                  byte[] extra) {
    if (action.equals(Const.GABRIEL_CONFIGURATION_RESET_STATE)){
        if (!success){
            String errorMsg=
                    "No Gabriel Server Found. \n" +
                    "Please define a valid Gabriel Server IP ";
            notifyError(errorMsg, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    dialog.dismiss();
                }
            },this);
        }
    } else if (action.equals(Const.GABRIEL_CONFIGURATION_UPLOAD_STATE)){
        Log.d(TAG, "upload state finished. success? " + success);
        if (success){
            //fetch person's name
            Log.d(TAG, "request trained people list" + success);
            sendOpenFaceGetPersonRequest(currentServerIp);
        }
    } else if (action.equals(Const.GABRIEL_CONFIGURATION_DOWNLOAD_STATE)) {
        Log.d(TAG, "download state finished. success? " + success);
        if (success) {
            // Keep the payload; the picker result callback writes it out.
            asyncResponseExtra = extra;
            Intent intent = prepareForResultIntentForFilePickerActivity(this, false);
            startActivityForResult(intent, FilePickerActivity.REQUEST_FILE);
        }
    } else if (action.equals(Const.GABRIEL_CONFIGURATION_DOWNLOAD_STATE_TO_GDRIVE)){
        Log.d(TAG, "download state to google drive finished. success? " + success);
        if (success) {
            asyncResponseExtra = extra;
            actionSaveStateFileToGoogleDrive();
        }
    } else if (action.equals(Const.GABRIEL_CONFIGURATION_GET_PERSON)){
        Log.d(TAG, "download person finished. success? " + success);
        if (success){
            asyncResponseExtra=extra;
            String peopleString=new String(asyncResponseExtra);
            Log.i(TAG, "people : " + new String(asyncResponseExtra));
            //remove bracket — payload looks like "[a,b,c]"
            String peopleStringNoBracket=peopleString.substring(1,peopleString.length()-1);
            childFragment.clearPersonTable();
            if (!peopleStringNoBracket.isEmpty()){
                String[] people=peopleStringNoBracket.split(",");
                childFragment.populatePersonTable(people);
            }
        } else {
            //clear UI
            childFragment.clearPersonTable();
        }
    }
}
/**
 * Uploads raw state bytes to the OpenFace server, or reports a format error
 * when the bytes could not be read.
 *
 * @param stateData serialized state, or null when the source file was invalid
 */
public void actionUploadStateByteArray(byte[] stateData) {
    if (stateData == null) {
        Log.e(TAG, "wrong file format");
        Toast.makeText(this, "wrong file format", Toast.LENGTH_LONG).show();
        return;
    }
    // Remember that the next onResume follows a load-state request
    // (guards the load_state / onResume race).
    onResumeFromLoadState = true;
    sendOpenFaceLoadStateRequest(stateData);
}
/**
 * Handles results from the file picker and from the Google Drive flows.
 *
 * @param requestCode which child-activity request produced this result
 * @param resultCode  RESULT_OK on success
 * @param data        result payload from the child activity
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    switch (requestCode) {
        case FilePickerActivity.REQUEST_FILE:
            if (resultCode == RESULT_OK) {
                Bundle extras = data.getExtras();
                String path = (String) extras.get(FilePickerActivity.FILE_EXTRA_DATA_PATH);
                Log.d(TAG, "path: " + path);
                boolean isLoad = (boolean) extras.get(FilePickerActivity.INTENT_EXTRA_ACTION_READ);
                File file = new File(path);
                if (isLoad) {
                    // Read the picked file and push its state to the server.
                    byte[] stateData = UIUtils.loadFromFile(file);
                    if (stateData != null) {
                        actionUploadStateByteArray(stateData);
                    } else {
                        Toast.makeText(this, "Invalid File", Toast.LENGTH_SHORT).show();
                    }
                } else {
                    // Write out the state bytes previously downloaded from the server.
                    if (this.asyncResponseExtra != null) {
                        UIUtils.saveToFile(file, this.asyncResponseExtra);
                    }
                }
            }
            break;
        case GDRIVE_RESOLVE_CONNECTION_REQUEST_CODE:
            if (resultCode == RESULT_OK) {
                Log.i(TAG, "drive client problem resolved. Trying to connect");
                mGoogleApiClient.connect();
            } else {
                // User declined the resolution; abandon the pending Drive action.
                pendingGDriveAction = -1;
                Log.i(TAG, "drive connection resolution failed");
                Toast.makeText(this, "Failed to Connect Google Drive", Toast.LENGTH_LONG).show();
            }
            break;
        case GDRIVE_REQUEST_CODE_OPENER:
            if (resultCode == RESULT_OK) {
                DriveId fileId = (DriveId) data.getParcelableExtra(
                        OpenFileActivityBuilder.EXTRA_RESPONSE_DRIVE_ID);
                Log.i(TAG, "user select drive file id: " + fileId);
                openDriveFile(fileId);
            }
            break;
        case GDRIVE_REQUEST_CODE_CREATOR:
            // Called after a file is saved to Drive.
            if (resultCode == RESULT_OK) {
                Log.i(TAG, "Image successfully saved.");
                // Fix: corrected user-facing typo ("succesfully" -> "successfully").
                Toast.makeText(this,
                        "successfully saved to google drive", Toast.LENGTH_SHORT).show();
            }
            break;
    }
}
/**
 * Sends a load-state control request to the OpenFace server.
 *
 * @param stateData serialized state to upload
 * @return true when the async upload was started, false when offline
 */
private boolean sendOpenFaceLoadStateRequest(byte[] stateData) {
    if (!checkOnline(this)) {
        return false;
    }
    // Completion is reported back through onGabrielConfigurationAsyncTaskFinish.
    new GabrielConfigurationAsyncTask(this,
            currentServerIp,
            GabrielClientActivity.VIDEO_STREAM_PORT,
            GabrielClientActivity.RESULT_RECEIVING_PORT,
            Const.GABRIEL_CONFIGURATION_UPLOAD_STATE,
            this).execute(stateData);
    return true;
}
/**
 * Asks the OpenFace server at {@code remoteIP} to reset its trained state and
 * clears the local person table. No-op (with an error dialog) when offline.
 *
 * @param remoteIP server to reset
 */
public void sendOpenFaceResetRequest(String remoteIP) {
    if (!isOnline(this)) {
        notifyError(Const.CONNECTIVITY_NOT_AVAILABLE, false, this);
        return;
    }
    GabrielConfigurationAsyncTask task =
            new GabrielConfigurationAsyncTask(this,
                    remoteIP,
                    GabrielClientActivity.VIDEO_STREAM_PORT,
                    GabrielClientActivity.RESULT_RECEIVING_PORT,
                    Const.GABRIEL_CONFIGURATION_RESET_STATE,
                    this);
    task.execute();
    // Fix: log the server actually targeted (remoteIP); the old message used
    // currentServerIp, which can differ from the request target.
    Log.d(TAG, "send reset openface server request to " + remoteIP);
    childFragment.clearPersonTable();
}
//TODO: need to check if there is a gabriel server or not
/**
 * Requests the list of trained people from the OpenFace server at
 * {@code remoteIP}; the response arrives via
 * onGabrielConfigurationAsyncTaskFinish (GET_PERSON branch).
 *
 * @param remoteIP server to query
 */
public void sendOpenFaceGetPersonRequest(String remoteIP) {
    if (!isOnline(this)) {
        notifyError(Const.CONNECTIVITY_NOT_AVAILABLE, false, this);
        return;
    }
    GabrielConfigurationAsyncTask task =
            new GabrielConfigurationAsyncTask(this,
                    remoteIP,
                    GabrielClientActivity.VIDEO_STREAM_PORT,
                    GabrielClientActivity.RESULT_RECEIVING_PORT,
                    Const.GABRIEL_CONFIGURATION_GET_PERSON,
                    this);
    task.execute();
    // Fix: log the server actually targeted (remoteIP), not currentServerIp.
    Log.d(TAG, "send get person openface server request to " + remoteIP);
}
/**
 * Collects every saved server name across all preference dictionaries,
 * prefixing each with its category label and the standard delimiter.
 *
 * @return prefixed server names; empty array when none are saved
 */
private CharSequence[] getAllServerNames() {
    String[] dictNames = getResources().getStringArray(R.array.shared_preference_ip_dict_names);
    String[] categoryLabels = getResources().getStringArray(R.array.add_ip_places_spinner_array);
    List<String> allNames = new ArrayList<String>();
    for (int idx = 0; idx < dictNames.length; idx++) {
        Set<String> existingNames = mSharedPreferences.getStringSet(
                dictNames[idx], new HashSet<String>());
        String prefix = categoryLabels[idx] + SelectServerAlertDialog.IP_NAME_PREFIX_DELIMITER;
        for (String name : existingNames) {
            allNames.add(prefix + name);
        }
    }
    return allNames.toArray(new CharSequence[allNames.size()]);
}
/**
 * Lets the user pick a local state file to upload to the server; the selected
 * file comes back through onActivityResult (REQUEST_FILE, read mode).
 *
 * @return true when the picker was launched, false when offline
 */
public boolean actionUploadStateFromLocalFile() {
    if (!checkOnline(this)) {
        return false;
    }
    Intent intent = prepareForResultIntentForFilePickerActivity(this, true);
    startActivityForResult(intent, FilePickerActivity.REQUEST_FILE);
    return true;
}
/**
 * Handles action-bar item selection. The action bar handles Home/Up itself
 * when a parent activity is declared in AndroidManifest.xml.
 *
 * @return true when the item was handled
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();
    switch (id) {
        case R.id.manage_servers:
            startActivity(new Intent(this, IPSettingActivity.class));
            return true;
        case R.id.setting_reset_openface_server:
            // Check connectivity before firing the reset request.
            if (checkOnline(this)) {
                sendOpenFaceResetRequest(currentServerIp);
                return true;
            }
            return false;
        case R.id.setting_save_state:
            return launchStateDownloadTask(Const.GABRIEL_CONFIGURATION_DOWNLOAD_STATE);
        case R.id.setting_save_state_to_gdrive:
            return launchStateDownloadTask(Const.GABRIEL_CONFIGURATION_DOWNLOAD_STATE_TO_GDRIVE);
        default:
            return false;
    }
}

/**
 * Fires a state-download async task against the current server; the result is
 * delivered to onGabrielConfigurationAsyncTaskFinish. Extracted to remove the
 * duplicated launch code shared by the two save-state menu items.
 *
 * @param action Const.GABRIEL_CONFIGURATION_DOWNLOAD_STATE[_TO_GDRIVE]
 * @return true when the task was started, false when offline
 */
private boolean launchStateDownloadTask(String action) {
    if (!checkOnline(this)) {
        return false;
    }
    new GabrielConfigurationAsyncTask(this,
            currentServerIp,
            GabrielClientActivity.VIDEO_STREAM_PORT,
            GabrielClientActivity.RESULT_RECEIVING_PORT,
            action,
            this).execute();
    return true;
}
/**
 * Ensures a Drive API client exists and a connection attempt is in flight.
 * Results arrive asynchronously via onConnected / onConnectionFailed.
 *
 * @return always true; the real outcome is delivered through the callbacks
 */
private boolean connectGoogleApiClient() {
    if (mGoogleApiClient == null || !mGoogleApiClient.isConnected()) {
        // Lazily build the client bound to this activity's callbacks.
        // No account name is passed, so the user is prompted to choose one.
        Log.d(TAG, "creating a new google api client");
        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .addApi(Drive.API)
                .addScope(Drive.SCOPE_FILE)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .build();
        mGoogleApiClient.connect();
    }
    return true;
}
@Override
protected void onResume() {
    // Lifecycle logging only; no state is restored here.
    Log.i(TAG, "on resume");
    super.onResume();
}
@Override
protected void onPause() {
    Log.i(TAG, "on pause");
    // Drop the Drive connection while the activity is backgrounded;
    // connectGoogleApiClient() re-establishes it on demand.
    if (mGoogleApiClient != null) {
        mGoogleApiClient.disconnect();
    }
    super.onPause();
}
// Invoked by RetrieveDriveFileContentsAsyncTask once a Drive file's bytes have
// been fetched; forwards them to the server as new OpenFace state.
private RetrieveDriveFileContentsAsyncTask.GdriveRetrieveFileContentCallBack gdriveCallBack =
        new RetrieveDriveFileContentsAsyncTask.GdriveRetrieveFileContentCallBack() {
            @Override
            public void onFileRetrieved(byte[] content) {
                Log.i(TAG, "uploading byte array to server...");
                actionUploadStateByteArray(content);
            }
        };
/**
 * Launches the Drive file-opener UI so the user can pick a state file.
 * Only plain text and Google Docs MIME types are selectable (the Drive picker
 * cannot open other Google Docs formats); the chosen DriveId comes back via
 * onActivityResult with GDRIVE_REQUEST_CODE_OPENER.
 */
private void readFileFromGoogleDrive() {
    if (mGoogleApiClient == null || !mGoogleApiClient.isConnected()) {
        return;
    }
    IntentSender intentSender = Drive.DriveApi
            .newOpenFileActivityBuilder()
            .setMimeType(new String[]{"text/plain", "application/vnd.google-apps.document"})
            .build(mGoogleApiClient);
    try {
        startIntentSenderForResult(intentSender, GDRIVE_REQUEST_CODE_OPENER, null, 0, 0, 0);
    } catch (IntentSender.SendIntentException e) {
        Log.w(TAG, "Unable to send intent", e);
    }
}
/** Queues a Drive "load state" action, then starts the client connection. */
public void actionReadStateFileFromGoogleDrive() {
    pendingGDriveAction = GDRIVE_ACTION_LOAD;
    connectGoogleApiClient();
}
/** Queues a Drive "save state" action, then starts the client connection. */
public void actionSaveStateFileToGoogleDrive() {
    pendingGDriveAction = GDRIVE_ACTION_SAVE;
    connectGoogleApiClient();
}
/**
 * Fetches the contents of the user-selected Drive file asynchronously;
 * the bytes are delivered to {@code gdriveCallBack}.
 */
private void openDriveFile(DriveId mSelectedFileDriveId) {
    new RetrieveDriveFileContentsAsyncTask(getApplicationContext(), gdriveCallBack)
            .execute(mSelectedFileDriveId);
}
/** Runs whichever Drive action was queued before the connection attempt. */
@Override
public void onConnected(Bundle bundle) {
    Log.i(TAG, "drive API client connected.");
    switch (pendingGDriveAction) {
        case GDRIVE_ACTION_LOAD:
            readFileFromGoogleDrive();
            break;
        case GDRIVE_ACTION_SAVE:
            saveFileToDrive(asyncResponseExtra);
            break;
        default:
            break;
    }
    pendingGDriveAction = -1;
}
@Override
public void onConnectionSuspended(int i) {
    // Nothing to clean up; pending actions are resolved when the client
    // reconnects or fails.
    Log.i(TAG, "GoogleApiClient connection suspended");
}
/**
 * Called whenever the API client fails to connect. If Play services offers a
 * resolution (typically an authorization dialog), launch it; the outcome
 * returns via onActivityResult (GDRIVE_RESOLVE_CONNECTION_REQUEST_CODE).
 * Otherwise show the localized error dialog and abandon the pending action.
 */
@Override
public void onConnectionFailed(ConnectionResult result) {
    Log.i(TAG, "GoogleApiClient connection failed: " + result.toString());
    Toast.makeText(this, "Failed to Connect to Google Drive. Trying to resolve...",
            Toast.LENGTH_SHORT).show();
    if (result.hasResolution()) {
        try {
            result.startResolutionForResult(this, GDRIVE_RESOLVE_CONNECTION_REQUEST_CODE);
        } catch (IntentSender.SendIntentException e) {
            Log.e(TAG, "Exception while starting resolution activity", e);
        }
    } else {
        GoogleApiAvailability.getInstance().getErrorDialog(this, result.getErrorCode(), 0).show();
        Log.i(TAG, "trying to resolve" + result.toString());
        pendingGDriveAction = -1;
    }
}
/**
 * Creates new Drive contents holding the magic header plus the given state
 * bytes, then launches the Drive file-creator chooser so the user can name
 * and place the file. The chooser result arrives in onActivityResult with
 * GDRIVE_REQUEST_CODE_CREATOR.
 *
 * @param state serialized OpenFace state downloaded from the server
 */
private void saveFileToDrive(final byte[] state) {
    // Start by creating new contents; the rest happens in the result callback.
    if (mGoogleApiClient != null && mGoogleApiClient.isConnected()) {
        Log.i(TAG, "saving file to drive.");
        Drive.DriveApi.newDriveContents(mGoogleApiClient)
                .setResultCallback(new ResultCallback<DriveApi.DriveContentsResult>() {
                    @Override
                    public void onResult(DriveApi.DriveContentsResult result) {
                        // If the operation was not successful, we cannot do
                        // anything and must fail.
                        if (!result.getStatus().isSuccess()) {
                            Log.i(TAG, "Failed to create new contents.");
                            return;
                        }
                        // Otherwise, we can write our data to the new contents.
                        Log.i(TAG, "New contents created.");
                        // Get an output stream for the contents.
                        OutputStream outputStream = result.getDriveContents().getOutputStream();
                        // Prepend the magic sequence so the file is recognized
                        // as an OpenFace state file on load.
                        ByteArrayOutputStream bos = new ByteArrayOutputStream();
                        try {
                            bos.write(Const.OPENFACE_STATE_FILE_MAGIC_SEQUENCE.getBytes());
                            bos.write(state);
                            outputStream.write(bos.toByteArray());
                            bos.close();
                            outputStream.close();
                        } catch (IOException e1) {
                            Log.i(TAG, "Unable to write file contents.");
                        }
                        // Create the initial metadata - MIME type and title.
                        // Note that the user will be able to change the title later.
                        // NOTE(review): "hh" is the 12-hour clock; "HH"
                        // (24-hour) was probably intended — confirm.
                        SimpleDateFormat dateFormat = new SimpleDateFormat("MM_dd_hh_mm_ss");
                        GregorianCalendar cal = new GregorianCalendar();
                        dateFormat.setTimeZone(cal.getTimeZone());
                        String hint = "openface_" + dateFormat.format(cal.getTime()) + ".txt";
                        MetadataChangeSet metadataChangeSet = new MetadataChangeSet.Builder()
                                .setMimeType("text/plain").setTitle(hint).build();
                        // Create an intent for the file chooser, and start it.
                        IntentSender intentSender = Drive.DriveApi
                                .newCreateFileActivityBuilder()
                                .setInitialMetadata(metadataChangeSet)
                                .setInitialDriveContents(result.getDriveContents())
                                .build(mGoogleApiClient);
                        try {
                            startIntentSenderForResult(
                                    intentSender, GDRIVE_REQUEST_CODE_CREATOR, null, 0, 0, 0);
                        } catch (IntentSender.SendIntentException e) {
                            Log.i(TAG, "Failed to launch file chooser.");
                        }
                    }
                });
    } else {
        Toast.makeText(this, "failed to connect to google drive", Toast.LENGTH_LONG).show();
    }
}
}
| |
package com.sandwich.koan;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import org.easymock.EasyMock;
import org.junit.Ignore;
import org.junit.Test;
import com.sandwich.koan.TestUtils.ArgRunner;
import com.sandwich.koan.TestUtils.TwoObjectAssertion;
import com.sandwich.koan.path.CommandLineTestCase;
/**
 * Unit tests for {@code TestUtils}: equals/hashCode contract enforcement,
 * the concurrent-assertion driver {@code doSimultaneouslyAndRepetitively},
 * and line-wise string iteration ({@code forEachLine}).
 */
public class TestUtilsTest extends CommandLineTestCase {

    // --- equals-contract enforcement on Integers ---

    @Test
    public void testEqualsContractEnforcement_integerIdentity_happyPath() throws Exception {
        Integer one = 1;
        TestUtils.assertEqualsContractEnforcement(one, one, one);
    }

    @Test
    public void testEqualsContractEnforcement_integerObject_happyPath() throws Exception {
        TestUtils.assertEqualsContractEnforcement(new Integer(1), new Integer(1), new Integer(1));
    }

    // Each exception path makes exactly one of the three operands unequal.
    @Test(expected=AssertionError.class)
    public void testEqualsContractEnforcement_integer_exceptionPath0() throws Exception {
        TestUtils.assertEqualsContractEnforcement(new Integer(2), new Integer(1), new Integer(1));
    }

    @Test(expected=AssertionError.class)
    public void testEqualsContractEnforcement_integer_exceptionPath1() throws Exception {
        TestUtils.assertEqualsContractEnforcement(new Integer(1), new Integer(2), new Integer(1));
    }

    @Test(expected=AssertionError.class)
    public void testEqualsContractEnforcement_integer_exceptionPath() throws Exception {
        TestUtils.assertEqualsContractEnforcement(new Integer(1), new Integer(1), new Integer(2));
    }

    // --- hashCode-contract enforcement on Integers ---

    @Test
    public void testHashCodeContractEnforcement_integerIdentity_happyPath() throws Exception {
        Integer one = 1;
        TestUtils.assertHashCodeContractEnforcement(one, one, one);
    }

    @Test
    public void testHashCodeContractEnforcement_integerObject_happyPath() throws Exception {
        TestUtils.assertHashCodeContractEnforcement(new Integer(1), new Integer(1), new Integer(1));
    }

    @Test(expected=AssertionError.class)
    public void testHashCodeContractEnforcement_integer_exceptionPath0() throws Exception {
        TestUtils.assertHashCodeContractEnforcement(new Integer(2), new Integer(1), new Integer(1));
    }

    @Test(expected=AssertionError.class)
    public void testHashCodeContractEnforcement_integer_exceptionPath1() throws Exception {
        TestUtils.assertHashCodeContractEnforcement(new Integer(1), new Integer(2), new Integer(1));
    }

    @Test(expected=AssertionError.class)
    public void testHashCodeContractEnforcement_integer_exceptionPath() throws Exception {
        TestUtils.assertHashCodeContractEnforcement(new Integer(1), new Integer(1), new Integer(2));
    }

    // --- contract enforcement on custom fixture types (base vs. subclass
    // instances are unequal and hash differently) ---

    @Test
    public void testHashCodeContractEnforcement_testObj_happyPath() throws Exception {
        TestUtils.assertHashCodeContractEnforcement(new ContractEnforcementBase(),
                new ContractEnforcementBase(),
                new ContractEnforcementBase());
    }

    @Test(expected=AssertionError.class)
    public void testHashCodeContractEnforcement_testObj_exceptionPath0() throws Exception {
        TestUtils.assertHashCodeContractEnforcement(new ContractEnforcementSubclass(),
                new ContractEnforcementBase(),
                new ContractEnforcementBase());
    }

    @Test(expected=AssertionError.class)
    public void testHashCodeContractEnforcement_testObj_exceptionPath1() throws Exception {
        TestUtils.assertHashCodeContractEnforcement(new ContractEnforcementBase(),
                new ContractEnforcementSubclass(),
                new ContractEnforcementBase());
    }

    @Test(expected=AssertionError.class)
    public void testHashCodeContractEnforcement_testObj_exceptionPath2() throws Exception {
        TestUtils.assertHashCodeContractEnforcement(new ContractEnforcementBase(),
                new ContractEnforcementBase(),
                new ContractEnforcementSubclass());
    }

    @Test
    public void testEqualsContractEnforcement_testObj_happyPath() throws Exception {
        TestUtils.assertEqualsContractEnforcement(new ContractEnforcementBase(),
                new ContractEnforcementBase(),
                new ContractEnforcementBase());
    }

    @Test(expected=AssertionError.class)
    public void testEqualsContractEnforcement_testObj_exceptionPath0() throws Exception {
        TestUtils.assertEqualsContractEnforcement(new ContractEnforcementSubclass(),
                new ContractEnforcementBase(),
                new ContractEnforcementBase());
    }

    @Test(expected=AssertionError.class)
    public void testEqualsContractEnforcement_testObj_exceptionPath1() throws Exception {
        TestUtils.assertEqualsContractEnforcement(new ContractEnforcementBase(),
                new ContractEnforcementSubclass(),
                new ContractEnforcementBase());
    }

    @Test(expected=AssertionError.class)
    public void testEqualsContractEnforcement_testObj_exceptionPath2() throws Exception {
        TestUtils.assertEqualsContractEnforcement(new ContractEnforcementBase(),
                new ContractEnforcementBase(),
                new ContractEnforcementSubclass());
    }

    // Fixture: equal to any other base instance with the same i; constant hash.
    static class ContractEnforcementBase {
        int i = 1;
        @Override
        public boolean equals(Object o){
            return o instanceof ContractEnforcementBase && i == ((ContractEnforcementBase)o).i;
        }
        @Override
        public int hashCode(){
            return 1;
        }
    }

    // Fixture subtype: never equal to a plain base instance; different hash.
    static class ContractEnforcementSubclass extends ContractEnforcementBase {
        int j = 2;
        @Override
        public boolean equals(Object o){
            return o instanceof ContractEnforcementSubclass
                    && i == ((ContractEnforcementSubclass)o).i
                    && j == ((ContractEnforcementSubclass)o).j;
        }
        @Override
        public int hashCode(){
            return 2;
        }
    }

    // waste() calls wait() without owning the monitor, so each worker throws
    // IllegalMonitorStateException; the driver is expected to surface that on
    // System.err and ultimately raise an AssertionError.
    @Test(expected=AssertionError.class, timeout=2000)
    public void testEqualsConcurrency_concurrentAccessFails() throws Exception {
        TestUtils.doSimultaneouslyAndRepetitively(new TwoObjectAssertion() {
            public void assertOn(String msg, Object o0, Object o1) {
                assertEquals(msg, o0, o1);
            }
        }, IllegalMonitorStateException.class,
        new Runnable() {
            public void run() {
                waste(10);
            }
        }, new Runnable() {
            public void run() {
                waste(11);
            }
        }, new Runnable() {
            public void run() {
                waste(3);
            }
        });
        assertSystemErrContains("Thread-1\" java.lang.IllegalMonitorStateException");
        assertSystemErrContains("Thread-2\" java.lang.IllegalMonitorStateException");
        assertSystemErrContains("Thread-3\" java.lang.IllegalMonitorStateException");
        assertSystemErrContains("Thread-4");
    }

    @Test(expected=java.lang.AssertionError.class, timeout=500)
    public void testEqualsConcurrency_concurrentAccessFails_assertIllegalMonitorStateException() throws Exception {
        TestUtils.doSimultaneouslyAndRepetitively(new TwoObjectAssertion() {
            public void assertOn(String msg, Object o0, Object o1) {
                assertEquals(msg, o0, o1);
            }
        },
        IllegalMonitorStateException.class,
        new Runnable() {
            public void run() {
                waste(10);
            }
        },
        new Runnable() {
            public void run() {
                waste(11);
            }
        },
        new Runnable() {
            public void run() {
                waste(3);
            }
        });
    }

    @Test @Ignore // disk/os access causing random failures at this low a deviation in timing
    public void testEqualsConcurrency() throws Exception {
        TestUtils.doSimultaneouslyAndRepetitively(new TwoObjectAssertion() {
            public void assertOn(String msg, Object o0, Object o1) {
                assertEquals(msg, o0, o1);
            }
        }, new Runnable(){
            public void run() {
                wasteSynchronized(10);
            }
        }, new Runnable(){
            public void run() {
                wasteSynchronized(11);
            }
        }, new Runnable(){
            public void run() {
                wasteSynchronized(3);
            }
        });
    }

    @Test @Ignore // disk/os access causing random failures at this low a deviation in timing
    public void testEqualsConcurrency_II() throws Exception {
        TestUtils.doSimultaneouslyAndRepetitively(new TwoObjectAssertion() {
            public void assertOn(String msg, Object o0, Object o1) {
                assertEquals(msg, o0, o1);
            }
        }, new Runnable(){
            public void run() {
                wasteSynchronized(10);
            }
        }, new Runnable(){
            public void run() {
                wasteSynchronized(11);
            }
        }, new Runnable(){
            public void run() {
                wasteSynchronized(4);
            }
        }, new Runnable(){
            public void run() {
                wasteSynchronized(6);
            }
        });
    }

    // Deliberately calls wait() without holding this object's monitor, so it
    // always throws IllegalMonitorStateException (relied on by the
    // concurrentAccessFails tests above).
    private void waste(int i) {
        try {
            wait(i);
        } catch (InterruptedException e) {
            fail(e.getMessage());
        }
    }

    // Correctly synchronized counterpart of waste(); waits up to i millis.
    private int wasteSynchronized(int i) { synchronized(this){
        try {
            wait(i);
        } catch (InterruptedException e) {
            fail(e.getMessage());
        }
        return i;
    }}

    // --- forEachLine: one strictly-ordered callback per line ---

    @Test
    public void testForEachLine_threeNewLines() throws Exception {
        @SuppressWarnings("unchecked")
        ArgRunner<String> runner = EasyMock.createStrictMock(ArgRunner.class);
        runner.run("");
        EasyMock.expectLastCall();
        runner.run("");
        EasyMock.expectLastCall();
        runner.run("");
        EasyMock.expectLastCall();
        runner.run("");
        EasyMock.expectLastCall();
        EasyMock.replay(runner);
        TestUtils.forEachLine("\n\n\n", runner);
        EasyMock.verify(runner);
    }

    @Test
    public void testForEachLine_spaceNewLineNewLine(){
        @SuppressWarnings("unchecked")
        ArgRunner<String> runner = EasyMock.createStrictMock(ArgRunner.class);
        runner.run(" ");
        EasyMock.expectLastCall();
        runner.run("");
        EasyMock.expectLastCall();
        runner.run("");
        EasyMock.expectLastCall();
        EasyMock.replay(runner);
        TestUtils.forEachLine(" \n\n", runner);
        EasyMock.verify(runner);
    }

    @Test
    public void testForEachLine_newLineNewLineSpace(){
        @SuppressWarnings("unchecked")
        ArgRunner<String> runner = EasyMock.createStrictMock(ArgRunner.class);
        runner.run("");
        EasyMock.expectLastCall();
        runner.run("");
        EasyMock.expectLastCall();
        runner.run(" ");
        EasyMock.expectLastCall();
        EasyMock.replay(runner);
        TestUtils.forEachLine("\n\n ", runner);
        EasyMock.verify(runner);
    }

    @Test
    public void testMixingBackslashRAndBackslashNNewLines() throws Exception {
        @SuppressWarnings("unchecked")
        ArgRunner<String> runner = EasyMock.createStrictMock(ArgRunner.class);
        runner.run("");
        EasyMock.expectLastCall();
        runner.run(" ");
        EasyMock.expectLastCall();
        runner.run(" ");
        EasyMock.expectLastCall();
        EasyMock.replay(runner);
        TestUtils.forEachLine("\r \n ", runner); // can mix and match \r and \n
        EasyMock.verify(runner);
    }

    @Test
    public void testMixingBackslashNAndBackslashRNewLines() throws Exception {
        @SuppressWarnings("unchecked")
        ArgRunner<String> runner = EasyMock.createStrictMock(ArgRunner.class);
        runner.run("");
        EasyMock.expectLastCall();
        runner.run(" ");
        EasyMock.expectLastCall();
        runner.run(" ");
        EasyMock.expectLastCall();
        EasyMock.replay(runner);
        TestUtils.forEachLine("\n \r ", runner); // can mix and match \r and \n
        EasyMock.verify(runner);
    }

    @Test
    public void testForEachLine_emptyString() throws Exception {
        @SuppressWarnings("unchecked")
        ArgRunner<String> runner = EasyMock.createStrictMock(ArgRunner.class);
        runner.run("");
        EasyMock.expectLastCall();
        EasyMock.replay(runner);
        TestUtils.forEachLine("", runner);
        EasyMock.verify(runner);
    }

    @Test
    public void testForEachLine_nothingThreeNewLinesSeparatedBy1Space() throws Exception {
        @SuppressWarnings("unchecked")
        ArgRunner<String> runner = EasyMock.createStrictMock(ArgRunner.class);
        runner.run("");
        EasyMock.expectLastCall();
        runner.run(" ");
        EasyMock.expectLastCall();
        runner.run(" ");
        EasyMock.expectLastCall();
        runner.run("");
        EasyMock.expectLastCall();
        EasyMock.replay(runner);
        TestUtils.forEachLine("\n \n \n", runner);
        EasyMock.verify(runner);
    }
@Test
public void testForEachLine_nothingThreeNewLinesSeparatedBy1SpaceThen2Spaces() throws Exception {
    @SuppressWarnings("unchecked")
    ArgRunner<String> runner = EasyMock.createStrictMock(ArgRunner.class);
    runner.run("");
    EasyMock.expectLastCall();
    runner.run(" ");
    EasyMock.expectLastCall();
    // Fix: this test was a byte-for-byte duplicate of the ...SeparatedBy1Space
    // test above ("\n \n \n"); per its name the second separator must be two
    // spaces, with the matching two-space line expected.
    runner.run("  ");
    EasyMock.expectLastCall();
    runner.run("");
    EasyMock.expectLastCall();
    EasyMock.replay(runner);
    TestUtils.forEachLine("\n \n  \n", runner);
    EasyMock.verify(runner);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.indexing;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.ShardId;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
/**
*/
public class ShardIndexingService extends AbstractIndexShardComponent {
// Receives per-operation timings for index/create operations.
private final IndexingSlowLog slowLog;
// Aggregate stats across all document types.
private final StatsHolder totalStats = new StatsHolder();
// Registered operation listeners; copy-on-write so notification loops need no lock.
private final CopyOnWriteArrayList<IndexingOperationListener> listeners = new CopyOnWriteArrayList<>();
// Per-type stats; an immutable map replaced wholesale on update (volatile for
// safe unsynchronized reads).
private volatile Map<String, StatsHolder> typesStats = ImmutableMap.of();
// Builds the per-shard indexing service and its slow log from index settings.
public ShardIndexingService(ShardId shardId, Settings indexSettings) {
    super(shardId, indexSettings);
    this.slowLog = new IndexingSlowLog(indexSettings);
}
/**
 * Returns the stats, including type specific stats. If the types are null/0
 * length, then nothing is returned for them. If they are set, then only types
 * provided will be returned, or <tt>_all</tt> for all types.
 */
public IndexingStats stats(String... types) {
    IndexingStats.Stats total = totalStats.stats();
    Map<String, IndexingStats.Stats> typesSt = null;
    if (types != null && types.length > 0) {
        typesSt = new HashMap<>(typesStats.size());
        // "_all" selects every type; otherwise match simple wildcard patterns.
        boolean matchAll = types.length == 1 && types[0].equals("_all");
        for (Map.Entry<String, StatsHolder> entry : typesStats.entrySet()) {
            if (matchAll || Regex.simpleMatch(types, entry.getKey())) {
                typesSt.put(entry.getKey(), entry.getValue().stats());
            }
        }
    }
    return new IndexingStats(total, typesSt);
}
// Registers a listener for pre/post operation callbacks.
public void addListener(IndexingOperationListener listener) {
    listeners.add(listener);
}
// Unregisters a previously added operation listener.
public void removeListener(IndexingOperationListener listener) {
    listeners.remove(listener);
}
/**
 * Marks a create operation as in-flight (total and per-type counters) and
 * gives each listener a chance to rewrite the operation before it executes.
 */
public Engine.Create preCreate(Engine.Create create) {
    totalStats.indexCurrent.inc();
    typeStats(create.type()).indexCurrent.inc();
    for (IndexingOperationListener l : listeners) {
        create = l.preCreate(create);
    }
    return create;
}
/** Notifies listeners of a create while the engine lock is still held. */
public void postCreateUnderLock(Engine.Create create) {
    for (IndexingOperationListener l : listeners) {
        try {
            l.postCreateUnderLock(create);
        } catch (Exception e) {
            // A misbehaving listener must not break the operation.
            logger.warn("postCreateUnderLock listener [{}] failed", e, l);
        }
    }
}
// Flags the total stats as currently throttled.
public void throttlingActivated() {
    totalStats.setThrottled(true);
}
// Clears the throttled flag on the total stats.
public void throttlingDeactivated() {
    totalStats.setThrottled(false);
}
/**
 * Records a completed create: accumulates its duration, drops the in-flight
 * counters bumped in preCreate, feeds the slow log, and notifies listeners.
 */
public void postCreate(Engine.Create create) {
    long elapsed = create.endTime() - create.startTime();
    totalStats.indexMetric.inc(elapsed);
    totalStats.indexCurrent.dec();
    StatsHolder perType = typeStats(create.type());
    perType.indexMetric.inc(elapsed);
    perType.indexCurrent.dec();
    slowLog.postCreate(create, elapsed);
    for (IndexingOperationListener l : listeners) {
        try {
            l.postCreate(create);
        } catch (Exception e) {
            logger.warn("postCreate listener [{}] failed", e, l);
        }
    }
}
/**
 * Notifies listeners that a create operation failed.
 * Fix: also undo the in-flight counters bumped in preCreate — previously a
 * failed create left indexCurrent permanently incremented, while the
 * postIndex/postDelete failure paths already decrement theirs.
 */
public void postCreate(Engine.Create create, Throwable ex) {
    totalStats.indexCurrent.dec();
    typeStats(create.type()).indexCurrent.dec();
    for (IndexingOperationListener listener : listeners) {
        try {
            listener.postCreate(create, ex);
        } catch (Throwable t) {
            logger.warn("postCreate listener [{}] failed", t, listener);
        }
    }
}
/**
 * Marks an index operation as in-flight (total and per-type counters) and
 * gives each listener a chance to rewrite the operation before it executes.
 */
public Engine.Index preIndex(Engine.Index index) {
    totalStats.indexCurrent.inc();
    typeStats(index.type()).indexCurrent.inc();
    for (IndexingOperationListener l : listeners) {
        index = l.preIndex(index);
    }
    return index;
}
/** Notifies listeners of an index operation while the engine lock is still held. */
public void postIndexUnderLock(Engine.Index index) {
    for (IndexingOperationListener l : listeners) {
        try {
            l.postIndexUnderLock(index);
        } catch (Exception e) {
            // A misbehaving listener must not break the operation.
            logger.warn("postIndexUnderLock listener [{}] failed", e, l);
        }
    }
}
/**
 * Records a completed index operation: accumulates its duration, drops the
 * in-flight counters bumped in preIndex, feeds the slow log, and notifies
 * listeners.
 */
public void postIndex(Engine.Index index) {
    long elapsed = index.endTime() - index.startTime();
    totalStats.indexMetric.inc(elapsed);
    totalStats.indexCurrent.dec();
    StatsHolder perType = typeStats(index.type());
    perType.indexMetric.inc(elapsed);
    perType.indexCurrent.dec();
    slowLog.postIndex(index, elapsed);
    for (IndexingOperationListener l : listeners) {
        try {
            l.postIndex(index);
        } catch (Exception e) {
            logger.warn("postIndex listener [{}] failed", e, l);
        }
    }
}
/**
 * Records a failed index operation: releases the in-flight counters bumped
 * by preIndex (total and per-type), then notifies listeners, isolating any
 * listener failure.
 */
public void postIndex(Engine.Index index, Throwable ex) {
    totalStats.indexCurrent.dec();
    typeStats(index.type()).indexCurrent.dec();
    for (IndexingOperationListener l : listeners) {
        try {
            l.postIndex(index, ex);
        } catch (Throwable failure) {
            logger.warn("postIndex listener [{}] failed", failure, l);
        }
    }
}
/**
 * Called before a delete operation: bumps in-flight delete counters and lets
 * each listener (in order) wrap or replace the operation.
 */
public Engine.Delete preDelete(Engine.Delete delete) {
    totalStats.deleteCurrent.inc();
    typeStats(delete.type()).deleteCurrent.inc();
    Engine.Delete current = delete;
    for (IndexingOperationListener l : listeners) {
        current = l.preDelete(current);
    }
    return current;
}
/**
 * Fans out the under-lock delete callback to all listeners with per-listener
 * failure isolation.
 */
public void postDeleteUnderLock(Engine.Delete delete) {
    for (IndexingOperationListener l : listeners) {
        try {
            l.postDeleteUnderLock(delete);
        } catch (Exception failure) {
            logger.warn("postDeleteUnderLock listener [{}] failed", failure, l);
        }
    }
}
/**
 * Records a successful delete: timing and in-flight metrics (total and
 * per-type), then listener fan-out. Note: deletes are not fed to the slow
 * log, unlike index/create.
 */
public void postDelete(Engine.Delete delete) {
    final long tookNanos = delete.endTime() - delete.startTime();
    totalStats.deleteMetric.inc(tookNanos);
    totalStats.deleteCurrent.dec();
    StatsHolder perType = typeStats(delete.type());
    perType.deleteMetric.inc(tookNanos);
    perType.deleteCurrent.dec();
    for (IndexingOperationListener l : listeners) {
        try {
            l.postDelete(delete);
        } catch (Exception failure) {
            logger.warn("postDelete listener [{}] failed", failure, l);
        }
    }
}
/**
 * Records a failed delete: releases the in-flight counters bumped by
 * preDelete, then notifies listeners, isolating any listener failure.
 */
public void postDelete(Engine.Delete delete, Throwable ex) {
    totalStats.deleteCurrent.dec();
    typeStats(delete.type()).deleteCurrent.dec();
    for (IndexingOperationListener l : listeners) {
        try {
            l.postDelete(delete, ex);
        } catch (Throwable failure) {
            logger.warn("postDelete listener [{}] failed", failure, l);
        }
    }
}
/** Counts an update that turned out to be a no-op, both in total and for the given type. */
public void noopUpdate(String type) {
    typeStats(type).noopUpdates.inc();
    totalStats.noopUpdates.inc();
}
/**
 * Resets the aggregate stats and prunes per-type holders. Holders that still
 * track in-flight operations (totalCurrent() > 0) are cleared but retained so
 * their in-flight counters stay balanced; idle holders are dropped. The map is
 * rebuilt and republished as an immutable snapshot under the lock, so readers
 * of typesStats never see a partially-built map.
 */
public void clear() {
    totalStats.clear();
    synchronized (this) {
        if (!typesStats.isEmpty()) {
            MapBuilder<String, StatsHolder> typesStatsBuilder = MapBuilder.newMapBuilder();
            for (Map.Entry<String, StatsHolder> typeStats : typesStats.entrySet()) {
                if (typeStats.getValue().totalCurrent() > 0) {
                    // still has in-flight operations: reset metrics but keep the holder
                    typeStats.getValue().clear();
                    typesStatsBuilder.put(typeStats.getKey(), typeStats.getValue());
                }
            }
            // publish the pruned immutable snapshot (copy-on-write)
            typesStats = typesStatsBuilder.immutableMap();
        }
    }
}
/**
 * Returns the per-type {@link StatsHolder}, creating it on first use.
 * Double-checked locking over an immutable copy-on-write map keeps the common
 * read path free of synchronization; writers rebuild and republish the map
 * under the lock. NOTE(review): this assumes typesStats is declared volatile
 * for safe publication — confirm at the field declaration.
 */
private StatsHolder typeStats(String type) {
    StatsHolder stats = typesStats.get(type);
    if (stats == null) {
        synchronized (this) {
            // re-check under the lock: another thread may have created it
            stats = typesStats.get(type);
            if (stats == null) {
                stats = new StatsHolder();
                typesStats = MapBuilder.newMapBuilder(typesStats).put(type, stats).immutableMap();
            }
        }
    }
    return stats;
}
/** Propagates a dynamic settings refresh to the slow log (the only settings consumer here). */
public void onRefreshSettings(Settings settings) {
    slowLog.onRefreshSettings(settings);
}
/**
 * Mutable bundle of indexing/delete metrics, kept once for the total and once
 * per document type. Throttle state is tracked so in-progress throttle time
 * can be included in snapshots.
 */
static class StatsHolder {
    public final MeanMetric indexMetric = new MeanMetric();
    public final MeanMetric deleteMetric = new MeanMetric();
    public final CounterMetric indexCurrent = new CounterMetric();
    public final CounterMetric deleteCurrent = new CounterMetric();
    public final CounterMetric noopUpdates = new CounterMetric();
    public final CounterMetric throttleTimeMillisMetric = new CounterMetric();
    volatile boolean isThrottled = false;
    volatile long startOfThrottleNS;

    /**
     * Builds a point-in-time stats snapshot. If throttling is currently
     * active, the elapsed (not yet accumulated) throttle time is added in.
     */
    public IndexingStats.Stats stats() {
        long currentThrottleNS = 0;
        if (isThrottled && startOfThrottleNS != 0) {
            currentThrottleNS += System.nanoTime() - startOfThrottleNS;
            if (currentThrottleNS < 0) {
                // Paranoia (System.nanoTime() is supposed to be monotonic): time slip must have happened, have to ignore this value
                currentThrottleNS = 0;
            }
        }
        return new IndexingStats.Stats(
            indexMetric.count(), TimeUnit.NANOSECONDS.toMillis(indexMetric.sum()), indexCurrent.count(),
            deleteMetric.count(), TimeUnit.NANOSECONDS.toMillis(deleteMetric.sum()), deleteCurrent.count(),
            noopUpdates.count(), isThrottled, TimeUnit.MILLISECONDS.toMillis(throttleTimeMillisMetric.count() + TimeValue.nsecToMSec(currentThrottleNS)));
    }

    /**
     * Transitions the throttle flag; on deactivation the elapsed throttle
     * time is folded into throttleTimeMillisMetric.
     */
    void setThrottled(boolean isThrottled) {
        if (!this.isThrottled && isThrottled) {
            startOfThrottleNS = System.nanoTime();
        } else if (this.isThrottled && !isThrottled) {
            assert startOfThrottleNS > 0 : "Bad state of startOfThrottleNS";
            long throttleTimeNS = System.nanoTime() - startOfThrottleNS;
            if (throttleTimeNS >= 0) {
                // Paranoia (System.nanoTime() is supposed to be monotonic): time slip may have occurred but never want to add a negative number
                throttleTimeMillisMetric.inc(TimeValue.nsecToMSec(throttleTimeNS));
            }
        }
        this.isThrottled = isThrottled;
    }

    /** Number of in-flight operations (index + delete) tracked by this holder. */
    public long totalCurrent() {
        // BUG FIX: previously summed deleteMetric.count() (lifetime delete
        // total) instead of deleteCurrent.count() (in-flight deletes), which
        // overstated current load and kept stale per-type holders alive in
        // clear().
        return indexCurrent.count() + deleteCurrent.count();
    }

    /** Resets the timing metrics; current/throttle counters are left untouched so in-flight accounting stays balanced. */
    public void clear() {
        indexMetric.clear();
        deleteMetric.clear();
    }
}
}
| |
package org.pale.gorm;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.block.Block;
/**
* A class to hold an axis-aligned bounding box and manipulate it.
*
* @author white
*
*/
public class Extent {
    /** Callback invoked once per (x, y, z) location in an extent. */
    public interface LocationRunner {
        void run(int x, int y, int z);
    }

    /**
     * An iterable for iterating over points in an extent, returned by
     * getVectorIterable(). Steps along the longest axis (optionally
     * restricted to XZ) with a configurable step and starting offset.
     *
     * @author white
     */
    public class VectorIterable implements Iterable<IntVector> {
        /**
         * Used to iterate along walls of an extent, returned by
         * VectorIterable.iterator().
         *
         * @author white
         */
        public class VectorIterator implements Iterator<IntVector> {
            IntVector pos, vec;

            public VectorIterator() {
                pos = new IntVector(minx, miny, minz);
                // choose the unit step direction from the longest axis
                switch (xzOnly ? getLongestAxisXZ() : getLongestAxis()) {
                case X:
                    vec = new IntVector(1, 0, 0);
                    break;
                case Y:
                    vec = new IntVector(0, 1, 0);
                    break;
                default:
                case Z:
                    vec = new IntVector(0, 0, 1);
                    break;
                }
                // apply the initial offset before scaling the step size
                pos = pos.add(vec.scale(offset));
                vec = vec.scale(step);
            }

            @Override
            public boolean hasNext() {
                // iteration ends once the cursor walks out of the extent
                return contains(pos);
            }

            @Override
            public IntVector next() {
                IntVector p = new IntVector(pos);
                pos = pos.add(vec);
                return p;
            }

            @Override
            public void remove() {
                // removal is meaningless for a generated sequence; intentionally a no-op
            }
        }

        private int step, offset;
        private boolean xzOnly;

        VectorIterable(int step, int offset, boolean xzOnly) {
            this.step = step;
            this.offset = offset;
            this.xzOnly = xzOnly;
        }

        @Override
        public Iterator<IntVector> iterator() {
            return new VectorIterator();
        }
    }

    public int minx;
    public int maxx;
    public int miny;
    public int maxy;
    public int minz;
    public int maxz;
    // false until the extent has been given at least one point
    private boolean isset = false;

    @Override
    public String toString() {
        if (isset)
            return String.format("[%d - %d, %d - %d, %d - %d]", minx, maxx, miny,
                    maxy, minz, maxz);
        else
            return "[invalid]";
    }

    /**
     * Construct an extent from corners of rectangles. It doesn't matter which
     * is the max or min corner, the system will sort it out.
     *
     * @param x1
     * @param y1
     * @param z1
     * @param x2
     * @param y2
     * @param z2
     */
    public Extent(int x1, int y1, int z1, int x2, int y2, int z2) {
        minx = Math.min(x1, x2);
        maxx = Math.max(x1, x2);
        miny = Math.min(y1, y2);
        maxy = Math.max(y1, y2);
        minz = Math.min(z1, z2);
        maxz = Math.max(z1, z2);
        isset = true;
    }

    /**
     * Creates an unset extent.
     */
    public Extent() {
    }

    /** Construct a single-block extent at the given location. */
    public Extent(Location loc) {
        minx = loc.getBlockX();
        miny = loc.getBlockY();
        minz = loc.getBlockZ();
        maxx = minx;
        maxy = miny;
        maxz = minz;
        isset = true;
    }

    /**
     * Construct an extent centred around a given point in xz, floored at a
     * given point in y
     *
     * @param loc
     * @param xsize
     * @param ysize
     * @param zsize
     */
    public Extent(IntVector loc, int xsize, int ysize, int zsize) {
        minx = loc.x - xsize / 2;
        miny = loc.y;
        minz = loc.z - zsize / 2;
        maxx = minx + xsize;
        maxy = miny + ysize;
        maxz = minz + zsize;
        isset = true;
    }

    /**
     * Construct a single-block extent
     *
     * @param x
     * @param y
     * @param z
     */
    public Extent(int x, int y, int z) {
        minx = x;
        maxx = x;
        miny = y;
        maxy = y;
        minz = z;
        maxz = z;
        isset = true;
    }

    /**
     * copy constructor
     *
     * @param e
     */
    public Extent(Extent e) {
        minx = e.minx;
        maxx = e.maxx;
        miny = e.miny;
        maxy = e.maxy;
        minz = e.minz;
        maxz = e.maxz;
        isset = e.isset;
    }

    /** Returns a copy collapsed to a single X coordinate. */
    public Extent setX(int x) {
        Extent e = new Extent(this);
        e.minx = x;
        e.maxx = x;
        return e;
    }

    /** Returns a copy collapsed to a single Y coordinate. */
    public Extent setY(int y) {
        Extent e = new Extent(this);
        e.miny = y;
        e.maxy = y;
        return e;
    }

    /** Returns a copy collapsed to a single Z coordinate. */
    public Extent setZ(int z) {
        Extent e = new Extent(this);
        e.minz = z;
        e.maxz = z;
        return e;
    }

    /** Size along X, inclusive of both ends. */
    public int xsize() {
        return (maxx - minx) + 1;
    }

    /** Size along Y, inclusive of both ends. */
    public int ysize() {
        return (maxy - miny) + 1;
    }

    /** Size along Z, inclusive of both ends. */
    public int zsize() {
        return (maxz - minz) + 1;
    }

    /**
     * Similar to union with a point, but acts in place.
     *
     * @param x
     * @param y
     * @param z
     */
    public void addPoint(int x, int y, int z) {
        if (!isset) {
            minx = x;
            maxx = x;
            miny = y;
            maxy = y;
            minz = z;
            maxz = z;
            isset = true;
        } else {
            if (x < minx)
                minx = x;
            if (x > maxx)
                maxx = x;
            if (y < miny)
                miny = y;
            if (y > maxy)
                maxy = y;
            if (z < minz)
                minz = z;
            if (z > maxz)
                maxz = z;
        }
    }

    /**
     * produce a vector from the extent, with some fields set to the minimum,
     * and some set to the maximum (per the X/Y/Z bit flags in minFields).
     */
    public IntVector getCorner(int minFields) {
        int x = ((minFields & X) != 0) ? minx : maxx;
        int y = ((minFields & Y) != 0) ? miny : maxy;
        int z = ((minFields & Z) != 0) ? minz : maxz;
        return new IntVector(x, y, z);
    }

    /**
     * Function taking an extent and (x,y,z), returning a new extent grown to
     * include the point.
     *
     * @param x
     * @param y
     * @param z
     * @return
     */
    public Extent union(int x, int y, int z) {
        Extent e = new Extent(this);
        e.addPoint(x, y, z);
        return e;
    }

    /**
     * Function taking an extent and a location, and returning a new extent
     *
     * @param loc
     */
    public Extent union(Location loc) {
        return union(loc.getBlockX(), loc.getBlockY(), loc.getBlockZ());
    }

    public Extent union(IntVector v) {
        return union(v.x, v.y, v.z);
    }

    /**
     * Union of two extents, returning a new extent covering both.
     *
     * @param e the other extent
     */
    public Extent union(Extent e) {
        // BUG FIX: the original reassigned 'e' to the partial result and then
        // added the result's OWN max corner (a no-op), so the argument's max
        // corner was never incorporated. Keep the argument and the result in
        // separate variables.
        Extent out = union(e.minx, e.miny, e.minz);
        out.addPoint(e.maxx, e.maxy, e.maxz);
        return out;
    }

    public final static int X = 1;
    public final static int Y = 2;
    public final static int Z = 4;
    public final static int ALL = X | Y | Z;
    public final static int LONGEST = 8;
    public final static int LONGESTXZ = 16;

    /**
     * Get the longest axis as a code
     *
     * @return
     */
    public int getLongestAxis() {
        if (xsize() > ysize() && xsize() > zsize())
            return X;
        else if (zsize() > ysize())
            return Z;
        else
            return Y;
    }

    /** Get the longest of the X and Z axes as a code (Z wins ties). */
    public int getLongestAxisXZ() {
        return xsize() > zsize() ? X : Z;
    }

    /**
     * Get the shortest axis as a code
     *
     * @return
     */
    public int getShortestAxis() {
        if (xsize() < ysize() && xsize() < zsize())
            return X;
        else if (zsize() < ysize())
            return Z;
        else
            return Y;
    }

    /**
     * Get length of axis; LONGEST/LONGESTXZ are resolved to a concrete axis
     * first.
     */
    public int getLengthOfAxis(int axis) {
        if (axis == LONGEST)
            axis = getLongestAxis();
        else if (axis == LONGESTXZ) {
            axis = getLongestAxisXZ();
        }
        switch (axis) {
        case X:
            return xsize();
        case Y:
            return ysize();
        case Z:
        default:
            return zsize();
        }
    }

    /**
     * Expand the extent by N voxels in some axes
     *
     * @param n number of voxels to grow on EACH side
     * @param axes axis bit flags (X/Y/Z, optionally LONGEST/LONGESTXZ)
     * @return new extent
     */
    public Extent expand(int n, int axes) {
        Extent e = new Extent(this);
        if ((axes & LONGEST) != 0) {
            axes |= getLongestAxis();
        }
        if ((axes & LONGESTXZ) != 0) {
            axes |= getLongestAxisXZ();
        }
        if ((axes & X) != 0) {
            e.minx -= n;
            e.maxx += n;
        }
        if ((axes & Y) != 0) {
            e.miny -= n;
            e.maxy += n;
        }
        if ((axes & Z) != 0) {
            e.minz -= n;
            e.maxz += n;
        }
        return e;
    }

    /**
     * stretch the extent by a factor of X,Z in the XZ plane, about its centre
     *
     * @return new extent
     */
    public Extent scaleXZ(double x, double z) {
        Extent e = new Extent(this);
        double minxd = minx;
        double maxxd = maxx;
        // BUG FIX: the Z working values were initialised from miny/maxy, so
        // the Z range was scaled about the Y centre. Use minz/maxz.
        double minzd = minz;
        double maxzd = maxz;
        double cx = (minxd + maxxd) * 0.5;
        double cz = (minzd + maxzd) * 0.5;
        double sx = (maxxd - minxd) * x * 0.5;
        double sz = (maxzd - minzd) * z * 0.5;
        e.minx = (int) (cx - sx);
        e.maxx = (int) (cx + sx);
        e.minz = (int) (cz - sz);
        e.maxz = (int) (cz + sz);
        return e;
    }

    /**
     * Assuming that the floor (miny) is correct, set the ceiling height.
     *
     * @param n
     */
    public Extent setHeight(int n) {
        Extent e = new Extent(this);
        e.maxy = miny + n;
        return e;
    }

    public boolean contains(IntVector v) {
        return contains(v.x, v.y, v.z);
    }

    public boolean contains(int x, int y, int z) {
        return x >= minx && x <= maxx && y >= miny && y <= maxy && z >= minz
                && z <= maxz;
    }

    /** True if this extent and e overlap in all three axes. */
    public boolean intersects(Extent e) {
        return e.minx <= maxx && e.maxx >= minx &&
                e.miny <= maxy && e.maxy >= miny &&
                e.minz <= maxz && e.maxz >= minz;
    }

    /**
     * Returns true if this extent contains any blocks we shouldn't overwrite.
     * These should be natural blocks, not built ones - it should be possible
     * to overwrite these
     *
     * @return
     */
    public boolean intersectsWorld() {
        World w = Castle.getInstance().getWorld();
        for (int x = minx; x <= maxx; x++) {
            for (int y = miny; y <= maxy; y++) {
                for (int z = minz; z <= maxz; z++) {
                    int typeId = w.getBlockAt(x, y, z).getTypeId();
                    // avoid anything less than block 17 except for 5 (which is
                    // wood, so roofs are ok)
                    if (typeId > 0 && typeId < 17 && typeId != 5)
                        return true;
                }
            }
        }
        return false;
    }

    /**
     * Returns the fraction of this extent filled with "natural" solid blocks
     * (block ids 1..16 except 5).
     *
     * @return
     */
    public double amountOfSoil() {
        World w = Castle.getInstance().getWorld();
        int count = 0; // count of natural blocks
        for (int y = maxy; y >= miny; y--) {
            for (int x = minx; x <= maxx; x++) {
                for (int z = minz; z <= maxz; z++) {
                    int typeId = w.getBlockAt(x, y, z).getTypeId();
                    // avoid anything less than block 17 except for 5 (which is
                    // wood, so roofs are ok)
                    if (typeId > 0 && typeId < 17 && typeId != 5)
                        count++;
                }
            }
        }
        return ((double) count) / ((double) volume());
    }

    /** Integer centre of the extent (rounds towards negative for odd sizes). */
    public IntVector getCentre() {
        return new IntVector((minx + maxx) / 2, (miny + maxy) / 2,
                (minz + maxz) / 2);
    }

    /** Returns a copy translated by (x, y, z). */
    public Extent addvec(int x, int y, int z) {
        Extent e = new Extent(this);
        e.minx += x;
        e.maxx += x;
        e.miny += y;
        e.maxy += y;
        e.minz += z;
        e.maxz += z;
        return e;
    }

    public Extent addvec(IntVector v) {
        return addvec(v.x, v.y, v.z);
    }

    /** Returns a copy translated by (-x, -y, -z). */
    public Extent subvec(int x, int y, int z) {
        Extent e = new Extent(this);
        e.minx -= x;
        e.maxx -= x;
        e.miny -= y;
        e.maxy -= y;
        e.minz -= z;
        e.maxz -= z;
        return e;
    }

    public Extent subvec(IntVector v) {
        return subvec(v.x, v.y, v.z);
    }

    /**
     * Find the maximum height of this extent above the ground
     *
     * @return
     */
    public int getMaxHeightAboveWorld() {
        int maxheight = 0;
        World w = Castle.getInstance().getWorld();
        for (int x = minx; x <= maxx; x++) {
            for (int z = minz; z <= maxz; z++) {
                int h = w.getHighestBlockYAt(x, z);
                int diff = miny - h;
                if (diff > maxheight)
                    maxheight = diff;
            }
        }
        return maxheight;
    }

    /**
     * Find the minimum height of this extent above the ground
     *
     * @return
     */
    public int getMinHeightAboveWorld() {
        int minheight = 1000;
        World w = Castle.getInstance().getWorld();
        for (int x = minx; x <= maxx; x++) {
            for (int z = minz; z <= maxz; z++) {
                int h = w.getHighestBlockYAt(x, z);
                int diff = miny - h;
                if (diff < minheight)
                    minheight = diff;
            }
        }
        return minheight;
    }

    static final int BADHEIGHT = -1000;

    /**
     * Find the height of a square within this extent, ignoring any blocks
     * above the extent. Assumes a completely empty column to be -1000; ditto
     * blocks outside the xz of the extent
     *
     * @param x
     * @param z
     * @return
     */
    public int getHeightWithin(int x, int z) {
        World w = Castle.getInstance().getWorld();
        if (x < minx || x > maxx || z < minz || z > maxz)
            return BADHEIGHT;
        for (int y = maxy; y >= miny; y--) {
            Block b = w.getBlockAt(x, y, z);
            if (b.getType().isSolid())
                return y;
        }
        return BADHEIGHT;
    }

    /**
     * Returns a new extent which is a single block thick, representing one of
     * the walls of this extent.
     *
     * @return
     */
    public Extent getWall(Direction d) {
        Extent e = new Extent(this);
        switch (d) {
        case NORTH:
            e.maxz = e.minz; // remember NORTH is direction of decreasing Z
            break;
        case SOUTH:
            e.minz = e.maxz;
            break;
        case EAST:
            e.minx = e.maxx;
            break;
        case WEST:
            e.maxx = e.minx;
            break;
        case UP:
            e.miny = e.maxy;
            break;
        case DOWN:
            e.maxy = e.miny;
            break;
        }
        return e;
    }

    /**
     * Generate an extent for an extent of given width and height, normal
     * facing in the given direction, with a given depth on each side (so '1'
     * gives a depth of 3.)
     */
    public Extent(Direction d, int w, int h, int depth) {
        miny = 0;
        maxy = h - 1;
        switch (d) {
        case NORTH:
        case SOUTH:
            minx = -w / 2;
            maxx = minx + w - 1;
            miny = 0;
            maxy = h - 1;
            minz = -depth;
            maxz = depth;
            break;
        case EAST:
        case WEST:
            minx = -depth;
            maxx = depth;
            miny = 0;
            maxy = h - 1;
            minz = -w / 2;
            // BUG FIX: was 'minx + w - 1' (minx is -depth here), which made
            // the Z range depend on the depth instead of the width.
            maxz = minz + w - 1;
            break;
        case UP: // for up and down, h is ignored and w is used in both x and z
        case DOWN:
            minx = -w / 2;
            maxx = minx + w - 1;
            miny = -depth;
            maxy = depth;
            minz = -w / 2;
            // was 'minx + w - 1'; numerically identical here since
            // minx == minz, but written against minz for correctness
            maxz = minz + w - 1;
        }
    }

    /** Intersection of two extents, or null if they do not overlap. */
    public Extent intersect(Extent e) {
        if (intersects(e)) {
            Extent out = new Extent();
            out.minx = Math.max(minx, e.minx);
            out.maxx = Math.min(maxx, e.maxx);
            out.miny = Math.max(miny, e.miny);
            out.maxy = Math.min(maxy, e.maxy);
            out.minz = Math.max(minz, e.minz);
            out.maxz = Math.min(maxz, e.maxz);
            out.isset = true;
            return out;
        } else
            return null;
    }

    /**
     * Return true if the passed extent is entirely inside me
     *
     * @param e1
     * @return
     */
    public boolean contains(Extent e1) {
        return e1.minx >= minx && e1.maxx <= maxx && e1.miny >= miny
                && e1.maxy <= maxy && e1.minz >= minz && e1.maxz <= maxz;
    }

    /** True if any block in the extent is of the given material. */
    public boolean containsThis(Material m) {
        World w = Castle.getInstance().getWorld();
        for (int x = this.minx; x <= this.maxx; x++) {
            for (int z = this.minz; z <= this.maxz; z++) {
                for (int y = this.miny; y <= this.maxy; y++) {
                    if (w.getBlockAt(x, y, z).getType() == m) {
                        // early exit: one match is enough
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /** Number of blocks in the extent. */
    public int volume() {
        return (this.xsize() * this.zsize() * this.ysize());
    }

    /**
     * grow the extent along the given direction by the given number of steps;
     * faster than scale-add.
     */
    public Extent growDirection(Direction d, int n) {
        Extent e = new Extent(this);
        switch (d) {
        case EAST:
            e.maxx += n;
            break;
        case WEST:
            e.minx -= n;
            break;
        case NORTH:
            e.minz -= n;
            break;
        case SOUTH:
            e.maxz += n;
            break;
        case UP:
            e.maxy += n;
            break;
        case DOWN:
            e.miny -= n;
            break;
        }
        return e;
    }

    /**
     * Create an iterator which will step along the longest axis of an extent.
     *
     * @return
     */
    public Iterable<IntVector> getVectorIterable(int step, int offset,
            boolean xzOnly) {
        return new VectorIterable(step, offset, xzOnly);
    }

    static final int INTERNAL_CHUNK_SIZE = 32;

    /**
     * Return a 'chunk code' for a given pixel. Nothing to do with MC's chunks,
     * although it was once. NOTE(review): codes can collide for distant
     * coordinates (the x term wraps into the z term every 32 chunk columns) —
     * confirm callers only use this for coarse locality grouping.
     *
     * @param x
     * @param z
     * @return
     */
    public static int getChunkCode(int x, int z) {
        x /= INTERNAL_CHUNK_SIZE;
        z /= INTERNAL_CHUNK_SIZE;
        return x + z * INTERNAL_CHUNK_SIZE;
    }

    /**
     * Build a list of all the chunks in the extent. They're not actually real
     * chunks, just unique codes. My chunks are bigger than MCs. Samples at
     * half-chunk resolution and overscans past maxx/maxz so border chunks are
     * included.
     *
     * @return
     */
    public Set<Integer> getChunks() {
        Set<Integer> chunks = new HashSet<Integer>();
        for (int x = minx; x < maxx + INTERNAL_CHUNK_SIZE; x += INTERNAL_CHUNK_SIZE / 2) {
            for (int z = minz; z < maxz + INTERNAL_CHUNK_SIZE; z += INTERNAL_CHUNK_SIZE / 2) {
                chunks.add(getChunkCode(x, z));
            }
        }
        return chunks;
    }

    /** Invokes e for every (x, y, z) location in the extent. */
    public void runOnAllLocations(LocationRunner e) {
        for (int x = minx; x <= maxx; x++) {
            for (int y = miny; y <= maxy; y++) {
                for (int z = minz; z <= maxz; z++) {
                    e.run(x, y, z);
                }
            }
        }
    }

    /// returns true if all four INTERIOR corners are over water
    public boolean entirelyOnWater() {
        Extent e = this.expand(-1, X | Z);
        Castle c = Castle.getInstance();
        return c.onWater(e.minx, e.minz) &&
                c.onWater(e.maxx, e.minz) &&
                c.onWater(e.maxx, e.maxz) &&
                c.onWater(e.minx, e.maxz);
    }
}
| |
package com.github.sandor_balazs.nosql_java.repository;
import com.datastax.driver.core.*;
import com.datastax.driver.mapping.Mapper;
import com.datastax.driver.mapping.MappingManager;
import com.github.sandor_balazs.nosql_java.domain.User;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Optional;
import org.springframework.stereotype.Repository;
import org.springframework.util.StringUtils;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**
* Cassandra repository for the User entity.
*/
@Repository
public class UserRepository {
@Inject
private Session session;
private Mapper<User> mapper;
private PreparedStatement findAllStmt;
private PreparedStatement findOneByActivationKeyStmt;
private PreparedStatement findOneByResetKeyStmt;
private PreparedStatement insertByActivationKeyStmt;
private PreparedStatement insertByResetKeyStmt;
private PreparedStatement deleteByActivationKeyStmt;
private PreparedStatement deleteByResetKeyStmt;
private PreparedStatement findOneByLoginStmt;
private PreparedStatement insertByLoginStmt;
private PreparedStatement deleteByLoginStmt;
private PreparedStatement findOneByEmailStmt;
private PreparedStatement insertByEmailStmt;
private PreparedStatement deleteByEmailStmt;
@PostConstruct
public void init() {
mapper = new MappingManager(session).mapper(User.class);
findAllStmt = session.prepare("SELECT * FROM user");
findOneByActivationKeyStmt = session.prepare(
"SELECT id " +
"FROM user_by_activation_key " +
"WHERE activation_key = :activation_key");
findOneByResetKeyStmt = session.prepare(
"SELECT id " +
"FROM user_by_reset_key " +
"WHERE reset_key = :reset_key");
insertByActivationKeyStmt = session.prepare(
"INSERT INTO user_by_activation_key (activation_key, id) " +
"VALUES (:activation_key, :id)");
insertByResetKeyStmt = session.prepare(
"INSERT INTO user_by_reset_key (reset_key, id) " +
"VALUES (:reset_key, :id)");
deleteByActivationKeyStmt = session.prepare(
"DELETE FROM user_by_activation_key " +
"WHERE activation_key = :activation_key");
deleteByResetKeyStmt = session.prepare(
"DELETE FROM user_by_reset_key " +
"WHERE reset_key = :reset_key");
findOneByLoginStmt = session.prepare(
"SELECT id " +
"FROM user_by_login " +
"WHERE login = :login");
insertByLoginStmt = session.prepare(
"INSERT INTO user_by_login (login, id) " +
"VALUES (:login, :id)");
deleteByLoginStmt = session.prepare(
"DELETE FROM user_by_login " +
"WHERE login = :login");
findOneByEmailStmt = session.prepare(
"SELECT id " +
"FROM user_by_email " +
"WHERE email = :email");
insertByEmailStmt = session.prepare(
"INSERT INTO user_by_email (email, id) " +
"VALUES (:email, :id)");
deleteByEmailStmt = session.prepare(
"DELETE FROM user_by_email " +
"WHERE email = :email");
}
public User findOne(String id) {
return mapper.get(id);
}
public Optional<User> findOneById(String id) {
return Optional.of(findOne(id));
}
public Optional<User> findOneByActivationKey(String activationKey) {
BoundStatement stmt = findOneByActivationKeyStmt.bind();
stmt.setString("activation_key", activationKey);
return findOneFromIndex(stmt);
}
public Optional<User> findOneByResetKey(String resetKey) {
BoundStatement stmt = findOneByResetKeyStmt.bind();
stmt.setString("reset_key", resetKey);
return findOneFromIndex(stmt);
}
public Optional<User> findOneByEmail(String email) {
BoundStatement stmt = findOneByEmailStmt.bind();
stmt.setString("email", email);
return findOneFromIndex(stmt);
}
public Optional<User> findOneByLogin(String login) {
BoundStatement stmt = findOneByLoginStmt.bind();
stmt.setString("login", login);
return findOneFromIndex(stmt);
}
public List<User> findAll() {
return mapper.map(session.execute(findAllStmt.bind())).all();
}
public User save(User user) {
User oldUser = mapper.get(user.getId());
if (oldUser != null) {
if (!StringUtils.isEmpty(oldUser.getActivationKey()) && !oldUser.getActivationKey().equals(user.getActivationKey())) {
session.execute(deleteByActivationKeyStmt.bind().setString("activation_key", oldUser.getActivationKey()));
}
if (!StringUtils.isEmpty(oldUser.getResetKey()) && !oldUser.getResetKey().equals(user.getResetKey())) {
session.execute(deleteByResetKeyStmt.bind().setString("reset_key", oldUser.getResetKey()));
}
if (!StringUtils.isEmpty(oldUser.getLogin()) && !oldUser.getLogin().equals(user.getLogin())) {
session.execute(deleteByLoginStmt.bind().setString("login", oldUser.getLogin()));
}
if (!StringUtils.isEmpty(oldUser.getEmail()) && !oldUser.getEmail().equals(user.getEmail())) {
session.execute(deleteByEmailStmt.bind().setString("email", oldUser.getEmail()));
}
}
BatchStatement batch = new BatchStatement();
batch.add(mapper.saveQuery(user));
if (!StringUtils.isEmpty(user.getActivationKey())) {
batch.add(insertByActivationKeyStmt.bind()
.setString("activation_key", user.getActivationKey())
.setString("id", user.getId()));
}
if (!StringUtils.isEmpty(user.getResetKey())) {
batch.add(insertByResetKeyStmt.bind()
.setString("reset_key", user.getResetKey())
.setString("id", user.getId()));
}
batch.add(insertByLoginStmt.bind()
.setString("login", user.getLogin())
.setString("id", user.getId()));
batch.add(insertByEmailStmt.bind()
.setString("email", user.getEmail())
.setString("id", user.getId()));
session.execute(batch);
return user;
}
public void delete(User user) {
BatchStatement batch = new BatchStatement();
batch.add(mapper.deleteQuery(user));
if (!StringUtils.isEmpty(user.getActivationKey())) {
batch.add(deleteByActivationKeyStmt.bind().setString("activation_key", user.getActivationKey()));
}
if (!StringUtils.isEmpty(user.getResetKey())) {
batch.add(deleteByResetKeyStmt.bind().setString("reset_key", user.getResetKey()));
}
batch.add(deleteByLoginStmt.bind().setString("login", user.getLogin()));
batch.add(deleteByEmailStmt.bind().setString("email", user.getEmail()));
session.execute(batch);
}
private Optional<User> findOneFromIndex(BoundStatement stmt) {
ResultSet rs = session.execute(stmt);
if (rs.isExhausted()) {
return Optional.empty();
}
return Optional.ofNullable(rs.one().getString("id"))
.map(id -> Optional.ofNullable(mapper.get(id)))
.get();
}
}
| |
/*
* OfficeFloor - http://www.officefloor.net
* Copyright (C) 2005-2018 Daniel Sagenschneider
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package net.officefloor.benchmark;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.nio.ByteBuffer;
import java.nio.channels.CancelledKeyException;
import java.nio.channels.ClosedChannelException;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import org.apache.commons.text.StringEscapeUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.module.afterburner.AfterburnerModule;
import io.r2dbc.pool.PoolingConnectionFactoryProvider;
import io.r2dbc.spi.Batch;
import io.r2dbc.spi.Connection;
import io.r2dbc.spi.ConnectionFactories;
import io.r2dbc.spi.ConnectionFactory;
import io.r2dbc.spi.ConnectionFactoryOptions;
import io.r2dbc.spi.R2dbcTransientResourceException;
import lombok.AllArgsConstructor;
import lombok.Data;
import net.officefloor.benchmark.RawOfficeFloorMain.Fortune;
import net.officefloor.benchmark.RawOfficeFloorMain.Message;
import net.officefloor.benchmark.RawOfficeFloorMain.World;
import net.officefloor.frame.api.manage.OfficeFloor;
import net.officefloor.frame.api.manage.ProcessManager;
import net.officefloor.frame.api.managedobject.ManagedObjectContext;
import net.officefloor.frame.api.managedobject.ProcessSafeOperation;
import net.officefloor.frame.api.managedobject.pool.ThreadCompletionListener;
import net.officefloor.server.RequestHandler;
import net.officefloor.server.SocketManager;
import net.officefloor.server.SocketServicer;
import net.officefloor.server.http.AbstractHttpServicerFactory;
import net.officefloor.server.http.HttpHeaderName;
import net.officefloor.server.http.HttpHeaderValue;
import net.officefloor.server.http.HttpRequest;
import net.officefloor.server.http.HttpResponse;
import net.officefloor.server.http.HttpResponseHeaders;
import net.officefloor.server.http.HttpServerLocation;
import net.officefloor.server.http.HttpServerSocketManagedObjectSource;
import net.officefloor.server.http.HttpStatus;
import net.officefloor.server.http.ServerHttpConnection;
import net.officefloor.server.http.impl.HttpServerLocationImpl;
import net.officefloor.server.http.impl.ProcessAwareServerHttpConnectionManagedObject;
import net.officefloor.server.http.parse.HttpRequestParser;
import net.officefloor.server.http.parse.HttpRequestParser.HttpRequestParserMetaData;
import net.officefloor.server.stream.ServerWriter;
import net.officefloor.server.stream.impl.ThreadLocalStreamBufferPool;
import net.officefloor.web.executive.CpuCore;
import net.officefloor.web.executive.CpuCore.LogicalCpu;
import net.openhft.affinity.Affinity;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;
import net.officefloor.web.executive.CpuCore;
import net.openhft.affinity.Affinity;
/**
* <p>
* {@link SocketManager} raw performance.
* <p>
* Allows determining the overhead of the {@link OfficeFloor} framework.
*/
public class RawOfficeFloorMain {
/**
* Database query load capacity to handle validation load.
*/
private static final int QUERY_LOAD_CAPACITY = 512 * (20 + 1); // update 20 selects then batch
/**
* Buffer size of queries.
*/
private static final int QUERY_BUFFER_SIZE = 512;
/**
* {@link SocketManager}.
*/
public static SocketManager socketManager = null;
/**
* {@link Logger}.
*/
private static Logger logger = Logger.getLogger(RawOfficeFloorMain.class.getName());
/**
* Run application.
*/
/**
 * Boots the raw benchmark server: binds one CPU-affine thread per logical
 * CPU, sizes an R2DBC PostgreSQL connection pool to the expected query load,
 * binds the HTTP server socket, starts a once-per-second Date-header updater,
 * then launches the socket-servicing threads. Order matters: the socket
 * manager must exist before the server socket is bound, and threads start
 * last.
 */
public static void main(String[] args) throws Exception {

    // Obtain the port from properties (first CLI arg wins, default 8080)
    int port = args.length > 0 ? Integer.parseInt(args[0]) : 8080;

    // Ensure previous socket manager shutdown (typically from tests)
    if (socketManager != null) {
        socketManager.shutdown();
    }

    // Indicate details
    String server = System.getProperty("OFFICE.net_officefloor_jdbc_DataSourceManagedObjectSource.server",
            "tfb-database");
    System.out.println("Starting server on port " + port + " talking to database " + server);

    // Increase the buffer size (note: too high and cause OOM issues)
    System.setProperty("reactor.bufferSize.small", String.valueOf(QUERY_BUFFER_SIZE));

    // Create the server location
    HttpServerLocation serverLocation = new HttpServerLocationImpl("localhost", port, -1);

    // Create a thread factory per logical CPU
    // (one-element array lets the lambda below capture a value assigned later)
    ThreadCompletionListener[] threadCompletionListenerCapture = new ThreadCompletionListener[] { null };
    List<ThreadFactory> threadFactories = new LinkedList<>();
    for (CpuCore cpuCore : CpuCore.getCores()) {
        for (CpuCore.LogicalCpu logicalCpu : cpuCore.getCpus()) {

            // Create thread factory for logical CPU
            ThreadFactory boundThreadFactory = (runnable) -> new Thread(() -> {
                ThreadLocalStreamBufferPool bufferPool = (ThreadLocalStreamBufferPool) threadCompletionListenerCapture[0];
                try {
                    // Bind thread to logical CPU
                    Affinity.setAffinity(logicalCpu.getCpuAffinity());

                    // Set up for thread local buffer pooling
                    bufferPool.activeThreadLocalPooling();

                    // Run logic for thread
                    runnable.run();
                } finally {
                    // always release the thread's pooled buffers
                    bufferPool.threadComplete();
                }
            });

            // Add the thread factory
            threadFactories.add(boundThreadFactory);
        }
    }
    ThreadFactory[] executionStrategy = threadFactories.toArray(new ThreadFactory[0]);
    System.out.println("Using " + executionStrategy.length + " executors");

    // Create the socket manager
    socketManager = HttpServerSocketManagedObjectSource.createSocketManager(executionStrategy,
            (threadCompletionListener) -> threadCompletionListenerCapture[0] = threadCompletionListener);

    // Must have enough connection capacity for initial load (+1 for rounding)
    int requiredConnectionsPerSocket = (QUERY_LOAD_CAPACITY / (executionStrategy.length * QUERY_BUFFER_SIZE)) + 1;
    int connectionsPerSocket = Math.max(4, requiredConnectionsPerSocket);
    System.out.println("Using " + connectionsPerSocket + " connections per socket");

    // Determine the pool size for connections
    int connectionPoolSize = executionStrategy.length * connectionsPerSocket;

    // Build the connection pool (r2dbc pooled PostgreSQL connection factory)
    ConnectionFactoryOptions factoryOptions = ConnectionFactoryOptions.builder()
            .option(ConnectionFactoryOptions.DRIVER, "pool").option(ConnectionFactoryOptions.PROTOCOL, "postgresql")
            .option(ConnectionFactoryOptions.HOST, server).option(ConnectionFactoryOptions.PORT, 5432)
            .option(ConnectionFactoryOptions.DATABASE, "hello_world")
            .option(ConnectionFactoryOptions.USER, "benchmarkdbuser")
            .option(ConnectionFactoryOptions.PASSWORD, "benchmarkdbpass")
            .option(PoolingConnectionFactoryProvider.MAX_SIZE, connectionPoolSize).build();
    ConnectionFactory connectionFactory = ConnectionFactories.get(factoryOptions);

    // Create raw HTTP servicing
    RawHttpServicerFactory serviceFactory = new RawHttpServicerFactory(serverLocation, connectionFactory,
            connectionsPerSocket);
    socketManager.bindServerSocket(serverLocation.getClusterHttpPort(), null, null, serviceFactory, serviceFactory);

    // Setup Date header refresh (HTTP Date has one-second resolution)
    ScheduledExecutorService dateTimer = Executors.newScheduledThreadPool(1);
    dateTimer.scheduleAtFixedRate(serviceFactory.updateDate, 0, 1, TimeUnit.SECONDS);

    // Start servicing: one CPU-bound thread per socket-manager runnable
    Runnable[] runnables = socketManager.getRunnables();
    for (int i = 0; i < runnables.length; i++) {
        executionStrategy[i].newThread(runnables[i]).start();
    }
    Thread.sleep(1000); // allow threads to start up

    // Indicate running
    System.out.println("OfficeFloor raw running on port " + serverLocation.getClusterHttpPort());
}
/**
 * Raw {@link AbstractHttpServicerFactory} servicing the benchmark requests
 * (/plaintext, /json, /db, /queries, /fortunes, /update) directly on the
 * socket servicing threads, writing responses back via a per-thread
 * {@link RateLimit} of pooled R2DBC {@link Connection} instances.
 */
private static class RawHttpServicerFactory extends AbstractHttpServicerFactory {

	// Reusable header names/values (avoids re-encoding per request)
	private static HttpHeaderName NAME_SERVER = new HttpHeaderName("Server");

	private static HttpHeaderValue VALUE_SERVER = new HttpHeaderValue("O");

	private static HttpHeaderName NAME_DATE = new HttpHeaderName("Date");

	// Pre-encoded entity for /plaintext
	private static byte[] HELLO_WORLD = "Hello, World!".getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET);

	private static final HttpHeaderValue APPLICATION_JSON = new HttpHeaderValue("application/json");

	private static final HttpHeaderValue TEXT_PLAIN = new HttpHeaderValue("text/plain");

	private static final HttpHeaderValue TEXT_HTML = new HttpHeaderValue("text/html;charset=utf-8");

	// URI prefixes used by service(...) to route parameterised requests
	private static final String QUERIES_PATH_PREFIX = "/queries?queries=";

	private static final String UPDATE_PATH_PREFIX = "/update?queries=";

	// Pre-encoded HTML fragments for rendering the /fortunes page
	private static final byte[] TEMPLATE_START = "<!DOCTYPE html><html><head><title>Fortunes</title></head><body><table><tr><th>id</th><th>message</th></tr>"
			.getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET);

	private static final byte[] FORTUNE_START = "<tr><td>".getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET);

	private static final byte[] FORTUNE_MIDDLE = "</td><td>".getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET);

	private static final byte[] FORTUNE_END = "</td></tr>".getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET);

	private static final byte[] TEMPLATE_END = "</table></body></html>"
			.getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET);

	// Singleton exception flagging rate limiting (sendError maps it to 503)
	private static final R2dbcTransientResourceException THROTTLED = new R2dbcTransientResourceException();

	/**
	 * <code>Date</code> {@link HttpHeaderValue}. Volatile: written by the
	 * timer thread (via {@link #updateDate}) and read by socket threads.
	 */
	private volatile HttpHeaderValue dateHttpHeader;

	// Refreshes dateHttpHeader with the current RFC 1123 timestamp
	private final Runnable updateDate = () -> {
		String now = DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now(ZoneOffset.UTC));
		RawHttpServicerFactory.this.dateHttpHeader = new HttpHeaderValue(now);
	};

	/**
	 * {@link ObjectMapper} used to render JSON payloads.
	 */
	private final ObjectMapper objectMapper = new ObjectMapper();

	/**
	 * {@link ManagedObjectContext} that runs operations inline (no extra
	 * process-safety wrapping).
	 */
	private static ManagedObjectContext managedObjectContext = new ManagedObjectContext() {

		@Override
		public String getBoundName() {
			return RawOfficeFloorMain.class.getSimpleName();
		}

		@Override
		public Logger getLogger() {
			return logger;
		}

		@Override
		public <R, T extends Throwable> R run(ProcessSafeOperation<R, T> operation) throws T {
			return operation.run();
		}
	};

	/**
	 * {@link ConnectionFactory} for database {@link Connection} instances.
	 */
	private final ConnectionFactory connectionFactory;

	/**
	 * {@link ThreadLocal} {@link Connection} instances. Each socket thread
	 * lazily creates (and blocks for) its own fixed set of connections.
	 */
	private final ThreadLocal<Connection[]> threadLocalConnections;

	/**
	 * {@link ThreadLocal} {@link RateLimit} tracking in-flight query load per
	 * connection for the current socket thread.
	 */
	private final ThreadLocal<RateLimit> threadLocalRateLimit = new ThreadLocal<RateLimit>();

	/**
	 * Instantiate.
	 *
	 * @param serverLocation       {@link HttpServerLocation}.
	 * @param connectionFactory    {@link ConnectionFactory}.
	 * @param connectionsPerSocket Number of DB connections per socket.
	 */
	public RawHttpServicerFactory(HttpServerLocation serverLocation, ConnectionFactory connectionFactory,
			int connectionsPerSocket) {
		super(serverLocation, false, new HttpRequestParserMetaData(100, 1000, 1000000), null, null, true);
		this.objectMapper.registerModule(new AfterburnerModule());
		this.connectionFactory = connectionFactory;

		// Create thread local connection
		this.threadLocalConnections = new ThreadLocal<Connection[]>() {
			@Override
			protected Connection[] initialValue() {
				// Blocks until every connection for this thread is established
				Connection[] connections = new Connection[connectionsPerSocket];
				for (int i = 0; i < connections.length; i++) {
					connections[i] = Mono.from(RawHttpServicerFactory.this.connectionFactory.create()).block();
				}
				return connections;
			}
		};
	}

	/**
	 * Sends the {@link HttpResponse}.
	 *
	 * @param connection {@link ServerHttpConnection}.
	 * @throws IOException If fails to send.
	 */
	protected void send(ProcessAwareServerHttpConnectionManagedObject<ByteBuffer> connection) throws IOException {
		try {
			connection.getServiceFlowCallback().run(null);
		} catch (IOException ex) {
			// Propagate I/O failures unwrapped
			throw ex;
		} catch (Throwable ex) {
			// Wrap any other failure as I/O failure
			throw new IOException(ex);
		}
	}

	/*
	 * =============== SocketServicerFactory =================
	 */

	@Override
	public SocketServicer<HttpRequestParser> createSocketServicer(
			RequestHandler<HttpRequestParser> requestHandler) {

		// Ensure rate limits for socket servicing thread
		// Note: will always create before servicing any requests
		if (this.threadLocalRateLimit.get() == null) {
			Connection[] connections = this.threadLocalConnections.get();
			RateLimit rateLimit = new RateLimit(requestHandler, connections);
			this.threadLocalRateLimit.set(rateLimit);
		}

		// Continue on to create socket servicer
		return super.createSocketServicer(requestHandler);
	}

	/*
	 * ===================== HttpServicer ====================
	 */

	@Override
	protected ProcessManager service(ProcessAwareServerHttpConnectionManagedObject<ByteBuffer> connection)
			throws IOException {

		// Configure context
		connection.setManagedObjectContext(managedObjectContext);

		// Service the connection
		HttpRequest request = connection.getRequest();
		HttpResponse response = connection.getResponse();

		// Provider Server and Date
		HttpResponseHeaders headers = response.getHeaders();
		headers.addHeader(NAME_SERVER, VALUE_SERVER);
		headers.addHeader(NAME_DATE, this.dateHttpHeader);

		// Determine request (exact paths first, then prefixed paths)
		String requestUri = request.getUri();
		switch (requestUri) {
		case "/plaintext":
			this.plaintext(response, connection);
			break;
		case "/json":
			this.json(response, connection);
			break;
		case "/db":
			this.db(response, connection);
			break;
		case "/fortunes":
			this.fortunes(response, connection);
			break;
		default:
			// Match parameterised paths by prefix
			if (requestUri.startsWith(QUERIES_PATH_PREFIX)) {
				this.queries(requestUri, response, connection);
			} else if (requestUri.startsWith(UPDATE_PATH_PREFIX)) {
				this.update(requestUri, response, connection);
			} else {
				// Unknown request
				response.setStatus(HttpStatus.NOT_FOUND);
				this.send(connection);
			}
			break;
		}

		// No process management
		return null;
	}

	// Services /plaintext with the pre-encoded "Hello, World!" entity
	private void plaintext(HttpResponse response,
			ProcessAwareServerHttpConnectionManagedObject<ByteBuffer> connection) throws IOException {
		response.setContentType(TEXT_PLAIN, null);
		response.getEntity().write(HELLO_WORLD);
		this.send(connection);
	}

	// Services /json with a serialised Message
	private void json(HttpResponse response, ProcessAwareServerHttpConnectionManagedObject<ByteBuffer> connection)
			throws IOException {
		response.setContentType(APPLICATION_JSON, null);
		this.objectMapper.writeValue(response.getEntityWriter(), new Message("Hello, World!"));
		this.send(connection);
	}

	// Services /db: selects one random WORLD row and writes it as JSON.
	// Capacity of 1 query is reserved up front and released on completion.
	private void db(HttpResponse response, ProcessAwareServerHttpConnectionManagedObject<ByteBuffer> connection) {

		// Determine if will overload queries
		RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(1);
		if (conn == null) {
			this.sendError(connection, THROTTLED);
			return; // rate limited
		}

		// Service (response written on socket thread via writeScheduler)
		Mono.from(conn.connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1")
				.bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute())
				.flatMap(result -> Mono.from(result.map((row, metadata) -> {
					Integer id = row.get(0, Integer.class);
					Integer number = row.get(1, Integer.class);
					return new World(id, number);
				}))).publishOn(conn.writeScheduler).subscribe(world -> {
					try {
						response.setContentType(APPLICATION_JSON, null);
						this.objectMapper.writeValue(response.getEntityWriter(), world);
						this.send(connection);
					} catch (CancelledKeyException | ClosedChannelException ex) {
						// Ignore as disconnecting client
					} catch (IOException ex) {
						ex.printStackTrace();
					}
				}, error -> {
					this.sendError(connection, error);
				}, () -> {
					// Release reserved query capacity
					conn.processed(1);
				});
	}

	// Services /queries?queries=N: selects N random WORLD rows as JSON array
	private void queries(String requestUri, HttpResponse response,
			ProcessAwareServerHttpConnectionManagedObject<ByteBuffer> connection) {

		// Obtain the number of queries
		String queriesCountText = requestUri.substring(QUERIES_PATH_PREFIX.length());
		int queryCount = getQueryCount(queriesCountText);

		// Determine if will overload queries
		RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(queryCount);
		if (conn == null) {
			this.sendError(connection, THROTTLED);
			return; // rate limited
		}

		// Service
		Flux.range(1, queryCount)
				.flatMap(
						index -> conn.connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1")
								.bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute())
				.flatMap(result -> Flux.from(result.map((row, metadata) -> {
					Integer id = row.get(0, Integer.class);
					Integer number = row.get(1, Integer.class);
					return new World(id, number);
				}))).collectList().publishOn(conn.writeScheduler).subscribe(worlds -> {
					try {
						response.setContentType(APPLICATION_JSON, null);
						this.objectMapper.writeValue(response.getEntityWriter(), worlds);
						this.send(connection);
					} catch (CancelledKeyException | ClosedChannelException ex) {
						// Ignore as disconnecting client
					} catch (IOException ex) {
						ex.printStackTrace();
					}
				}, error -> {
					this.sendError(connection, error);
				}, () -> {
					// Release reserved query capacity
					conn.processed(queryCount);
				});
	}

	// Services /fortunes: renders all FORTUNE rows (plus one added at request
	// time) as an escaped, message-sorted HTML table
	private void fortunes(HttpResponse response,
			ProcessAwareServerHttpConnectionManagedObject<ByteBuffer> connection) {

		// Determine if will overload queries
		RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(1);
		if (conn == null) {
			this.sendError(connection, THROTTLED);
			return; // rate limited
		}

		// Service
		Flux.from(conn.connection.createStatement("SELECT ID, MESSAGE FROM FORTUNE").execute())
				.flatMap(result -> Flux.from(result.map((row, metadata) -> {
					Integer id = row.get(0, Integer.class);
					String message = row.get(1, String.class);
					return new Fortune(id, message);
				}))).collectList().publishOn(conn.writeScheduler).subscribe(fortunes -> {
					try {

						// Additional fortunes
						fortunes.add(new Fortune(0, "Additional fortune added at request time."));
						Collections.sort(fortunes, (a, b) -> a.message.compareTo(b.message));

						// Send response
						response.setContentType(TEXT_HTML, null);
						ServerWriter writer = response.getEntityWriter();
						writer.write(TEMPLATE_START);
						for (Fortune fortune : fortunes) {
							writer.write(FORTUNE_START);
							int id = fortune.getId();
							writer.write(Integer.valueOf(id).toString());
							writer.write(FORTUNE_MIDDLE);
							// Escape the message to avoid HTML injection
							StringEscapeUtils.ESCAPE_HTML4.translate(fortune.getMessage(), writer);
							writer.write(FORTUNE_END);
						}
						writer.write(TEMPLATE_END);
						this.send(connection);
					} catch (CancelledKeyException | ClosedChannelException ex) {
						// Ignore as disconnecting client
					} catch (IOException ex) {
						ex.printStackTrace();
					}
				}, error -> {
					this.sendError(connection, error);
				}, () -> {
					// Release reserved query capacity
					conn.processed(1);
				});
	}

	// Services /update?queries=N: selects N random WORLD rows, assigns new
	// random numbers and updates them in a single batch (sorted by id)
	private void update(String requestUri, HttpResponse response,
			ProcessAwareServerHttpConnectionManagedObject<ByteBuffer> connection) {

		// Obtain the number of queries
		String queriesCountText = requestUri.substring(UPDATE_PATH_PREFIX.length());
		int queryCount = getQueryCount(queriesCountText);
		int executeQueryCount = queryCount + 1; // select all and update

		// Determine if will overload queries
		RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(executeQueryCount);
		if (conn == null) {
			this.sendError(connection, THROTTLED);
			return; // rate limited
		}

		// Service
		Flux.range(1, queryCount)
				.flatMap(
						index -> conn.connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1")
								.bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute())
				.flatMap(result -> Flux.from(result.map((row, metadata) -> {
					Integer id = row.get(0, Integer.class);
					Integer number = row.get(1, Integer.class);
					return new World(id, number);
				}))).collectList().flatMap(worlds -> {
					// Sort by id before updating
					// NOTE(review): presumably to keep a consistent update
					// order across concurrent requests - confirm
					Collections.sort(worlds, (a, b) -> a.id - b.id);
					Batch batch = conn.connection.createBatch();
					for (World world : worlds) {
						world.randomNumber = ThreadLocalRandom.current().nextInt(1, 10001);
						batch.add("UPDATE WORLD SET RANDOMNUMBER = " + world.randomNumber + " WHERE ID = "
								+ world.id);
					}
					return Mono.from(batch.execute()).map((result) -> worlds);
				}).publishOn(conn.writeScheduler).subscribe(worlds -> {
					try {
						response.setContentType(APPLICATION_JSON, null);
						this.objectMapper.writeValue(response.getEntityWriter(), worlds);
						this.send(connection);
					} catch (CancelledKeyException | ClosedChannelException ex) {
						// Ignore as disconnecting client
					} catch (IOException ex) {
						ex.printStackTrace();
					}
				}, error -> {
					this.sendError(connection, error);
				}, () -> {
					// Release reserved query capacity
					conn.processed(executeQueryCount);
				});
	}

	/**
	 * Sends an error response: 503 for rate limiting, otherwise 500 with the
	 * stack trace as the plain-text entity.
	 */
	private void sendError(ProcessAwareServerHttpConnectionManagedObject<ByteBuffer> connection,
			Throwable failure) {
		try {

			// Setup to send response
			HttpResponse response = connection.getResponse();
			response.reset();

			// Determine type of error
			if (failure instanceof R2dbcTransientResourceException) {
				// Indicate overloaded
				response.setStatus(HttpStatus.SERVICE_UNAVAILABLE);

			} else {
				// Provide details of failure
				response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR);
				response.setContentType(TEXT_PLAIN, null);
				failure.printStackTrace(new PrintWriter(response.getEntityWriter()));
			}

			// Send error response
			this.send(connection);

		} catch (CancelledKeyException | ClosedChannelException ex) {
			// Ignore as disconnecting client
		} catch (IOException ex) {
			ex.printStackTrace();
		}
	}

	/**
	 * Parses the query count, clamping to [1, 500] and defaulting to 1 on
	 * invalid input.
	 */
	private static int getQueryCount(String queries) {
		try {
			int count = Integer.parseInt(queries);
			return (count < 1) ? 1 : (count > 500) ? 500 : count;
		} catch (NumberFormatException ex) {
			return 1;
		}
	}
}
/**
 * Per-socket-thread accounting of in-flight query load across the thread's
 * database connections. Not thread-safe: accessed only from its owning
 * socket servicing thread.
 */
private static class RateLimit {

	/** Connections wrapped with their in-flight query counters. */
	private final RateLimitedConnection[] rateLimitedConnections;

	/** Marshals work back onto the socket servicing thread. */
	private final Executor socketExecutor;

	private RateLimit(RequestHandler<HttpRequestParser> requestHandler, Connection[] connections) {

		// All response writes are pushed back onto the socket thread
		this.socketExecutor = (task) -> requestHandler.execute(() -> task.run());
		Scheduler writeScheduler = Schedulers.fromExecutor(this.socketExecutor);

		// Wrap each connection with rate limit accounting
		int connectionCount = connections.length;
		this.rateLimitedConnections = new RateLimitedConnection[connectionCount];
		for (int index = 0; index < connectionCount; index++) {
			this.rateLimitedConnections[index] = new RateLimitedConnection(connections[index], writeScheduler);
		}
	}

	/**
	 * Reserves capacity for the query count on the first connection able to
	 * take the load, or returns <code>null</code> if all are saturated.
	 */
	private RateLimitedConnection getAvailableConnection(int queryCount) {

		// Find first connection with spare capacity
		for (RateLimitedConnection candidate : this.rateLimitedConnections) {
			int loadedCount = candidate.activeQueries + queryCount;
			if (loadedCount <= QUERY_BUFFER_SIZE) {
				// Reserve the capacity on this connection
				candidate.activeQueries = loadedCount;
				return candidate;
			}
		}

		// No connection can take the load (caller should throttle)
		return null;
	}
}
/**
 * A database connection paired with its write-back {@link Scheduler} and a
 * counter of currently in-flight queries (maintained by {@link RateLimit}).
 */
private static class RateLimitedConnection {

	/** Publishes results back on the owning socket thread. */
	private final Scheduler writeScheduler;

	/** Underlying database connection. */
	private final Connection connection;

	/** Queries currently reserved/in flight on this connection. */
	private int activeQueries;

	private RateLimitedConnection(Connection connection, Scheduler writeScheduler) {
		this.writeScheduler = writeScheduler;
		this.connection = connection;
	}

	/**
	 * Releases the capacity that was reserved for a completed request.
	 */
	private void processed(int queryCount) {
		this.activeQueries -= queryCount;
	}
}
/**
 * JSON payload for the /json request (Lombok {@code @Data} generates the
 * constructor, getter, equals/hashCode and toString).
 */
@Data
public static class Message {

	// Message text serialised as the "message" JSON property
	private final String message;
}
/**
 * Row of the WORLD table, serialised as JSON for /db, /queries and /update.
 */
@Data
@AllArgsConstructor
public static class World {

	// Primary key (immutable)
	private final int id;

	// Random value; reassigned by the /update handler before batching
	private int randomNumber;
}
/**
 * Row of the FORTUNE table, rendered into the /fortunes HTML table.
 */
@Data
public static class Fortune {

	// Primary key
	private final int id;

	// Fortune text (HTML-escaped when rendered)
	private final String message;
}
}
| |
// DBPort.java
/**
* Copyright (C) 2008 10gen Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb;
import java.io.*;
import java.net.*;
import java.util.*;
import java.util.logging.*;
import org.bson.*;
import com.mongodb.util.*;
/**
 * A single TCP connection to a mongo server, providing synchronized
 * request/response messaging with automatic connect retry.
 * <p>
 * All messaging methods are synchronized: one request/response at a time per
 * port. Instances are typically managed by a {@link DBPortPool}.
 */
public class DBPort {

    /** Default MongoDB server port. */
    public static final int PORT = 27017;

    /** Nagle's algorithm is disabled (TCP_NODELAY set) for lower latency. */
    static final boolean USE_NAGLE = false;

    /** Maximum total time (ms) spent retrying the initial connect. */
    static final long CONN_RETRY_TIME_MS = 15000;

    /**
     * Creates a stand-alone (un-pooled) port with default options.
     *
     * @param addr address of the mongo server
     */
    public DBPort( InetSocketAddress addr ){
        this( addr , null , new MongoOptions() );
    }

    DBPort( InetSocketAddress addr , DBPortPool pool , MongoOptions options ){
        _options = options;
        _addr = addr;
        _pool = pool;

        _hashCode = _addr.hashCode();

        _logger = Logger.getLogger( _rootLogger.getName() + "." + addr.toString() );
    }

    /**
     * Sends the message and reads its response.
     *
     * @param response will get wiped
     */
    Response call( OutMessage msg , DBCollection coll )
        throws IOException {
        return go( msg , coll );
    }

    /**
     * Sends the message without expecting a response.
     */
    void say( OutMessage msg )
        throws IOException {
        go( msg , null );
    }

    private synchronized Response go( OutMessage msg , DBCollection coll )
        throws IOException {
        return go( msg , coll , false );
    }

    /**
     * Core send (and optionally receive) path.
     *
     * @param msg          message to pipe to the server
     * @param coll         collection a response is read for (may be null)
     * @param forceReponse read a response even when coll is null
     * @return the response, or null when none is expected
     * @throws IOException on any socket failure (the port is closed first)
     */
    private synchronized Response go( OutMessage msg , DBCollection coll , boolean forceReponse )
        throws IOException {

        // Guard against interleaving a request while still reading a response
        if ( _processingResponse ){
            if ( coll == null ){
                // this could be a pipeline and should be safe
            }
            else {
                // this could cause issues since we're reading data off the wire
                throw new IllegalStateException( "DBPort.go called and expecting a response while processing another response" );
            }
        }

        _calls++;

        if ( _socket == null )
            _open();

        if ( _out == null )
            throw new IllegalStateException( "_out shouldn't be null" );

        try {
            msg.prepare();
            msg.pipe( _out );

            if ( _pool != null )
                _pool._everWorked = true;

            if ( coll == null && ! forceReponse )
                return null;

            _processingResponse = true;
            return new Response( coll , _in , _decoder);
        }
        catch ( IOException ioe ){
            // Socket state is unknown after an I/O failure - drop it
            close();
            throw ioe;
        }
        finally {
            _processingResponse = false;
        }
    }

    /**
     * Runs getlasterror for the write concern on this port's connection.
     */
    synchronized CommandResult getLastError( DB db , WriteConcern concern){
        DBApiLayer dbAL = (DBApiLayer) db;
        return runCommand( dbAL , concern.getCommand() );
    }

    /**
     * Runs a single-result query against the named collection.
     */
    synchronized DBObject findOne( DB db , String coll , DBObject q ){
        OutMessage msg = OutMessage.query( db._mongo , 0 , db.getName() + "." + coll , 0 , -1 , q , null );

        try {
            Response res = go( msg , db.getCollection( coll ) );
            if ( res.size() == 0 )
                return null;
            if ( res.size() > 1 )
                throw new MongoInternalException( "something is wrong. size:" + res.size() );
            return res.get(0);
        }
        catch ( IOException ioe ){
            throw new MongoInternalException( "DBPort.findOne failed" , ioe );
        }
    }

    /**
     * Runs a command on the given database (via its $cmd collection).
     */
    synchronized CommandResult runCommand( DB db , DBObject cmd ) {
        DBObject res = findOne( db , "$cmd" , cmd );
        if ( res == null )
            throw new MongoInternalException( "something is wrong, no command result" );
        return (CommandResult)res;
    }

    /**
     * Runs a single-result query against a fully-qualified namespace,
     * always forcing a response read.
     */
    synchronized DBObject findOne( String ns , DBObject q ){
        OutMessage msg = OutMessage.query( null , 0 , ns , 0 , -1 , q , null );

        try {
            Response res = go( msg , null , true );
            if ( res.size() == 0 )
                return null;
            if ( res.size() > 1 )
                throw new MongoInternalException( "something is wrong. size:" + res.size() );
            return res.get(0);
        }
        catch ( IOException ioe ){
            throw new MongoInternalException( "DBPort.findOne failed" , ioe );
        }
    }

    /**
     * Runs a command on the named database, copying the raw result into a
     * {@link CommandResult}.
     */
    synchronized CommandResult runCommand( String db , DBObject cmd ) {
        DBObject res = findOne( db + ".$cmd" , cmd );
        if ( res == null )
            throw new MongoInternalException( "something is wrong, no command result" );
        CommandResult cr = new CommandResult();
        cr.putAll( res );
        return cr;
    }

    /**
     * Runs getlasterror only if no other call has happened on this port
     * since {@code last}; otherwise returns null.
     */
    synchronized CommandResult tryGetLastError( DB db , long last, WriteConcern concern){
        if ( last != _calls )
            return null;

        return getLastError( db , concern );
    }

    /**
     * Ensures the socket is connected, opening it if required.
     */
    public synchronized void ensureOpen()
        throws IOException {

        if ( _socket != null )
            return;

        _open();
    }

    /**
     * Opens the socket, retrying with exponential back-off (up to
     * {@link #CONN_RETRY_TIME_MS}) when auto connect retry is enabled and
     * the pool has worked before.
     *
     * @return true once connected
     * @throws IOException when the connect (and any retries) fail
     */
    boolean _open()
        throws IOException {

        long sleepTime = 100;

        final long start = System.currentTimeMillis();
        while ( true ){

            IOException lastError = null;

            try {
                _socket = new Socket();
                _socket.connect( _addr , _options.connectTimeout );

                _socket.setTcpNoDelay( ! USE_NAGLE );
                _socket.setSoTimeout( _options.socketTimeout );
                _in = new BufferedInputStream( _socket.getInputStream() );
                _out = _socket.getOutputStream();
                return true;
            }
            catch ( IOException ioe ){
                // Preserve the underlying failure as the cause for diagnostics
                lastError = new IOException( "couldn't connect to [" + _addr + "] bc:" + ioe );
                lastError.initCause( ioe );
                _logger.log( Level.INFO , "connect fail to : " + _addr , ioe );
                close();
            }

            if ( ! _options.autoConnectRetry || ( _pool != null && ! _pool._everWorked ) )
                throw lastError;

            long sleptSoFar = System.currentTimeMillis() - start;

            if ( sleptSoFar >= CONN_RETRY_TIME_MS )
                throw lastError;

            if ( sleepTime + sleptSoFar > CONN_RETRY_TIME_MS )
                sleepTime = CONN_RETRY_TIME_MS - sleptSoFar;

            // Fix: total sleep time after this retry is slept-so-far plus the
            // upcoming sleep (was incorrectly sleptSoFar + sleptSoFar)
            _logger.severe( "going to sleep and retry. total sleep time after = " + ( sleptSoFar + sleepTime ) + "ms this time:" + sleepTime + "ms" );
            ThreadUtil.sleep( sleepTime );
            sleepTime *= 2;
        }
    }

    public int hashCode(){
        return _hashCode;
    }

    /**
     * @return the server address this port talks to
     */
    public String host(){
        return _addr.toString();
    }

    public String toString(){
        return "{DBPort " + host() + "}";
    }

    protected void finalize(){
        // Best-effort safety net; close() is idempotent
        close();
    }

    /**
     * Closes the socket (best effort) and clears streams and auth state.
     */
    protected void close(){
        _authed.clear();

        if ( _socket != null ){
            try {
                _socket.close();
            }
            catch ( Exception e ){
                // don't care
            }
        }

        _in = null;
        _out = null;
        _socket = null;
    }

    /**
     * Authenticates against the database (or admin fall-back) if credentials
     * are configured and this port has not already authenticated for it.
     */
    void checkAuth( DB db ){
        if ( db._username == null ){
            if ( db._name.equals( "admin" ) )
                return;
            // No credentials on this db - try admin credentials instead
            checkAuth( db._mongo.getDB( "admin" ) );
            return;
        }
        if ( _authed.containsKey( db ) )
            return;

        // nonce/authenticate handshake
        CommandResult res = runCommand( db , new BasicDBObject( "getnonce" , 1 ) );
        res.throwOnError();

        DBObject temp = db._authCommand( res.getString( "nonce" ) );

        res = runCommand( db , temp );

        if ( ! res.ok() )
            throw new MongoInternalException( "couldn't re-auth" );
        _authed.put( db , true );
    }

    final int _hashCode;
    final InetSocketAddress _addr;
    final DBPortPool _pool;
    final MongoOptions _options;
    final Logger _logger;

    // Decoder reused for all responses on this (synchronized) port
    final BSONDecoder _decoder = new BSONDecoder();

    private Socket _socket;
    private InputStream _in;
    private OutputStream _out;

    // True while a response is being read off the wire
    private boolean _processingResponse;

    // Databases this port has authenticated against (weak keys so closed
    // DBs can be collected)
    private Map<DB,Boolean> _authed = Collections.synchronizedMap( new WeakHashMap<DB,Boolean>() );

    int _lastThread;

    // Number of calls made on this port (used by tryGetLastError)
    long _calls = 0;

    private static Logger _rootLogger = Logger.getLogger( "com.mongodb.port" );
}
| |
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.impl.client.cache;
import java.io.IOException;
import java.lang.reflect.Proxy;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.http.HttpEntity;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.HttpVersion;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicHttpResponse;
import org.apache.http.util.EntityUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TestSizeLimitedResponseReader {

    /** Byte limit applied by the reader under test. */
    private static final long MAX_SIZE = 4;

    private HttpRequest request;
    private SizeLimitedResponseReader reader;

    @Before
    public void setUp() {
        request = new HttpGet("http://foo.example.com/bar");
    }

    @Test
    public void testLargeResponseIsTooLarge() throws Exception {
        // Five bytes against a four byte limit
        final byte[] payload = new byte[] { 1, 2, 3, 4, 5 };
        reader = readerFor(make200Response(payload));
        reader.readResponse();

        final boolean limitHit = reader.isLimitReached();
        final HttpResponse reconstructed = reader.getReconstructedResponse();
        final byte[] body = EntityUtils.toByteArray(reconstructed.getEntity());

        // Limit is reached yet the full body must still be reconstructable
        Assert.assertTrue(limitHit);
        Assert.assertArrayEquals(payload, body);
    }

    @Test
    public void testExactSizeResponseIsNotTooLarge() throws Exception {
        // Exactly the limit - must not trip it
        final byte[] payload = new byte[] { 1, 2, 3, 4 };
        reader = readerFor(make200Response(payload));
        reader.readResponse();

        final boolean limitHit = reader.isLimitReached();
        final byte[] body = EntityUtils.toByteArray(reader.getReconstructedResponse().getEntity());

        Assert.assertFalse(limitHit);
        Assert.assertArrayEquals(payload, body);
    }

    @Test
    public void testSmallResponseIsNotTooLarge() throws Exception {
        final byte[] payload = new byte[] { 1, 2, 3 };
        reader = readerFor(make200Response(payload));
        reader.readResponse();

        final boolean limitHit = reader.isLimitReached();
        final byte[] body = EntityUtils.toByteArray(reader.getReconstructedResponse().getEntity());

        Assert.assertFalse(limitHit);
        Assert.assertArrayEquals(payload, body);
    }

    @Test
    public void testResponseWithNoEntityIsNotTooLarge() throws Exception {
        reader = readerFor(make200Response());
        reader.readResponse();

        Assert.assertFalse(reader.isLimitReached());
    }

    @Test
    public void testTooLargeEntityHasOriginalContentTypes() throws Exception {
        final CloseableHttpResponse response = make200Response();
        final StringEntity entity = new StringEntity("large entity content");
        response.setEntity(entity);

        reader = readerFor(response);
        reader.readResponse();

        final boolean limitHit = reader.isLimitReached();
        final HttpEntity reconstructedEntity = reader.getReconstructedResponse().getEntity();

        // Content metadata must survive reconstruction
        Assert.assertEquals(entity.getContentEncoding(), reconstructedEntity.getContentEncoding());
        Assert.assertEquals(entity.getContentType(), reconstructedEntity.getContentType());

        final String content = EntityUtils.toString(reconstructedEntity);
        Assert.assertTrue(limitHit);
        Assert.assertEquals("large entity content", content);
    }

    @Test
    public void testTooLargeResponseCombinedClosed() throws Exception {
        final AtomicBoolean wasClosed = new AtomicBoolean(false);
        // Proxy that records when the backing response is closed
        final CloseableHttpResponse response = (CloseableHttpResponse) Proxy
                .newProxyInstance(ResponseProxyHandler.class.getClassLoader(),
                        new Class<?>[] { CloseableHttpResponse.class },
                        new ResponseProxyHandler(new BasicHttpResponse(
                                HttpVersion.HTTP_1_1, HttpStatus.SC_OK, "OK")) {
                            @Override
                            public void close() throws IOException {
                                wasClosed.set(true);
                            }
                        });
        final StringEntity entity = new StringEntity("large entity content");
        response.setEntity(entity);

        reader = readerFor(response);
        reader.readResponse();

        final boolean limitHit = reader.isLimitReached();
        final CloseableHttpResponse reconstructed = reader.getReconstructedResponse();
        try {
            final HttpEntity reconstructedEntity = reconstructed.getEntity();
            Assert.assertEquals(entity.getContentEncoding(), reconstructedEntity.getContentEncoding());
            Assert.assertEquals(entity.getContentType(), reconstructedEntity.getContentType());

            // Backing response must stay open until the combined response closes
            Assert.assertFalse(wasClosed.get());

            final String content = EntityUtils.toString(reconstructedEntity);
            Assert.assertTrue(limitHit);
            Assert.assertEquals("large entity content", content);
        } finally {
            reconstructed.close();
        }
        Assert.assertTrue(wasClosed.get());
    }

    @Test
    public void testResponseCopiesAllOriginalHeaders() throws Exception {
        final byte[] payload = new byte[] { 1, 2, 3 };
        final CloseableHttpResponse response = make200Response(payload);
        response.setHeader("Content-Encoding", "gzip");

        reader = readerFor(response);
        reader.readResponse();

        final boolean limitHit = reader.isLimitReached();
        final HttpResponse reconstructed = reader.getReconstructedResponse();
        final byte[] body = EntityUtils.toByteArray(reconstructed.getEntity());

        Assert.assertFalse(limitHit);
        Assert.assertArrayEquals(payload, body);
        Assert.assertEquals("gzip", reconstructed.getFirstHeader("Content-Encoding").getValue());
    }

    /** Creates the reader under test for the given response. */
    private SizeLimitedResponseReader readerFor(final CloseableHttpResponse response) {
        return new SizeLimitedResponseReader(new HeapResourceFactory(), MAX_SIZE, request, response);
    }

    /** 200 OK response without an entity. */
    private CloseableHttpResponse make200Response() {
        return Proxies.enhanceResponse(new BasicHttpResponse(
                HttpVersion.HTTP_1_1, HttpStatus.SC_OK, "OK"));
    }

    /** 200 OK response carrying the given bytes as its entity. */
    private CloseableHttpResponse make200Response(final byte[] buf) {
        final HttpResponse response = new BasicHttpResponse(
                HttpVersion.HTTP_1_1, HttpStatus.SC_OK, "OK");
        response.setEntity(new ByteArrayEntity(buf));
        return Proxies.enhanceResponse(response);
    }
}
| |
package org.hibernate.test.sql.hand.query;
import java.io.Serializable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import junit.framework.Test;
import org.hibernate.Hibernate;
import org.hibernate.HibernateException;
import org.hibernate.Query;
import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.type.FloatType;
import org.hibernate.type.LongType;
import org.hibernate.type.StringType;
import org.hibernate.type.TimestampType;
import org.hibernate.util.ArrayHelper;
import org.hibernate.test.sql.hand.Organization;
import org.hibernate.test.sql.hand.Person;
import org.hibernate.test.sql.hand.Employment;
import org.hibernate.test.sql.hand.Product;
import org.hibernate.test.sql.hand.Order;
import org.hibernate.test.sql.hand.Dimension;
import org.hibernate.test.sql.hand.SpaceShip;
import org.hibernate.test.sql.hand.Speech;
import org.hibernate.test.sql.hand.Group;
import org.hibernate.test.sql.hand.TextHolder;
import org.hibernate.test.sql.hand.ImageHolder;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.testing.junit.functional.FunctionalTestCase;
import org.hibernate.testing.junit.functional.FunctionalTestClassTestSuite;
import org.hibernate.transform.DistinctRootEntityResultTransformer;
import org.hibernate.transform.Transformers;
import org.hibernate.transform.BasicTransformerAdapter;
/**
* Tests of various features of native SQL queries.
*
* @author Steve Ebersole
*/
@SuppressWarnings({ "UnnecessaryBoxing", "UnnecessaryUnboxing" })
public class NativeSQLQueriesTest extends FunctionalTestCase {
/**
 * JUnit 3 style constructor; forwards the test name to the
 * FunctionalTestCase base class.
 */
public NativeSQLQueriesTest(String x) {
    super( x );
}
/**
 * @return the mapping documents backing these native-SQL query tests.
 */
public String[] getMappings() {
    String[] mappings = { "sql/hand/query/NativeSQLQueries.hbm.xml" };
    return mappings;
}
/**
 * Enables statistics generation so tests can assert entity insert/flush
 * counts (see testManualSynchronization).
 */
public void configure(Configuration cfg) {
    super.configure( cfg );
    cfg.setProperty( Environment.GENERATE_STATISTICS, "true" );
}
/**
 * @return the functional-test suite wrapping this class.
 */
public static Test suite() {
    FunctionalTestClassTestSuite suite =
            new FunctionalTestClassTestSuite( NativeSQLQueriesTest.class );
    return suite;
}
/**
 * Native SQL joining ORGANIZATION to EMPLOYMENT using Hibernate's
 * {alias.property} placeholders; fetches the employments collection
 * ({emp.key}/{emp.element}/{emp.element.*}) along with the owning org.
 */
protected String getOrganizationFetchJoinEmploymentSQL() {
    return "SELECT org.ORGID as {org.id}, " +
            " org.NAME as {org.name}, " +
            " emp.EMPLOYER as {emp.key}, " +
            " emp.EMPID as {emp.element}, " +
            " {emp.element.*} " +
            "FROM ORGANIZATION org " +
            " LEFT OUTER JOIN EMPLOYMENT emp ON org.ORGID = emp.EMPLOYER";
}
/**
 * Native SQL left-joining ORGANIZATION to EMPLOYMENT, selecting the whole
 * employment row via the {emp.*} placeholder (no collection fetch markers).
 */
protected String getOrganizationJoinEmploymentSQL() {
    return "SELECT org.ORGID as {org.id}, " +
            " org.NAME as {org.name}, " +
            " {emp.*} " +
            "FROM ORGANIZATION org " +
            " LEFT OUTER JOIN EMPLOYMENT emp ON org.ORGID = emp.EMPLOYER";
}
/** @return native SQL selecting every EMPLOYMENT row with all columns. */
protected String getEmploymentSQL() {
    final String sql = "SELECT * FROM EMPLOYMENT";
    return sql;
}
/**
 * @return native SQL returning all EMPLOYMENT columns plus the employer id
 *         duplicated under a scalar alias ("employerid").
 */
protected String getEmploymentSQLMixedScalarEntity() {
    final String sql = "SELECT e.*, e.employer as employerid FROM EMPLOYMENT e";
    return sql;
}
/**
 * Native SQL selecting whole org and emp rows via {org.*}/{emp.*}
 * placeholders plus the raw scalar REGIONCODE column.
 */
protected String getOrgEmpRegionSQL() {
    return "select {org.*}, {emp.*}, emp.REGIONCODE " +
            "from ORGANIZATION org " +
            " left outer join EMPLOYMENT emp on org.ORGID = emp.EMPLOYER";
}
/**
 * Native SQL inner-joining org, employment and person; only organizations
 * that actually have an employment (and employee) produce rows.
 */
protected String getOrgEmpPersonSQL() {
    return "select {org.*}, {emp.*}, {pers.*} " +
            "from ORGANIZATION org " +
            " join EMPLOYMENT emp on org.ORGID = emp.EMPLOYER " +
            " join PERSON pers on pers.PERID = emp.EMPLOYEE ";
}
/** @return native SQL selecting the DESCRIPTION column of TEXTHOLDER. */
protected String getDescriptionsSQL() {
    final String sql = "select DESCRIPTION from TEXTHOLDER";
    return sql;
}
/** @return native SQL selecting the PHOTO column of IMAGEHOLDER. */
protected String getPhotosSQL() {
    final String sql = "select PHOTO from IMAGEHOLDER";
    return sql;
}
/**
 * Executing a SQLQuery containing {alias} placeholders without any
 * addEntity()/addScalar() call must raise an exception.
 */
public void testFailOnNoAddEntityOrScalar() {
    // Note: this passes, but for the wrong reason.
    // there is actually an exception thrown, but it is the database
    // throwing a sql exception because the SQL gets passed
    // "un-processed"...
    //
    // Oddly, H2 accepts this query.
    if ( H2Dialect.class.isInstance( getDialect() ) ) {
        return;
    }
    Session s = openSession();
    s.beginTransaction();
    try {
        String sql = "select {org.*} " +
                "from organization org";
        s.createSQLQuery( sql ).list();
        fail( "Should throw an exception since no addEntity nor addScalar has been performed." );
    }
    catch( HibernateException he) {
        // expected behavior
    }
    finally {
        // roll back regardless of outcome so no test data leaks
        s.getTransaction().rollback();
        s.close();
    }
}
/**
 * Verifies addSynchronizedEntityClass(): a native query over EMPLOYMENT
 * does not auto-flush a pending Organization insert unless Organization
 * is registered as a synchronized query space (insert count goes 0 -> 1).
 */
public void testManualSynchronization() {
    Session s = openSession();
    s.beginTransaction();
    sfi().getStatistics().clear();
    // create an Organization...
    Organization jboss = new Organization( "JBoss" );
    s.persist( jboss );
    // now query on Employment, this should not cause an auto-flush
    s.createSQLQuery( getEmploymentSQL() ).list();
    assertEquals( 0, sfi().getStatistics().getEntityInsertCount() );
    // now try to query on Employment but this time add Organization as a synchronized query space...
    s.createSQLQuery( getEmploymentSQL() ).addSynchronizedEntityClass( Organization.class ).list();
    assertEquals( 1, sfi().getStatistics().getEntityInsertCount() );
    // clean up
    s.delete( jboss );
    s.getTransaction().commit();
    s.close();
}
/**
 * Exercises the programmatic SQLQuery API: addEntity/addJoin/addScalar
 * combinations, and DistinctRootEntityResultTransformer over a query that
 * produces a cartesian product.
 */
public void testSQLQueryInterface() {
    Session s = openSession();
    Transaction t = s.beginTransaction();
    Organization ifa = new Organization("IFA");
    Organization jboss = new Organization("JBoss");
    Person gavin = new Person("Gavin");
    Employment emp = new Employment(gavin, jboss, "AU");
    s.persist(ifa);
    s.persist(jboss);
    s.persist(gavin);
    s.persist(emp);
    // org/emp left join plus a scalar region code: two org rows expected
    List l = s.createSQLQuery( getOrgEmpRegionSQL() )
            .addEntity("org", Organization.class)
            .addJoin("emp", "org.employments")
            .addScalar("regionCode", Hibernate.STRING)
            .list();
    assertEquals( 2, l.size() );
    // inner joins through to PERSON: only the org with an employment matches
    l = s.createSQLQuery( getOrgEmpPersonSQL() )
            .addEntity("org", Organization.class)
            .addJoin("emp", "org.employments")
            .addJoin("pers", "emp.employee")
            .list();
    assertEquals( l.size(), 1 );
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    // cartesian product with a second ORGANIZATION reference; the distinct
    // root-entity transformer collapses duplicates back to the two roots
    l = s.createSQLQuery( "select {org.*}, {emp.*} " +
            "from ORGANIZATION org " +
            " left outer join EMPLOYMENT emp on org.ORGID = emp.EMPLOYER, ORGANIZATION org2" )
            .addEntity("org", Organization.class)
            .addJoin("emp", "org.employments")
            .setResultTransformer( DistinctRootEntityResultTransformer.INSTANCE )
            .list();
    assertEquals( l.size(), 2 );
    t.commit();
    s.close();
    // clean up
    s = openSession();
    t = s.beginTransaction();
    s.delete(emp);
    s.delete(gavin);
    s.delete(ifa);
    s.delete(jboss);
    t.commit();
    s.close();
}
/**
 * Runs the org/emp native queries through result-set mappings declared in
 * the mapping document, applied by name via setResultSetMapping().
 */
public void testResultSetMappingDefinition() {
    Session s = openSession();
    Transaction t = s.beginTransaction();
    Organization ifa = new Organization("IFA");
    Organization jboss = new Organization("JBoss");
    Person gavin = new Person("Gavin");
    Employment emp = new Employment(gavin, jboss, "AU");
    s.persist(ifa);
    s.persist(jboss);
    s.persist(gavin);
    s.persist(emp);
    List l = s.createSQLQuery( getOrgEmpRegionSQL() )
            .setResultSetMapping( "org-emp-regionCode" )
            .list();
    assertEquals( l.size(), 2 );
    l = s.createSQLQuery( getOrgEmpPersonSQL() )
            .setResultSetMapping( "org-emp-person" )
            .list();
    assertEquals( l.size(), 1 );
    // clean up
    s.delete(emp);
    s.delete(gavin);
    s.delete(ifa);
    s.delete(jboss);
    t.commit();
    s.close();
}
/**
 * Tests named queries returning pure scalars, scalar+entity mixes, the
 * defined ordering of mixed results, and ALIAS_TO_ENTITY_MAP output.
 */
public void testScalarValues() throws Exception {
    Session s = openSession();
    Transaction t = s.beginTransaction();
    Organization ifa = new Organization( "IFA" );
    Organization jboss = new Organization( "JBoss" );
    Serializable idIfa = s.save( ifa );
    Serializable idJBoss = s.save( jboss );
    s.flush();
    List result = s.getNamedQuery( "orgNamesOnly" ).list();
    assertTrue( result.contains( "IFA" ) );
    assertTrue( result.contains( "JBoss" ) );
    // same query, but each row exposed as a single-entry map keyed "NAME"
    result = s.getNamedQuery( "orgNamesOnly" ).setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP).list();
    Map m = (Map) result.get(0);
    assertEquals( 2, result.size() );
    assertEquals( 1, m.size() );
    assertTrue( m.containsKey("NAME") );
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    // scalar first, entity second
    Iterator iter = s.getNamedQuery( "orgNamesAndOrgs" ).list().iterator();
    Object[] o = ( Object[] ) iter.next();
    assertEquals( o[0], "IFA" );
    assertEquals( ( ( Organization ) o[1] ).getName(), "IFA" );
    o = ( Object[] ) iter.next();
    assertEquals( o[0], "JBoss" );
    assertEquals( ( ( Organization ) o[1] ).getName(), "JBoss" );
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    // test that the ordering of the results is truly based on the order in which they were defined
    iter = s.getNamedQuery( "orgsAndOrgNames" ).list().iterator();
    Object[] row = ( Object[] ) iter.next();
    assertEquals( "expecting non-scalar result first", Organization.class, row[0].getClass() );
    assertEquals( "expecting scalar result second", String.class, row[1].getClass() );
    assertEquals( ( ( Organization ) row[0] ).getName(), "IFA" );
    assertEquals( row[1], "IFA" );
    row = ( Object[] ) iter.next();
    assertEquals( "expecting non-scalar result first", Organization.class, row[0].getClass() );
    assertEquals( "expecting scalar result second", String.class, row[1].getClass() );
    assertEquals( ( ( Organization ) row[0] ).getName(), "JBoss" );
    assertEquals( row[1], "JBoss" );
    assertFalse( iter.hasNext() );
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    // id scalar plus name scalar, compared against the saved identifiers
    iter = s.getNamedQuery( "orgIdsAndOrgNames" ).list().iterator();
    o = ( Object[] ) iter.next();
    assertEquals( o[1], "IFA" );
    assertEquals( o[0], idIfa );
    o = ( Object[] ) iter.next();
    assertEquals( o[1], "JBoss" );
    assertEquals( o[0], idJBoss );
    s.delete( ifa );
    s.delete( jboss );
    t.commit();
    s.close();
}
/**
 * Tests named queries whose result mapping (aliases) is declared in the
 * mapping document, including ALIAS_TO_ENTITY_MAP transformation of
 * entity-returning queries and multi-entity Object[] rows.
 */
public void testMappedAliasStrategy() {
    // Fixture: two organizations, one person employed by JBoss.
    Session s = openSession();
    Transaction t = s.beginTransaction();
    Organization ifa = new Organization("IFA");
    Organization jboss = new Organization("JBoss");
    Person gavin = new Person("Gavin");
    Employment emp = new Employment(gavin, jboss, "AU");
    Serializable orgId = s.save(jboss);
    Serializable orgId2 = s.save(ifa);
    s.save(gavin);
    s.save(emp);
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    Query namedQuery = s.getNamedQuery("AllEmploymentAsMapped");
    List list = namedQuery.list();
    assertEquals(1,list.size());
    Employment emp2 = (Employment) list.get(0);
    assertEquals(emp2.getEmploymentId(), emp.getEmploymentId() );
    assertEquals(emp2.getStartDate().getDate(), emp.getStartDate().getDate() );
    assertEquals(emp2.getEndDate(), emp.getEndDate() );
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    Query sqlQuery = s.getNamedQuery("EmploymentAndPerson");
    sqlQuery.setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP);
    list = sqlQuery.list();
    assertEquals(1,list.size() );
    Object res = list.get(0);
    assertClassAssignability(res.getClass(),Map.class);
    Map m = (Map) res;
    assertEquals(2,m.size());
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    sqlQuery = s.getNamedQuery( "organizationreturnproperty" );
    sqlQuery.setResultTransformer( Transformers.ALIAS_TO_ENTITY_MAP );
    list = sqlQuery.list();
    assertEquals( 2,list.size() );
    m = (Map) list.get(0);
    assertEquals( 2, m.size() );
    assertTrue( m.containsKey("org") );
    assertTrue( m.containsKey("emp") );
    assertClassAssignability( m.get("org").getClass(), Organization.class );
    // only the org that owns an employment has a non-null "emp" entry
    if ( jboss.getId() == ( (Organization) m.get("org") ).getId() ) {
        assertClassAssignability( m.get("emp").getClass(), Employment.class );
    }
    Map m2 = (Map) list.get(1);
    // BUG FIX: the original re-asserted m.size() here; the second row's
    // map (m2) is the one that needs checking.
    assertEquals( 2, m2.size() );
    assertTrue( m2.containsKey("org") );
    assertTrue( m2.containsKey("emp") );
    assertClassAssignability( m2.get("org").getClass(), Organization.class );
    if ( jboss.getId() == ( (Organization) m2.get("org") ).getId() ) {
        assertClassAssignability( m2.get("emp").getClass(), Employment.class );
    }
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    // without a transformer the same named query yields Object[2] rows
    namedQuery = s.getNamedQuery("EmploymentAndPerson");
    list = namedQuery.list();
    assertEquals(1,list.size() );
    Object[] objs = (Object[]) list.get(0);
    assertEquals(2, objs.length);
    emp2 = (Employment) objs[0];
    gavin = (Person) objs[1];
    // clean up
    s.delete(emp2);
    s.delete(jboss);
    s.delete(gavin);
    s.delete(ifa);
    t.commit();
    s.close();
}
/* test for native sql composite id joins which has never been implemented */
public void testCompositeIdJoinsFailureExpected() {
    // Fixture: a product with a composite id (orgid+productnumber), one
    // order for it, and one person.
    Session s = openSession();
    Transaction t = s.beginTransaction();
    Person person = new Person();
    person.setName( "Noob" );
    Product product = new Product();
    product.setProductId( new Product.ProductId() );
    product.getProductId().setOrgid( "x" );
    product.getProductId().setProductnumber( "1234" );
    product.setName( "Hibernate 3" );
    Order order = new Order();
    order.setOrderId( new Order.OrderId() );
    order.getOrderId().setOrdernumber( "1" );
    order.getOrderId().setOrgid( "y" );
    product.getOrders().add( order );
    order.setProduct( product );
    order.setPerson( person );
    s.save( product );
    s.save( order);
    s.save( person );
    t.commit();
    s.close();
    // sanity check: HQL fetch-join over the composite-id collection works
    s = openSession();
    t = s.beginTransaction();
    Product p = (Product) s.createQuery( "from Product p join fetch p.orders" ).list().get(0);
    assertTrue(Hibernate.isInitialized( p.getOrders()));
    t.commit();
    s.close();
    // now attempt the same fetch with hand-written native SQL
    s = openSession();
    t = s.beginTransaction();
    Object[] o = (Object[]) s.createSQLQuery( "select\r\n" +
            " product.orgid as {product.id.orgid}," +
            " product.productnumber as {product.id.productnumber}," +
            " {prod_orders}.orgid as orgid3_1_,\r\n" +
            " {prod_orders}.ordernumber as ordernum2_3_1_,\r\n" +
            " product.name as {product.name}," +
            " {prod_orders.element.*}" +
            /*" orders.PROD_NO as PROD4_3_1_,\r\n" +
            " orders.person as person3_1_,\r\n" +
            " orders.PROD_ORGID as PROD3_0__,\r\n" +
            " orders.PROD_NO as PROD4_0__,\r\n" +
            " orders.orgid as orgid0__,\r\n" +
            " orders.ordernumber as ordernum2_0__ \r\n" +*/
            " from\r\n" +
            " Product product \r\n" +
            " inner join\r\n" +
            " TBL_ORDER {prod_orders} \r\n" +
            " on product.orgid={prod_orders}.PROD_ORGID \r\n" +
            " and product.productnumber={prod_orders}.PROD_NO" )
            .addEntity( "product", Product.class )
            .addJoin( "prod_orders", "product.orders" )
            .list().get(0);
    p = (Product) o[0];
    assertTrue(Hibernate.isInitialized( p.getOrders() ));
    assertNotNull(p.getOrders().iterator().next());
    t.commit();
    s.close();
}
/**
 * Tests auto-detection of column aliases when no explicit {alias} markers
 * are present: plain addEntity, raw Object[] rows, custom transformers,
 * mixed scalar+entity results, named queries with collection returns, and
 * scalar arithmetic aliases.
 */
public void testAutoDetectAliasing() {
    Session s = openSession();
    Transaction t = s.beginTransaction();
    Organization ifa = new Organization("IFA");
    Organization jboss = new Organization("JBoss");
    Person gavin = new Person("Gavin");
    Employment emp = new Employment(gavin, jboss, "AU");
    Serializable orgId = s.save(jboss);
    Serializable orgId2 = s.save(ifa);
    s.save(gavin);
    s.save(emp);
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    List list = s.createSQLQuery( getEmploymentSQL() )
            .addEntity( Employment.class.getName() )
            .list();
    assertEquals( 1,list.size() );
    Employment emp2 = (Employment) list.get(0);
    assertEquals(emp2.getEmploymentId(), emp.getEmploymentId() );
    assertEquals(emp2.getStartDate().getDate(), emp.getStartDate().getDate() );
    assertEquals(emp2.getEndDate(), emp.getEndDate() );
    s.clear();
    // entity result transformed to a single-entry map keyed by entity name
    list = s.createSQLQuery( getEmploymentSQL() )
            .addEntity( Employment.class.getName() )
            .setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP)
            .list();
    assertEquals( 1,list.size() );
    Map m = (Map) list.get(0);
    assertTrue(m.containsKey("Employment"));
    assertEquals(1,m.size());
    // raw query with no addXxx: the one row comes back as an Object[8]
    list = s.createSQLQuery(getEmploymentSQL()).list();
    assertEquals(1, list.size());
    Object[] o = (Object[]) list.get(0);
    assertEquals(8, o.length);
    // custom transformer: map keyed by upper-cased auto-detected aliases
    list = s.createSQLQuery( getEmploymentSQL() ).setResultTransformer( new UpperCasedAliasToEntityMapResultTransformer() ).list();
    assertEquals(1, list.size());
    m = (Map) list.get(0);
    assertTrue(m.containsKey("EMPID"));
    assertTrue(m.containsKey("VALUE"));
    assertTrue(m.containsKey("ENDDATE"));
    assertEquals(8, m.size());
    // mixed scalar + entity in one row
    list = s.createSQLQuery( getEmploymentSQLMixedScalarEntity() ).addScalar( "employerid" ).addEntity( Employment.class ).list();
    assertEquals(1, list.size());
    o = (Object[]) list.get(0);
    assertEquals(2, o.length);
    assertClassAssignability( o[0].getClass(), Number.class);
    assertClassAssignability( o[1].getClass(), Employment.class);
    Query queryWithCollection = s.getNamedQuery("organizationEmploymentsExplicitAliases");
    queryWithCollection.setLong("id", jboss.getId() );
    list = queryWithCollection.list();
    assertEquals(list.size(),1);
    s.clear();
    list = s.createSQLQuery( getOrganizationJoinEmploymentSQL() )
            .addEntity( "org", Organization.class )
            .addJoin( "emp", "org.employments" )
            .list();
    assertEquals( 2,list.size() );
    s.clear();
    list = s.createSQLQuery( getOrganizationFetchJoinEmploymentSQL() )
            .addEntity( "org", Organization.class )
            .addJoin( "emp", "org.employments" )
            .list();
    assertEquals( 2,list.size() );
    s.clear();
    // TODO : why twice?
    s.getNamedQuery( "organizationreturnproperty" ).list();
    list = s.getNamedQuery( "organizationreturnproperty" ).list();
    assertEquals( 2,list.size() );
    s.clear();
    list = s.getNamedQuery( "organizationautodetect" ).list();
    assertEquals( 2,list.size() );
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    s.delete(emp2);
    s.delete(jboss);
    s.delete(gavin);
    s.delete(ifa);
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    // arithmetic aliases ("surface", "volume") are auto-detected scalars
    Dimension dim = new Dimension( 3, Integer.MAX_VALUE );
    s.save( dim );
    list = s.createSQLQuery( "select d_len * d_width as surface, d_len * d_width * 10 as volume from Dimension" ).list();
    s.delete( dim );
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    SpaceShip enterprise = new SpaceShip();
    enterprise.setModel( "USS" );
    enterprise.setName( "Entreprise" );
    enterprise.setSpeed( 50d );
    Dimension d = new Dimension(45, 10);
    enterprise.setDimensions( d );
    s.save( enterprise );
    // db-specific numeric types are normalized through extractDoubleValue
    Object[] result = (Object[]) s.getNamedQuery( "spaceship" ).uniqueResult();
    enterprise = ( SpaceShip ) result[0];
    assertTrue(50d == enterprise.getSpeed() );
    assertTrue( 450d == extractDoubleValue( result[1] ) );
    assertTrue( 4500d == extractDoubleValue( result[2] ) );
    s.delete( enterprise );
    t.commit();
    s.close();
}
/**
 * Exercises the explicit return-definition API on SQLQuery: applying a
 * named result-set mapping, partial addRoot/addFetch definitions, fully
 * explicit property/column mappings, and explicit scalar typing.
 */
public void testExplicitReturnAPI() {
    Session s = openSession();
    s.beginTransaction();
    Organization jboss = new Organization( "JBoss" );
    Person me = new Person( "Steve" );
    Employment emp = new Employment( me, jboss, "US" );
    Serializable jbossId = s.save( jboss );
    s.save( me );
    s.save( emp );
    s.getTransaction().commit();
    s.close();
    s = openSession();
    s.beginTransaction();
    String sql =
            "SELECT org.ORGID as orgid," +
            " org.NAME as name," +
            " emp.EMPLOYER as employer," +
            " emp.EMPID as empid," +
            " emp.EMPLOYEE as employee," +
            " emp.EMPLOYER as employer," +
            " emp.STARTDATE as startDate," +
            " emp.ENDDATE as endDate," +
            " emp.REGIONCODE as regionCode," +
            " emp.VALUE as VALUE," +
            " emp.CURRENCY as CURRENCY" +
            " FROM ORGANIZATION org" +
            " LEFT OUTER JOIN EMPLOYMENT emp ON org.ORGID = emp.EMPLOYER";
    // as a control, lets apply an existing rs mapping
    SQLQuery sqlQuery = s.createSQLQuery( sql );
    sqlQuery.setResultSetMapping( "org-description" );
    sqlQuery.list();
    // next try a partial mapping def
    sqlQuery.addRoot( "org", Organization.class );
    sqlQuery.addFetch( "emp", "org", "employments" );
    sqlQuery.list();
    // now try full explicit mappings
    sqlQuery.addRoot( "org", Organization.class )
            .addProperty( "id", "orgid" )
            .addProperty( "name" ).addColumnAlias( "name" );
    sqlQuery.addFetch( "emp", "org", "employments" )
            .addProperty( "key", "employer" )
            .addProperty( "element", "empid" )
            .addProperty( "element.employee", "employee" )
            .addProperty( "element.employer", "employer" )
            .addProperty( "element.startDate", "startDate" )
            .addProperty( "element.endDate", "endDate" )
            .addProperty( "element.regionCode", "regionCode" )
            .addProperty( "element.employmentId", "empId" )
            .addProperty( "element.salary" ).addColumnAlias( "VALUE" ).addColumnAlias( "CURRENCY" );
    sqlQuery.list();
    // lets try a totally different approach now and pull back scalars, first with explicit types
    sqlQuery.addScalar( "orgid", LongType.INSTANCE )
            .addScalar( "name", StringType.INSTANCE )
            .addScalar( "empid", LongType.INSTANCE )
            .addScalar( "employee", LongType.INSTANCE )
            .addScalar( "startDate", TimestampType.INSTANCE )
            .addScalar( "endDate", TimestampType.INSTANCE )
            .addScalar( "regionCode", StringType.INSTANCE )
            .addScalar( "empId", LongType.INSTANCE )
            .addScalar( "VALUE", FloatType.INSTANCE )
            .addScalar( "CURRENCY", StringType.INSTANCE );
    s.getTransaction().commit();
    s.close();
    // clean up
    s = openSession();
    s.beginTransaction();
    s.delete( emp );
    s.delete( jboss );
    s.delete( me );
    s.getTransaction().commit();
    s.close();
}
/**
 * Uses the "speech" result-set mapping to return an entity together with
 * extra scalar columns from one native query; rolls back, so no cleanup.
 */
public void testMixAndMatchEntityScalar() {
    Session s = openSession();
    Transaction t = s.beginTransaction();
    Speech speech = new Speech();
    speech.setLength( new Double( 23d ) );
    speech.setName( "Mine" );
    s.persist( speech );
    s.flush();
    s.clear();
    List l = s.createSQLQuery( "select name, id, flength, name as scalarName from Speech" )
            .setResultSetMapping( "speech" )
            .list();
    assertEquals( l.size(), 1 );
    t.rollback();
    s.close();
}
/**
 * Normalizes a database-returned numeric value to a double; different
 * drivers hand back BigInteger, BigDecimal, or other numeric types.
 */
private double extractDoubleValue(Object value) {
    if ( value instanceof BigInteger ) {
        return ( ( BigInteger ) value ).doubleValue();
    }
    if ( value instanceof BigDecimal ) {
        return ( ( BigDecimal ) value ).doubleValue();
    }
    // Fall back to parsing the string form for any other numeric type.
    return Double.parseDouble( value.toString() );
}
/**
 * Many-to-many fetch through a named native query ("manyToManyFetch");
 * the inline addJoin variant is disabled pending HHH-3908.
 */
public void testAddJoinForManyToMany() {
    Session s = openSession();
    Transaction t = s.beginTransaction();
    Person gavin = new Person( "Gavin" );
    Person max = new Person( "Max" );
    Person pete = new Person( "Pete" );
    Group hibernate = new Group( "Hibernate" );
    Group seam = new Group( "Seam" );
    s.persist( gavin );
    s.persist( max );
    s.persist( pete );
    s.persist( seam );
    s.persist( hibernate );
    hibernate.getPersons().add( gavin );
    hibernate.getPersons().add( max );
    seam.getPersons().add( gavin );
    seam.getPersons().add( pete );
    s.flush();
    s.clear();
    // todo : see http://opensource.atlassian.com/projects/hibernate/browse/HHH-3908
    // String sqlStr = "SELECT {groupp.*} , {gp.*} " +
    // "FROM GROUPP groupp, GROUP_PERSON gp, PERSON person WHERE groupp.ID = gp.GROUP_ID and person.PERID = gp.PERSON_ID";
    //
    // List l = s.createSQLQuery( sqlStr )
    // .addEntity("groupp", Group.class)
    // .addJoin("gp","groupp.persons")
    // .list();
    List l = s.getNamedQuery( "manyToManyFetch" ).list();
    //assertEquals( 2, l.size() );
    t.commit();
    s.close();
    // clean up: unlink associations before deleting the entities
    s = openSession();
    t = s.beginTransaction();
    seam.getPersons().remove( gavin );
    seam.getPersons().remove( pete );
    hibernate.getPersons().remove( gavin );
    hibernate.getPersons().remove( max );
    s.delete( seam );
    s.delete( hibernate );
    s.delete( gavin );
    s.delete( max );
    s.delete( pete );
    t.commit();
    s.close();
}
/**
 * Round-trips a 15k-character string through a text column and reads it
 * back via a native query.
 */
public void testTextTypeInSQLQuery() {
    Session s = openSession();
    Transaction t = s.beginTransaction();
    String description = buildLongString( 15000, 'a' );
    TextHolder holder = new TextHolder( description );
    s.persist( holder );
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    String descriptionRead = ( String ) s.createSQLQuery( getDescriptionsSQL() )
            .uniqueResult();
    assertEquals( description, descriptionRead );
    s.delete( holder );
    t.commit();
    s.close();
}
/**
 * Round-trips a 15k-byte binary value through an image/blob column and
 * reads it back via a native query.
 */
public void testImageTypeInSQLQuery() {
    Session s = openSession();
    Transaction t = s.beginTransaction();
    byte[] photo = buildLongByteArray( 15000, true );
    ImageHolder holder = new ImageHolder( photo );
    s.persist( holder );
    t.commit();
    s.close();
    s = openSession();
    t = s.beginTransaction();
    byte[] photoRead = ( byte[] ) s.createSQLQuery( getPhotosSQL() )
            .uniqueResult();
    assertTrue( ArrayHelper.isEquals( photo, photoRead ) );
    s.delete( holder );
    t.commit();
    s.close();
}
/**
 * Builds a string of {@code size} repetitions of {@code baseChar}; used to
 * create text values large enough to exercise large-column handling.
 */
private String buildLongString(int size, char baseChar) {
    // Presized, unsynchronized StringBuilder: this is a single-threaded
    // helper, so StringBuffer's locking and default capacity were wasted.
    StringBuilder buff = new StringBuilder( size );
    for( int i = 0; i < size; i++ ) {
        buff.append( baseChar );
    }
    return buff.toString();
}
/**
 * Builds a byte[] of alternating 1/0 values (starting with 1 when
 * {@code on} is true); used to create large binary test values.
 */
private byte[] buildLongByteArray(int size, boolean on) {
    byte[] data = new byte[size];
    // The loop starts at index 0, so the original pre-loop assignment
    // "data[0] = mask(on)" was a dead store (and threw on size == 0).
    for ( int i = 0; i < size; i++ ) {
        data[i] = mask( on );
        on = !on;
    }
    return data;
}
/** Maps a boolean flag to its byte encoding: true -> 1, false -> 0. */
private byte mask(boolean on) {
    if ( on ) {
        return ( byte ) 1;
    }
    return ( byte ) 0;
}
/**
 * Result transformer exposing each row as a Map keyed by the upper-cased
 * column alias; tuple entries whose alias is null are skipped.
 */
private static class UpperCasedAliasToEntityMapResultTransformer extends BasicTransformerAdapter implements Serializable {
    public Object transformTuple(Object[] tuple, String[] aliases) {
        final Map byAlias = new HashMap( tuple.length );
        int position = 0;
        for ( Object value : tuple ) {
            final String alias = aliases[position++];
            if ( alias != null ) {
                byAlias.put( alias.toUpperCase(), value );
            }
        }
        return byAlias;
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import com.codahale.metrics.Counter;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.StorefileRefresherChore;
import org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
/**
* Tests for region replicas. Sad that we cannot isolate these without bringing up a whole
* cluster. See {@link org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster}.
*/
@Category({MediumTests.class, ClientTests.class})
@SuppressWarnings("deprecation")
public class TestReplicasClient {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestReplicasClient.class);

private static final Logger LOG = LoggerFactory.getLogger(TestReplicasClient.class);

private static final int NB_SERVERS = 1;
private static TableName TABLE_NAME;
private Table table = null;
// Row key shared by the tests; derived from the class name for uniqueness.
// (Fixed: the original line ended with a stray doubled semicolon.)
private static final byte[] row = Bytes.toBytes(TestReplicasClient.class.getName());
private static RegionInfo hriPrimary;
private static HRegionInfo hriSecondary;
private static final HBaseTestingUtility HTU = new HBaseTestingUtility();
private static final byte[] f = HConstants.CATALOG_FAMILY;
// Store-file refresh period (ms) for the secondary-replica refresher chore.
private static final int REFRESH_PERIOD = 1000;
/**
 * Coprocessor used to synchronize the tests: it can delay get/scan
 * processing on the primary replica (via a sleep or a latch) or block the
 * secondary replica (via its own latch), letting a test control which
 * replica ends up answering a read.
 */
public static class SlowMeCopro implements RegionCoprocessor, RegionObserver {
    // Milliseconds slowdownCode() sleeps when serving the primary replica.
    static final AtomicLong sleepTime = new AtomicLong(0);
    // When set, preScannerNext delays exactly one "next" call (see below).
    static final AtomicBoolean slowDownNext = new AtomicBoolean(false);
    // Counts scanner "next" calls so that only the second one is delayed.
    static final AtomicInteger countOfNext = new AtomicInteger(0);
    private static final AtomicReference<CountDownLatch> primaryCdl =
        new AtomicReference<>(new CountDownLatch(0));
    private static final AtomicReference<CountDownLatch> secondaryCdl =
        new AtomicReference<>(new CountDownLatch(0));
    // NOTE(review): 'r' appears unused within this class — confirm before removing.
    Random r = new Random();

    public SlowMeCopro() {
    }

    @Override
    public Optional<RegionObserver> getRegionObserver() {
        return Optional.of(this);
    }

    // Delay hook for point gets.
    @Override
    public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e,
        final Get get, final List<Cell> results) throws IOException {
        slowdownCode(e);
    }

    // Delay hook for scanner opens.
    @Override
    public void preScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> e,
        final Scan scan) throws IOException {
        slowdownCode(e);
    }

    @Override
    public boolean preScannerNext(final ObserverContext<RegionCoprocessorEnvironment> e,
        final InternalScanner s, final List<Result> results,
        final int limit, final boolean hasMore) throws IOException {
        //this will slow down a certain next operation if the conditions are met. The slowness
        //will allow the call to go to a replica
        if (slowDownNext.get()) {
            //have some "next" return successfully from the primary; hence countOfNext checked
            if (countOfNext.incrementAndGet() == 2) {
                sleepTime.set(2000);
                slowdownCode(e);
            }
        }
        return true;
    }

    /**
     * Stalls the current operation depending on the replica being served:
     * the primary (replica id 0) honors sleepTime first, then its latch;
     * secondaries honor only the secondary latch. Latch waits are capped
     * at two minutes so a wedged test still terminates.
     */
    private void slowdownCode(final ObserverContext<RegionCoprocessorEnvironment> e) {
        if (e.getEnvironment().getRegion().getRegionInfo().getReplicaId() == 0) {
            LOG.info("We're the primary replicas.");
            CountDownLatch latch = getPrimaryCdl().get();
            try {
                if (sleepTime.get() > 0) {
                    LOG.info("Sleeping for " + sleepTime.get() + " ms");
                    Thread.sleep(sleepTime.get());
                } else if (latch.getCount() > 0) {
                    LOG.info("Waiting for the counterCountDownLatch");
                    latch.await(2, TimeUnit.MINUTES); // To help the tests to finish.
                    if (latch.getCount() > 0) {
                        throw new RuntimeException("Can't wait more");
                    }
                }
            } catch (InterruptedException e1) {
                LOG.error(e1.toString(), e1);
            }
        } else {
            LOG.info("We're not the primary replicas.");
            CountDownLatch latch = getSecondaryCdl().get();
            try {
                if (latch.getCount() > 0) {
                    LOG.info("Waiting for the secondary counterCountDownLatch");
                    latch.await(2, TimeUnit.MINUTES); // To help the tests to finish.
                    if (latch.getCount() > 0) {
                        throw new RuntimeException("Can't wait more");
                    }
                }
            } catch (InterruptedException e1) {
                LOG.error(e1.toString(), e1);
            }
        }
    }

    /** Latch gating operations on the primary replica. */
    public static AtomicReference<CountDownLatch> getPrimaryCdl() {
        return primaryCdl;
    }

    /** Latch gating operations on secondary replicas. */
    public static AtomicReference<CountDownLatch> getSecondaryCdl() {
        return secondaryCdl;
    }
}
/**
 * Starts a single-RS mini cluster with store-file refreshing enabled,
 * creates the test table with SlowMeCopro attached, derives a secondary
 * (replica id 1) region info from the primary region, and stops the
 * master (these tests assign regions directly).
 */
@BeforeClass
public static void beforeClass() throws Exception {
    // enable store file refreshing
    HTU.getConfiguration().setInt(
        StorefileRefresherChore.REGIONSERVER_STOREFILE_REFRESH_PERIOD, REFRESH_PERIOD);
    HTU.getConfiguration().setBoolean("hbase.client.log.scanner.activity", true);
    HTU.getConfiguration().setBoolean(MetricsConnection.CLIENT_SIDE_METRICS_ENABLED_KEY, true);
    HTU.startMiniCluster(NB_SERVERS);
    // Create table then get the single region for our new table.
    HTableDescriptor hdt = HTU.createTableDescriptor(TestReplicasClient.class.getSimpleName());
    hdt.addCoprocessor(SlowMeCopro.class.getName());
    HTU.createTable(hdt, new byte[][]{f}, null);
    TABLE_NAME = hdt.getTableName();
    try (RegionLocator locator = HTU.getConnection().getRegionLocator(hdt.getTableName())) {
        hriPrimary = locator.getRegionLocation(row, false).getRegion();
    }
    // mock a secondary region info to open
    hriSecondary = new HRegionInfo(hriPrimary.getTable(), hriPrimary.getStartKey(),
        hriPrimary.getEndKey(), hriPrimary.isSplit(), hriPrimary.getRegionId(), 1);
    // No master
    LOG.info("Master is going to be stopped");
    TestRegionServerNoMaster.stopMasterAndAssignMeta(HTU);
    // NOTE(review): this Configuration copy (and its retry setting) is not
    // used afterwards in this class — confirm whether it can be removed.
    Configuration c = new Configuration(HTU.getConfiguration());
    c.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
    LOG.info("Master has stopped");
}
/** Resets the skip-reporting test flag and shuts the mini cluster down. */
@AfterClass
public static void afterClass() throws Exception {
    HRegionServer.TEST_SKIP_REPORTING_TRANSITION = false;
    HTU.shutdownMiniCluster();
}
/**
 * Best-effort open of the primary and secondary regions before each test
 * (they may already be open), then creates the Table handle under test.
 */
@Before
public void before() throws IOException {
    HTU.getConnection().clearRegionLocationCache();
    try {
        openRegion(hriPrimary);
    } catch (Exception ignored) {
    }
    try {
        openRegion(hriSecondary);
    } catch (Exception ignored) {
    }
    table = HTU.getConnection().getTable(TABLE_NAME);
}
/**
 * Best-effort close of both regions after each test, then clears any
 * cached region locations so the next test starts clean.
 */
@After
public void after() throws IOException, KeeperException {
    try {
        closeRegion(hriSecondary);
    } catch (Exception ignored) {
    }
    try {
        closeRegion(hriPrimary);
    } catch (Exception ignored) {
    }
    HTU.getConnection().clearRegionLocationCache();
}
/** @return the region server at index 0 — the only one (NB_SERVERS == 1). */
private HRegionServer getRS() {
    return HTU.getMiniHBaseCluster().getRegionServer(0);
}
/**
 * Opens the given region on the region server via the admin protobuf API
 * and waits until the regions-in-transition set drains.
 */
private void openRegion(RegionInfo hri) throws Exception {
    try {
        // Best effort: nothing to do if the region is already open.
        if (isRegionOpened(hri)) return;
    } catch (Exception e){}
    // first version is '0'
    AdminProtos.OpenRegionRequest orr = RequestConverter.buildOpenRegionRequest(
        getRS().getServerName(), hri, null);
    AdminProtos.OpenRegionResponse responseOpen = getRS().getRSRpcServices().openRegion(null, orr);
    Assert.assertEquals(1, responseOpen.getOpeningStateCount());
    Assert.assertEquals(AdminProtos.OpenRegionResponse.RegionOpeningState.OPENED,
        responseOpen.getOpeningState(0));
    checkRegionIsOpened(hri);
}
/**
 * Closes the given region on the test region server via a direct admin RPC and waits
 * until the close has taken effect.
 */
private void closeRegion(RegionInfo hri) throws Exception {
  AdminProtos.CloseRegionRequest request =
      ProtobufUtil.buildCloseRegionRequest(getRS().getServerName(), hri.getRegionName());
  AdminProtos.CloseRegionResponse response = getRS().getRSRpcServices().closeRegion(null, request);
  Assert.assertTrue(response.getClosed());
  checkRegionIsClosed(hri.getEncodedName());
}
// Busy-waits until the RS has no regions in transition. Note: only waits on the RIT set;
// the hri parameter is currently unused (kept for symmetry with checkRegionIsClosed).
private void checkRegionIsOpened(RegionInfo hri) throws Exception {
while (!getRS().getRegionsInTransitionInRS().isEmpty()) {
Thread.sleep(1);
}
}
// True when the RS knows the region and reports it available; throws if the RS does not
// serve the region at all (callers treat that as "not opened").
private boolean isRegionOpened(RegionInfo hri) throws Exception {
return getRS().getRegionByEncodedName(hri.getEncodedName()).isAvailable();
}
// Busy-waits until no region is in transition, then verifies the region is gone or
// reported unavailable on the RS.
private void checkRegionIsClosed(String encodedRegionName) throws Exception {
while (!getRS().getRegionsInTransitionInRS().isEmpty()) {
Thread.sleep(1);
}
try {
Assert.assertFalse(getRS().getRegionByEncodedName(encodedRegionName).isAvailable());
} catch (NotServingRegionException expected) {
// That's how it works: if the region is closed we get an exception here.
}
// We don't delete the znode here, because there is not always a znode.
}
// Convenience wrapper: flush the given region (primary or secondary replica) on the RS.
private void flushRegion(RegionInfo regionInfo) throws IOException {
TestRegionServerNoMaster.flushRegion(HTU, regionInfo);
}
/** A plain get with an unblocked primary must be answered by the primary (never stale). */
@Test
public void testUseRegionWithoutReplica() throws Exception {
  byte[] key = Bytes.toBytes("testUseRegionWithoutReplica");
  openRegion(hriSecondary);
  // Latch with count 0: the coprocessor never blocks the primary in this test.
  SlowMeCopro.getPrimaryCdl().set(new CountDownLatch(0));
  try {
    Result result = table.get(new Get(key));
    Assert.assertFalse(result.isStale());
  } finally {
    closeRegion(hriSecondary);
  }
}
/** Region location lookups must expose both replicas, cached or not. */
@Test
public void testLocations() throws Exception {
  byte[] key = Bytes.toBytes("testLocations");
  openRegion(hriSecondary);
  try (Connection conn = ConnectionFactory.createConnection(HTU.getConfiguration());
      RegionLocator locator = conn.getRegionLocator(TABLE_NAME)) {
    conn.clearRegionLocationCache();
    // Forced reload, then cached read: both must report primary + secondary.
    List<HRegionLocation> locations = locator.getRegionLocations(key, true);
    Assert.assertEquals(2, locations.size());
    locations = locator.getRegionLocations(key, false);
    Assert.assertEquals(2, locations.size());
    // Same checks again after dropping the cache, in the opposite order.
    conn.clearRegionLocationCache();
    locations = locator.getRegionLocations(key, false);
    Assert.assertEquals(2, locations.size());
    locations = locator.getRegionLocations(key, true);
    Assert.assertEquals(2, locations.size());
  } finally {
    closeRegion(hriSecondary);
  }
}
/** A miss on an unblocked primary is never reported as stale, replica or not. */
@Test
public void testGetNoResultNoStaleRegionWithReplica() throws Exception {
  byte[] key = Bytes.toBytes("testGetNoResultNoStaleRegionWithReplica");
  openRegion(hriSecondary);
  try {
    // Default consistency: the get goes to the primary and is not stale.
    Result result = table.get(new Get(key));
    Assert.assertFalse(result.isStale());
  } finally {
    closeRegion(hriSecondary);
  }
}
/** With the primary blocked, a TIMELINE get is served by the secondary and marked stale. */
@Test
public void testGetNoResultStaleRegionWithReplica() throws Exception {
  byte[] key = Bytes.toBytes("testGetNoResultStaleRegionWithReplica");
  openRegion(hriSecondary);
  // Latch with count 1: the coprocessor parks the primary until countDown().
  SlowMeCopro.getPrimaryCdl().set(new CountDownLatch(1));
  try {
    Get get = new Get(key);
    get.setConsistency(Consistency.TIMELINE);
    Result result = table.get(get);
    Assert.assertTrue(result.isStale());
  } finally {
    SlowMeCopro.getPrimaryCdl().get().countDown();
    closeRegion(hriSecondary);
  }
}
/** A slow primary does not make a default-consistency get fall back to the replica. */
@Test
public void testGetNoResultNotStaleSleepRegionWithReplica() throws Exception {
  byte[] key = Bytes.toBytes("testGetNoResultNotStaleSleepRegionWithReplica");
  openRegion(hriSecondary);
  try {
    // Delay the primary; without TIMELINE consistency we still wait for it,
    // so the (empty) result is not stale.
    SlowMeCopro.sleepTime.set(2000);
    Result result = table.get(new Get(key));
    Assert.assertFalse(result.isStale());
  } finally {
    SlowMeCopro.sleepTime.set(0);
    closeRegion(hriSecondary);
  }
}
/** Flushing both replicas must succeed when empty and again after a write. */
@Test
public void testFlushTable() throws Exception {
  openRegion(hriSecondary);
  try {
    // Flush of empty regions...
    flushRegion(hriPrimary);
    flushRegion(hriSecondary);
    // ...then flush again after writing one cell.
    Put put = new Put(row);
    put.addColumn(f, row, row);
    table.put(put);
    flushRegion(hriPrimary);
    flushRegion(hriSecondary);
  } finally {
    table.delete(new Delete(row));
    closeRegion(hriSecondary);
  }
}
/** Flushing only the primary must succeed when empty and again after a write. */
@Test
public void testFlushPrimary() throws Exception {
  openRegion(hriSecondary);
  try {
    flushRegion(hriPrimary);
    Put put = new Put(row);
    put.addColumn(f, row, row);
    table.put(put);
    flushRegion(hriPrimary);
  } finally {
    table.delete(new Delete(row));
    closeRegion(hriSecondary);
  }
}
/** Flushing only the secondary replica must succeed when empty and again after a write. */
@Test
public void testFlushSecondary() throws Exception {
  openRegion(hriSecondary);
  try {
    flushRegion(hriSecondary);
    Put put = new Put(row);
    put.addColumn(f, row, row);
    table.put(put);
    flushRegion(hriSecondary);
  } catch (TableNotFoundException expected) {
    // Deliberately tolerated by this test.
  } finally {
    table.delete(new Delete(row));
    closeRegion(hriSecondary);
  }
}
// End-to-end replica-read scenario: put, non-stale gets, stale (TIMELINE) gets with the
// primary latched, existence checks before/after flush. The latch/sleep ordering below is
// deliberate and order-sensitive; each phase is followed by a LOG marker.
@Test
public void testUseRegionWithReplica() throws Exception {
byte[] b1 = Bytes.toBytes("testUseRegionWithReplica");
openRegion(hriSecondary);
try {
// A simple put works, even if there is a second replica
Put p = new Put(b1);
p.addColumn(f, b1, b1);
table.put(p);
LOG.info("Put done");
// A get works and is not stale
Get g = new Get(b1);
Result r = table.get(g);
Assert.assertFalse(r.isStale());
Assert.assertFalse(r.getColumnCells(f, b1).isEmpty());
LOG.info("get works and is not stale done");
// Even if we have to wait a little on the main region
SlowMeCopro.sleepTime.set(2000);
g = new Get(b1);
r = table.get(g);
Assert.assertFalse(r.isStale());
Assert.assertFalse(r.getColumnCells(f, b1).isEmpty());
SlowMeCopro.sleepTime.set(0);
LOG.info("sleep and is not stale done");
// But if we ask for stale we will get it (the put is not flushed, so the secondary
// does not see the cell yet)
SlowMeCopro.getPrimaryCdl().set(new CountDownLatch(1));
g = new Get(b1);
g.setConsistency(Consistency.TIMELINE);
r = table.get(g);
Assert.assertTrue(r.isStale());
Assert.assertTrue(r.getColumnCells(f, b1).isEmpty());
SlowMeCopro.getPrimaryCdl().get().countDown();
LOG.info("stale done");
// exists works and is not stale
g = new Get(b1);
g.setCheckExistenceOnly(true);
r = table.get(g);
Assert.assertFalse(r.isStale());
Assert.assertTrue(r.getExists());
LOG.info("exists not stale done");
// exists works on stale but doesn't see the put
SlowMeCopro.getPrimaryCdl().set(new CountDownLatch(1));
g = new Get(b1);
g.setCheckExistenceOnly(true);
g.setConsistency(Consistency.TIMELINE);
r = table.get(g);
Assert.assertTrue(r.isStale());
Assert.assertFalse("The secondary has stale data", r.getExists());
SlowMeCopro.getPrimaryCdl().get().countDown();
LOG.info("exists stale before flush done");
flushRegion(hriPrimary);
flushRegion(hriSecondary);
LOG.info("flush done");
// Give the secondary time to pick up the flushed files (REFRESH_PERIOD refresher).
Thread.sleep(1000 + REFRESH_PERIOD * 2);
// get works and now returns the flushed data even from the (stale) secondary
SlowMeCopro.getPrimaryCdl().set(new CountDownLatch(1));
g = new Get(b1);
g.setConsistency(Consistency.TIMELINE);
r = table.get(g);
Assert.assertTrue(r.isStale());
Assert.assertFalse(r.isEmpty());
SlowMeCopro.getPrimaryCdl().get().countDown();
LOG.info("stale done");
// exists works on stale and we see the put after the flush
SlowMeCopro.getPrimaryCdl().set(new CountDownLatch(1));
g = new Get(b1);
g.setCheckExistenceOnly(true);
g.setConsistency(Consistency.TIMELINE);
r = table.get(g);
Assert.assertTrue(r.isStale());
Assert.assertTrue(r.getExists());
SlowMeCopro.getPrimaryCdl().get().countDown();
LOG.info("exists stale after flush done");
} finally {
// Unconditional cleanup: release any latch still held and reset the delay.
SlowMeCopro.getPrimaryCdl().get().countDown();
SlowMeCopro.sleepTime.set(0);
Delete d = new Delete(b1);
table.delete(d);
closeRegion(hriSecondary);
}
}
// Verifies hedged-read metrics: one scenario where the hedged (replica) call happens but
// the primary still wins, and one where the primary is latched so the replica wins.
// The counter resets and latch ordering below are order-sensitive.
@Test
public void testHedgedRead() throws Exception {
byte[] b1 = Bytes.toBytes("testHedgedRead");
openRegion(hriSecondary);
try {
// A simple put works, even if there is a second replica
Put p = new Put(b1);
p.addColumn(f, b1, b1);
table.put(p);
LOG.info("Put done");
// A get works and is not stale
Get g = new Get(b1);
Result r = table.get(g);
Assert.assertFalse(r.isStale());
Assert.assertFalse(r.getColumnCells(f, b1).isEmpty());
LOG.info("get works and is not stale done");
// reset the hedged-read counters so the assertions below start from zero
AsyncConnectionImpl conn = (AsyncConnectionImpl) HTU.getConnection().toAsyncConnection();
Counter hedgedReadOps = conn.getConnectionMetrics().get().hedgedReadOps;
Counter hedgedReadWin = conn.getConnectionMetrics().get().hedgedReadWin;
hedgedReadOps.dec(hedgedReadOps.getCount());
hedgedReadWin.dec(hedgedReadWin.getCount());
// Wait a little on the main region, just enough for one hedged read to happen,
// while the hedged read does not return faster (secondary is latched).
long primaryCallTimeoutNs = conn.connConf.getPrimaryCallTimeoutNs();
// The resolution of our timer is 10ms, so we need to sleep a bit more otherwise we may not
// trigger the hedged read...
SlowMeCopro.sleepTime.set(TimeUnit.NANOSECONDS.toMillis(primaryCallTimeoutNs) + 100);
SlowMeCopro.getSecondaryCdl().set(new CountDownLatch(1));
g = new Get(b1);
g.setConsistency(Consistency.TIMELINE);
r = table.get(g);
Assert.assertFalse(r.isStale());
Assert.assertFalse(r.getColumnCells(f, b1).isEmpty());
Assert.assertEquals(1, hedgedReadOps.getCount());
Assert.assertEquals(0, hedgedReadWin.getCount());
SlowMeCopro.sleepTime.set(0);
SlowMeCopro.getSecondaryCdl().get().countDown();
LOG.info("hedged read occurred but not faster");
// But if we ask for stale we will get it and the hedged read returns faster
SlowMeCopro.getPrimaryCdl().set(new CountDownLatch(1));
g = new Get(b1);
g.setConsistency(Consistency.TIMELINE);
r = table.get(g);
Assert.assertTrue(r.isStale());
Assert.assertTrue(r.getColumnCells(f, b1).isEmpty());
Assert.assertEquals(2, hedgedReadOps.getCount());
// we update the metrics after we finish the request so we use a waitFor here, use assert
// directly may cause failure if we run too fast.
HTU.waitFor(10000, () -> hedgedReadWin.getCount() == 1);
SlowMeCopro.getPrimaryCdl().get().countDown();
LOG.info("hedged read occurred and faster");
} finally {
// Unconditional cleanup: release any latch still held and reset the delay.
SlowMeCopro.getPrimaryCdl().get().countDown();
SlowMeCopro.getSecondaryCdl().get().countDown();
SlowMeCopro.sleepTime.set(0);
Delete d = new Delete(b1);
table.delete(d);
closeRegion(hriSecondary);
}
}
}
| |
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.config.monitor;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.config.server.AbstractScmEnvironmentRepository;
import org.springframework.cloud.config.server.NativeEnvironmentRepository;
import org.springframework.context.ResourceLoaderAware;
import org.springframework.context.SmartLifecycle;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.PatternMatchUtils;
import lombok.extern.apachecommons.CommonsLog;
/**
* Configuration for a file watcher that detects changes in local files related to the
* environment repository. If any files change, the {@link PropertyPathEndpoint} is pinged
* with the paths of the files. This applies to the source files of a local git repository
* (i.e. a git repository with a "file:" URI) or to a native repository.
*
* @author Dave Syer
*
*/
@Configuration
@CommonsLog
@EnableScheduling
public class FileMonitorConfiguration implements SmartLifecycle, ResourceLoaderAware {
@Autowired
PropertyPathEndpoint endpoint;
@Autowired(required = false)
AbstractScmEnvironmentRepository scmRepository;
@Autowired(required = false)
NativeEnvironmentRepository nativeEnvironmentRepository;
private boolean running;
private WatchService watcher;
private Set<Path> directory;
private int phase;
private boolean autoStartup = true;
private ResourceLoader resourceLoader;
private String[] excludes = new String[] { ".*", "#*", "*#" };
@Override
public void setResourceLoader(ResourceLoader resourceLoader) {
this.resourceLoader = resourceLoader;
}
@Override
public int getPhase() {
return this.phase;
}
/**
* see {@link #getPhase()}
* @param phase the phase.
*/
public void setPhase(int phase) {
this.phase = phase;
}
@Override
public boolean isRunning() {
return this.running;
}
/**
* @see #isRunning()
* @param running true if running.
*/
public void setRunning(boolean running) {
this.running = running;
}
@Override
public boolean isAutoStartup() {
return this.autoStartup;
}
/**
* @see #isAutoStartup()
* @param autoStartup true to auto start.
*/
public void setAutoStartup(boolean autoStartup) {
this.autoStartup = autoStartup;
}
@Override
public synchronized void start() {
if (!this.running) {
this.directory = getFileRepo();
if (this.directory != null && !this.directory.isEmpty()) {
try {
this.watcher = FileSystems.getDefault().newWatchService();
for (Path path : this.directory) {
walkDirectory(path);
}
}
catch (IOException e) {
}
}
this.running = true;
}
}
@Override
public synchronized void stop() {
if (this.running) {
try {
this.watcher.close();
}
catch (IOException e) {
log.error("Failed to close watcher for " + this.directory.toString(), e);
}
this.running = false;
}
}
@Override
public void stop(Runnable callback) {
stop();
callback.run();
}
@Scheduled(fixedRateString = "${spring.cloud.config.server.monitor.fixedDelay:5000}")
public void poll() {
for (File file : filesFromEvents()) {
this.endpoint.notifyByPath(new LinkedMultiValueMap<String, String>(),
Collections.<String, Object> singletonMap("path",
file.getAbsolutePath()));
}
}
private Set<Path> getFileRepo() {
if (this.scmRepository != null
&& this.scmRepository.getUri().startsWith("file:")) {
try {
return Collections.singleton(Paths.get(this.resourceLoader
.getResource(this.scmRepository.getUri()).getURI()));
}
catch (IOException e) {
log.error("Cannot resolve URI for path: " + this.scmRepository.getUri());
}
}
if (this.nativeEnvironmentRepository != null) {
Set<Path> paths = new LinkedHashSet<>();
for (String path : this.nativeEnvironmentRepository.getSearchLocations()) {
Resource resource = this.resourceLoader.getResource(path);
if (resource.exists()) {
try {
paths.add(Paths.get(resource.getURI()));
}
catch (IOException e) {
log.error("Cannot resolve URI for path: " + path);
}
}
}
return paths;
}
return null;
}
private Set<File> filesFromEvents() {
WatchKey key = this.watcher.poll();
Set<File> files = new LinkedHashSet<File>();
while (key != null) {
for (WatchEvent<?> event : key.pollEvents()) {
if (event.kind() == StandardWatchEventKinds.ENTRY_CREATE
|| event.kind() == StandardWatchEventKinds.ENTRY_MODIFY) {
Path item = (Path) event.context();
File file = new File(((Path) key.watchable()).toAbsolutePath()
+ File.separator + item.getFileName());
if (file.isDirectory()) {
files.addAll(walkDirectory(file.toPath()));
}
else {
if (!file.getPath().contains(".git") && !PatternMatchUtils
.simpleMatch(this.excludes, file.getName())) {
if (log.isDebugEnabled()) {
log.debug("Watch Event: " + event.kind() + ": " + file);
}
files.add(file);
}
}
}
else if (event.kind() == StandardWatchEventKinds.OVERFLOW) {
if (log.isDebugEnabled()) {
log.debug("Watch Event: " + event.kind() + ": context: "
+ event.context());
}
if (event.context() != null && event.context() instanceof Path) {
files.addAll(walkDirectory((Path) event.context()));
}
else {
for (Path path : this.directory) {
files.addAll(walkDirectory(path));
}
}
}
else {
if (log.isDebugEnabled()) {
log.debug("Watch Event: " + event.kind() + ": context: "
+ event.context());
}
}
}
key.reset();
key = this.watcher.poll();
}
return files;
}
private Set<File> walkDirectory(Path directory) {
final Set<File> walkedFiles = new LinkedHashSet<File>();
try {
registerWatch(directory);
Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult preVisitDirectory(Path dir,
BasicFileAttributes attrs) throws IOException {
FileVisitResult fileVisitResult = super.preVisitDirectory(dir, attrs);
registerWatch(dir);
return fileVisitResult;
}
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
throws IOException {
FileVisitResult fileVisitResult = super.visitFile(file, attrs);
walkedFiles.add(file.toFile());
return fileVisitResult;
}
});
}
catch (IOException e) {
log.error("Failed to walk directory: " + directory.toString(), e);
}
return walkedFiles;
}
private void registerWatch(Path dir) throws IOException {
if (log.isDebugEnabled()) {
log.debug("registering: " + dir + " for file creation events");
}
dir.register(this.watcher, StandardWatchEventKinds.ENTRY_CREATE,
StandardWatchEventKinds.ENTRY_MODIFY);
}
}
| |
package com.pearson.docussandra;
import com.pearson.docussandra.exception.IndexParseFieldException;
import java.nio.ByteBuffer;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author https://github.com/JeffreyDeYoung
*/
public class ParseUtilsTest
{
private static Logger logger = LoggerFactory.getLogger(ParseUtilsTest.class);
public ParseUtilsTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of parseBase64StringAsByteBuffer method, of class ParseUtils.
*/
@Test
public void testParseBase64StringAsByteBuffer() throws Exception
{
System.out.println("parseBase64StringAsByteBuffer");
// good input: base64 of "This is a goodly test message."
String in = "VGhpcyBpcyBhIGdvb2RseSB0ZXN0IG1lc3NhZ2Uu";
ByteBuffer result = ParseUtils.parseBase64StringAsByteBuffer(in);
assertNotNull(result);
assertTrue(result.hasArray());
assertTrue(result.hasRemaining());
assertTrue(result.array().length != 0);
// NOTE(review): new String(byte[]) uses the platform charset — presumably fine for
// this ASCII fixture, but an explicit charset would be safer.
assertEquals(new String(result.array()), "This is a goodly test message.");
}
/**
* Test of parseStringAsBoolean method, of class ParseUtils.
*/
@Test
public void testParseStringAsBoolean() throws Exception
{
System.out.println("parseStringAsBoolean");
// false in every accepted spelling: mixed case, single letter, and "0"
boolean result = ParseUtils.parseStringAsBoolean("false");
assertEquals(false, result);
result = ParseUtils.parseStringAsBoolean("False");
assertEquals(false, result);
result = ParseUtils.parseStringAsBoolean("faLSe");
assertEquals(false, result);
result = ParseUtils.parseStringAsBoolean("FALSE");
assertEquals(false, result);
result = ParseUtils.parseStringAsBoolean("f");
assertEquals(false, result);
result = ParseUtils.parseStringAsBoolean("F");
assertEquals(false, result);
result = ParseUtils.parseStringAsBoolean("0");
assertEquals(false, result);
// true in every accepted spelling: mixed case, single letter, and "1"
result = ParseUtils.parseStringAsBoolean("true");
assertEquals(true, result);
result = ParseUtils.parseStringAsBoolean("True");
assertEquals(true, result);
result = ParseUtils.parseStringAsBoolean("tRUe");
assertEquals(true, result);
result = ParseUtils.parseStringAsBoolean("TRUE");
assertEquals(true, result);
result = ParseUtils.parseStringAsBoolean("t");
assertEquals(true, result);
result = ParseUtils.parseStringAsBoolean("T");
assertEquals(true, result);
result = ParseUtils.parseStringAsBoolean("1");
assertEquals(true, result);
// unparseable input must raise IndexParseFieldException (no cause, with a message)
boolean expectExceptionThrown = false;
try
{
ParseUtils.parseStringAsBoolean("");
} catch (IndexParseFieldException e)
{
expectExceptionThrown = true;
assertNull(e.getCause());
assertNotNull(e.getMessage());
}
assertTrue(expectExceptionThrown);
expectExceptionThrown = false;
try
{
ParseUtils.parseStringAsBoolean("blah");
} catch (IndexParseFieldException e)
{
expectExceptionThrown = true;
assertNull(e.getCause());
assertNotNull(e.getMessage());
// the offending value must be echoed in the message and the exception field
assertTrue(e.getMessage().contains("blah"));
assertEquals(e.getFieldValue(), "blah");
}
assertTrue(expectExceptionThrown);
}
/**
* Test of parseStringAsDate method, of class ParseUtils.
*/
@Test
public void testParseStringAsDate() throws Exception
{
// Pin the default zone so date parsing is deterministic across machines.
TimeZone.setDefault(TimeZone.getTimeZone("GMT"));
System.out.println("parseStringAsDate");
Date testDate = new Date();
String in = testDate.toString();
Date result = ParseUtils.parseStringAsDate(in);//Natty catches this one
assertEquals(testDate.getTime(), result.getTime(), 100l);
DateFormat format = DateFormat.getDateInstance(DateFormat.LONG);
in = format.format(testDate);
result = ParseUtils.parseStringAsDate(in);//Natty catches this one
assertEquals(testDate.getTime(), result.getTime(), 3600l);
// Build 1903-12-17 00:00:00 with the deprecated Date setters (year is 1900-based).
// NOTE(review): the milliseconds of 'new Date()' are never cleared, so the delta-0
// comparisons below assume the parser result happens to line up — confirm this is
// not a source of flakiness.
testDate = new Date();
testDate.setYear(3);
testDate.setMonth(11);
testDate.setDate(17);
testDate.setHours(0);
testDate.setMinutes(0);
testDate.setSeconds(0);
//testDate.setTime(testDate.getTime() + (3600000 * -8));
in = "12/17/1903";
result = ParseUtils.parseStringAsDate(in);
assertEquals(testDate.getTime(), result.getTime(), 0l);
//in = "17/12/1903";//nope!
in = "17 Dec 1903 00:00:00";//Natty catches this one
result = ParseUtils.parseStringAsDate(in);
assertEquals(testDate.getTime(), result.getTime(), 0l);
// Round-trip a sequence of progressively less precise ISO-8601-style formats.
SimpleDateFormat simpleDtFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
in = simpleDtFormat.format(testDate);
result = ParseUtils.parseStringAsDate(in);
assertEquals(testDate.getTime(), result.getTime(), 100l);
in = "2015-07-04T23:54:00.000-06:00";
result = ParseUtils.parseStringAsDate(in);
assertEquals(simpleDtFormat.parse(in).getTime(), result.getTime(), 100l);
in = "2015-07-04T23:54:00-06:00";
simpleDtFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX");
result = ParseUtils.parseStringAsDate(in);
assertEquals(simpleDtFormat.parse(in).getTime(), result.getTime(), 100l);
in = "2015-07-04T23:54-06:00";
simpleDtFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mmXXX");
result = ParseUtils.parseStringAsDate(in);
assertEquals(simpleDtFormat.parse(in).getTime(), result.getTime(), 100l);
simpleDtFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
in = simpleDtFormat.format(testDate);
result = ParseUtils.parseStringAsDate(in);
assertEquals(testDate.getTime(), result.getTime(), 100l);
simpleDtFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
in = simpleDtFormat.format(testDate);
result = ParseUtils.parseStringAsDate(in);
assertEquals(testDate.getTime(), result.getTime(), 100l);
simpleDtFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX");
in = simpleDtFormat.format(testDate);
result = ParseUtils.parseStringAsDate(in);
assertEquals(testDate.getTime(), result.getTime(), 100l);
}
/**
* Test of parseStringAsDouble method, of class ParseUtils.
*/
@Test
public void testParseStringAsDouble() throws Exception
{
System.out.println("parseStringAsDouble");
// zero, positive, and negative values
String in = "0";
double expResult = 0.0;
double result = ParseUtils.parseStringAsDouble(in);
assertEquals(expResult, result, 0.0);
in = "1.123456";
expResult = 1.123456;
result = ParseUtils.parseStringAsDouble(in);
assertEquals(expResult, result, 0.0);
in = "-1.123456";
expResult = -1.123456;
result = ParseUtils.parseStringAsDouble(in);
assertEquals(expResult, result, 0.0);
// non-numeric input must raise IndexParseFieldException with a cause and a message
boolean expectExceptionThrown = false;
try
{
ParseUtils.parseStringAsDouble("dafhfda");
} catch (IndexParseFieldException e)
{
expectExceptionThrown = true;
assertNotNull(e.getCause());
assertNotNull(e.getMessage());
}
assertTrue(expectExceptionThrown);
}
/**
* Test of parseStringAsInt method, of class ParseUtils.
*/
@Test
public void testParseStringAsInteger() throws Exception
{
System.out.println("parseStringAsInt");
// zero, positive, and negative values
String in = "0";
int expResult = 0;
int result = ParseUtils.parseStringAsInt(in);
assertEquals(expResult, result);
in = "1";
expResult = 1;
result = ParseUtils.parseStringAsInt(in);
assertEquals(expResult, result, 0.0);
in = "-1";
expResult = -1;
result = ParseUtils.parseStringAsInt(in);
assertEquals(expResult, result, 0.0);
// non-numeric input must raise IndexParseFieldException with a cause and a message
boolean expectExceptionThrown = false;
try
{
ParseUtils.parseStringAsInt("dafhfda");
} catch (IndexParseFieldException e)
{
expectExceptionThrown = true;
assertNotNull(e.getCause());
assertNotNull(e.getMessage());
}
assertTrue(expectExceptionThrown);
}
}
| |
/*
* Copyright (c) 2014-2021 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.polygon.connector.ldap.search;
import java.util.Base64;
import java.util.List;
import com.evolveum.polygon.connector.ldap.*;
import com.evolveum.polygon.connector.ldap.connection.ConnectionManager;
import org.apache.directory.api.ldap.extras.controls.vlv.VirtualListViewRequest;
import org.apache.directory.api.ldap.extras.controls.vlv.VirtualListViewRequestImpl;
import org.apache.directory.api.ldap.extras.controls.vlv.VirtualListViewResponse;
import org.apache.directory.api.ldap.extras.controls.vlv.VirtualListViewResultCode;
import org.apache.directory.api.ldap.model.cursor.CursorException;
import org.apache.directory.api.ldap.model.cursor.SearchCursor;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.filter.ExprNode;
import org.apache.directory.api.ldap.model.message.LdapResult;
import org.apache.directory.api.ldap.model.message.Referral;
import org.apache.directory.api.ldap.model.message.Response;
import org.apache.directory.api.ldap.model.message.ResultCodeEnum;
import org.apache.directory.api.ldap.model.message.SearchRequest;
import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
import org.apache.directory.api.ldap.model.message.SearchResultDone;
import org.apache.directory.api.ldap.model.message.SearchResultEntry;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.api.ldap.model.message.controls.SortRequest;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.ldap.client.api.exception.InvalidConnectionException;
import org.apache.directory.ldap.client.api.exception.LdapConnectionTimeOutException;
import org.identityconnectors.common.logging.Log;
import org.identityconnectors.framework.common.exceptions.ConfigurationException;
import org.identityconnectors.framework.common.exceptions.ConnectorIOException;
import org.identityconnectors.framework.common.objects.ObjectClass;
import org.identityconnectors.framework.common.objects.OperationOptions;
import org.identityconnectors.framework.common.objects.ResultsHandler;
import com.evolveum.polygon.connector.ldap.schema.AbstractSchemaTranslator;
/**
* @author semancik
*
*/
public class VlvSearchStrategy<C extends AbstractLdapConfiguration> extends SearchStrategy<C> {
private static final Log LOG = Log.getLog(VlvSearchStrategy.class);
private int lastListSize = -1;
private byte[] cookie = null;
// Pure pass-through to the base SearchStrategy; the VLV-specific state (lastListSize,
// cookie) is reset at the start of each search() call rather than here.
public VlvSearchStrategy(ConnectionManager<C> connectionManager, AbstractLdapConfiguration configuration,
AbstractSchemaTranslator<C> schemaTranslator, ObjectClass objectClass,
org.apache.directory.api.ldap.model.schema.ObjectClass ldapObjectClass,
ResultsHandler handler, ErrorHandler errorHandler, ConnectionLog connectionLog,
OperationOptions options) {
super(connectionManager, configuration, schemaTranslator, objectClass, ldapObjectClass, handler, errorHandler, connectionLog, options);
}
/* (non-Javadoc)
* @see com.evolveum.polygon.connector.ldap.search.SearchStrategy#search(java.lang.String, org.apache.directory.api.ldap.model.filter.ExprNode, org.apache.directory.api.ldap.model.message.SearchScope, java.lang.String[])
*/
@Override
public void search(Dn baseDn, ExprNode filterNode, SearchScope scope, String[] attributes)
throws LdapException {
boolean proceed = true;
int index = 1;
if (getOptions() != null && getOptions().getPagedResultsOffset() != null) {
if (getOptions().getPagedResultsOffset() < 1) {
throw new UnsupportedOperationException("Offset "+getOptions().getPagedResultsOffset()+" is not supported when VLV is used");
}
index = getOptions().getPagedResultsOffset();
}
Integer numberOfEntriesToReturn = null; // null means "as many as there are"
if (getOptions() != null && getOptions().getPageSize() != null) {
numberOfEntriesToReturn = getOptions().getPageSize();
}
String vlvSortAttributeName = null;
if (!hasSortOption()) {
// Do not even try to do this if there is explicit sort option. This saves times and avoid some failures.
String vlvSortAttributeConfig = getConfiguration().getVlvSortAttribute();
List<String> vlvSortAttributeCandidateList = LdapUtil.splitComma(vlvSortAttributeConfig);
vlvSortAttributeName = getSchemaTranslator().selectAttribute(getLdapObjectClass(), vlvSortAttributeCandidateList);
if (vlvSortAttributeName == null) {
throw new ConfigurationException("Cannot find appropriate sort attribute for object class "+getLdapObjectClass().getName()
+", tried "+vlvSortAttributeCandidateList + " ("+vlvSortAttributeConfig+")");
}
}
SortRequest sortReqControl = createSortControl(vlvSortAttributeName, getConfiguration().getVlvSortOrderingRule());
sortReqControl.setCritical(true);
lastListSize = 0;
cookie = null;
if (getOptions() != null && getOptions().getPagedResultsCookie() != null) {
cookie = Base64.getDecoder().decode(getOptions().getPagedResultsCookie());
}
connect(baseDn);
Dn lastResultDn = null;
int numberOfResutlsReturned = 0;
OUTER: while (proceed) {
SearchRequest req = new SearchRequestImpl();
req.setBase(baseDn);
req.setFilter(preProcessSearchFilter(filterNode));
req.setScope(scope);
applyCommonConfiguration(req);
if (attributes != null) {
req.addAttributes(attributes);
}
if (sortReqControl != null) {
req.addControl(sortReqControl);
}
// VLV
int afterCount = getDefaultPageSize() - 1;
if (numberOfEntriesToReturn != null && (numberOfResutlsReturned + afterCount + 1 > numberOfEntriesToReturn)) {
afterCount = numberOfEntriesToReturn - numberOfResutlsReturned - 1;
}
VirtualListViewRequest vlvReqControl = new VirtualListViewRequestImpl();
vlvReqControl.setCritical(true);
vlvReqControl.setBeforeCount(0);
vlvReqControl.setAfterCount(afterCount);
vlvReqControl.setOffset(index);
vlvReqControl.setContentCount(lastListSize);
vlvReqControl.setContextId(cookie);
req.addControl(vlvReqControl);
int responseResultCount = 0;
SearchCursor searchCursor = executeSearch(req);
try {
while (proceed) {
try {
if (!searchCursor.next()) {
break;
}
} catch (LdapConnectionTimeOutException | InvalidConnectionException e) {
logSearchError(req, responseResultCount, e);
// Server disconnected. And by some miracle this was not caught by
// checkAlive or connection manager.
LOG.ok("Connection error ({0}), reconnecting", e.getMessage(), e);
// No need to close the cursor here. It is already closed as part of error handling in next() method.
connectionReconnect(baseDn, e);
incrementRetryAttempts();
continue OUTER;
}
Response response = searchCursor.get();
if (response instanceof SearchResultEntry) {
responseResultCount++;
Entry entry = ((SearchResultEntry)response).getEntry();
logSearchResult(entry);
boolean overlap = false;
if (lastResultDn != null) {
if (lastResultDn.equals(entry.getDn())) {
LOG.warn("Working around rounding error overlap at index {0} (name={1})", index, lastResultDn);
overlap = true;
}
lastResultDn = null;
}
if (!overlap) {
proceed = handleResult(entry);
numberOfResutlsReturned++;
}
index++;
if (!proceed) {
LOG.ok("Ending search because handler returned false");
// We really want to abandon the operation here.
LdapUtil.closeAbandonCursor(searchCursor);
break;
}
lastResultDn = entry.getDn();
} else {
LOG.warn("Got unexpected response: {0}", response);
}
}
SearchResultDone searchResultDone = searchCursor.getSearchResultDone();
logSearchOperationDone(req, responseResultCount, searchResultDone);
// We really want to call searchCursor.next() here, even though we do not care about the result.
// The implementation of cursor.next() sets the "done" status of the cursor.
// If we do not do that, the subsequent close() operation on the cursor will send an
// ABANDON command, even though the operation is already finished. (MID-7091)
searchCursor.next();
// We want to do close with ABANDON here, in case that the operation is not finished.
// However, make sure we call searchCursor.next() before closing, we do not want to send abandons when not needed.
LdapUtil.closeAbandonCursor(searchCursor);
if (searchResultDone == null) {
if (proceed) {
// This should not happen. If the search was terminated by the server, there should be "done" record.
// May this be caused by server closing connection and the Directory API not detecting that?
returnConnection();
LOG.error("Search was not finished properly, {0} entries were received, but the \"done\" response was not received", responseResultCount);
throw new ConnectorIOException("LDAP search was not finished properly, the results may be incomplete.");
} else {
// The search was terminated due to our decision. The "done" record is not expected.
break;
}
} else {
LdapResult ldapResult = searchResultDone.getLdapResult();
// process VLV response
VirtualListViewResponse vlvResponseControl = (VirtualListViewResponse)searchResultDone.getControl(VirtualListViewResponse.OID);
String extra = "no VLV response control";
if (vlvResponseControl != null) {
StringBuilder sb = new StringBuilder();
sb.append("VLV targetPosition=");
sb.append(vlvResponseControl.getTargetPosition());
sb.append(", contentCount=");
sb.append(vlvResponseControl.getContentCount());
if (vlvResponseControl.getContextId() != null) {
sb.append(", contextID=");
byte[] contextId = vlvResponseControl.getContextId();
if (contextId == null) {
sb.append("null");
} else {
sb.append(Base64.getEncoder().encodeToString(vlvResponseControl.getContextId()));
}
}
sb.append(", result=");
if (vlvResponseControl.getVirtualListViewResult() == null) {
sb.append("null");
} else {
sb.append(vlvResponseControl.getVirtualListViewResult().name());
sb.append("(").append(vlvResponseControl.getVirtualListViewResult().getValue()).append(")");
}
extra = sb.toString();
cookie = vlvResponseControl.getContextId();
if (vlvResponseControl.getContentCount() == 0) {
lastListSize = -1;
} else {
lastListSize = vlvResponseControl.getContentCount();
}
if (vlvResponseControl.getVirtualListViewResult() == VirtualListViewResultCode.OFFSETRANGEERROR
|| vlvResponseControl.getVirtualListViewResult() == VirtualListViewResultCode.OPENLDAP_RANGEERRROR) {
// The offset is out of range. Do not indicate that as an error. Just return empty search results.
LOG.ok("Ending search because VLV response indicated offset out of range (resultCode={0})", vlvResponseControl.getVirtualListViewResult().getValue());
break;
}
} else {
cookie = null;
lastListSize = -1;
}
logSearchResult( "Done", ldapResult, extra);
if (ldapResult.getResultCode() == ResultCodeEnum.REFERRAL) {
LOG.ok("Ignoring referral {0}", ldapResult.getReferral());
} else if (ldapResult.getResultCode() == ResultCodeEnum.SUCCESS) {
// continue the loop
} else if (ldapResult.getResultCode() == ResultCodeEnum.BUSY) {
// OpenLDAP gives this error when the server SSS/VLV resources are depleted. It looks like there is no
// better way how to clean that up than to drop connection and reconnect.
incrementRetryAttempts();
LOG.ok("Got BUSY response after VLV search. reconnecting and retrying");
connectionReconnect(baseDn, new RuntimeException("BUSY response after VLV search"));
if (connection == null) {
throw new ConnectorIOException("Cannot reconnect (baseDn="+baseDn+")");
}
lastListSize = 0;
cookie = null;
continue;
} else {
String msg = "LDAP error during search in "+baseDn+": "+LdapUtil.formatLdapMessage(ldapResult);
if (ldapResult.getResultCode() == ResultCodeEnum.SIZE_LIMIT_EXCEEDED && getOptions() != null && getOptions().getAllowPartialResults() != null && getOptions().getAllowPartialResults()) {
LOG.ok("{0} (allowed error)", msg);
setCompleteResultSet(false);
break;
} else {
RuntimeException connidException = processLdapResult("LDAP error during search in " + baseDn, ldapResult);
if (connidException instanceof ReconnectException) {
reconnectSameServer(connidException);
incrementRetryAttempts();
// Next iteration of the loop will re-try the operation with the same parameter, but different connection
// TODO: Handling of cookie and lastListSize is questionable here.
// Will the cookie be useful in a new connection? We have to experiment with this to see.
// However, these errors are rare, and almost impossible to reproduce in controlled environment.
continue;
} else {
LOG.error("{0}", msg);
returnConnection();
throw connidException;
}
}
}
}
} catch (CursorException e) {
LOG.error("Mysterions CursorException in VLV search: {0}", e.getMessage(), e);
returnConnection();
// TODO: better error handling
throw new ConnectorIOException(e.getMessage(), e);
}
if (lastListSize > 0 && index > lastListSize) {
LOG.ok("Ending VLV search because index ({0}) went over list size ({1})", index, lastListSize);
break;
}
if (numberOfEntriesToReturn != null && numberOfEntriesToReturn <= numberOfResutlsReturned) {
LOG.ok("Ending VLV search because enough entries already returned");
break;
}
if (responseResultCount == 0) {
LOG.warn("Ending VLV search because received no results");
break;
}
}
// TODO: close connection to purge the search state?
returnConnection();
}
@Override
public int getRemainingPagedResults() {
    // A negative last list size means the total is unknown; report it unchanged.
    if (lastListSize < 0) {
        return lastListSize;
    }
    // Translate the 1-based paged-results offset from the options (if any) to 0-based.
    final int startOffset =
            (getOptions() == null || getOptions().getPagedResultsOffset() == null)
                    ? 0
                    : getOptions().getPagedResultsOffset() - 1;
    // Remaining = total server-side list size minus skipped entries and entries already found.
    return lastListSize - startOffset - getNumberOfEntriesFound();
}
@Override
public String getPagedResultsCookie() {
    // Expose the raw server-side VLV context id as a base64 string, or null when absent.
    return (cookie == null) ? null : Base64.getEncoder().encodeToString(cookie);
}
@Override
protected String getStrategyTag() {
    // Short identifier for this search strategy (virtual list view), used for logging/diagnostics.
    return "vlv";
}
}
| |
/*
* Copyright (c) 2003-2013, KNOPFLERFISH project
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* - Neither the name of the KNOPFLERFISH project nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.knopflerfish.bundle.desktop.swing;
import java.awt.Color;
import java.awt.Component;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import org.osgi.framework.Bundle;
import org.knopflerfish.framework.Util.HeaderEntry;
/**
* Image icon for a bundle that paints different overlays based on the bundles
* state.
*/
public class BundleImageIcon
  extends ImageIcon
{
  private static final long serialVersionUID = 1L;

  /**
   * The name of the bundle icon manifest header.
   *
   * This constant should be in {@link org.osgi.framework.Constants} but is
   * not...
   */
  final static String BUNDLE_ICON = "Bundle-Icon";

  // Image icons for the bundle state overlays:
  static final ImageIcon OVERLAY_ACTIVE =
    new ImageIcon(BundleImageIcon.class.getResource("/overlay_active.png"));
  static final ImageIcon OVERLAY_INSTALLED = null; // No overlay for installed
  static final ImageIcon OVERLAY_RESOLVED =
    new ImageIcon(BundleImageIcon.class.getResource("/overlay_resolved.png"));
  static final ImageIcon OVERLAY_STARTING =
    new ImageIcon(BundleImageIcon.class.getResource("/overlay_starting.png"));
  static final ImageIcon OVERLAY_STOPPING =
    new ImageIcon(BundleImageIcon.class.getResource("/overlay_stopping.png"));
  static final ImageIcon OVERLAY_UNINSTALLED = null; // No overlay for
                                                     // uninstalled

  /**
   * Get the bundle icon URL for icon with size 32 from the manifest header
   * "Bundle-Icon".
   *
   * <p>
   * Care must be taken to not trigger a resolve of the bundle when reading the
   * icon image from inside it.
   * </p>
   *
   * @param bundle
   *          The bundle to get icon URL for.
   *
   * @return URL of the chosen icon entry, or {@code null} when the header is
   *         absent or no usable icon entry could be resolved.
   */
  private static URL getBundleIconURL(final Bundle bundle)
  {
    URL res = null;
    final String bih = bundle.getHeaders().get(BUNDLE_ICON);
    if (null != bih && 0 < bih.length()) {
      // Re-uses the manifest entry parser from the KF-framework
      try {
        String iconName = null;
        int iconSize = -1;
        // We prefer a 32x32 size icon.
        for (final HeaderEntry he : org.knopflerfish.framework.Util
            .parseManifestHeader(BUNDLE_ICON, bih, false, true, false)) {
          final List<String> icns = he.getKeys();
          final String sizeS = (String) he.getAttributes().get("size");
          if (null == sizeS) {
            // Icon with unspecified size; use it if no other icon
            // has been found.
            if (null == iconName) {
              iconName = icns.get(0);
            }
          } else {
            int size = -1;
            try {
              size = Integer.parseInt(sizeS);
            } catch (final NumberFormatException nfe) {
              // Ignored: a malformed size attribute is treated the same as an
              // invalid size (the entry is skipped below since size stays -1).
            }
            if (-1 < size) {
              if (-1 == iconSize) {
                // First icon with a valid size; start with it.
                iconName = icns.get(0);
                iconSize = size;
              } else if (Math.abs(size - 32) < Math.abs(iconSize - 32)) {
                // Icon is closer in size to 32 than old icon; use it
                iconName = icns.get(0);
                iconSize = size;
              }
            }
          }
        }
        if (null != iconName) {
          try {
            try {
              // The icon name may be an absolute URL (e.g. http:...).
              res = new URL(iconName);
            } catch (final MalformedURLException mfe) {
              // iconName is not a valid URL; assume it is a resource path
              // Use bundle.getEntry() to avoid resolving the bundle.
              res = bundle.getEntry(iconName);
              if (null == res) {
                Activator.log.warn("Failed to load icon with name '" + iconName
                                   + "' from bundle #" + bundle.getBundleId()
                                   + " (" + Util.getBundleName(bundle)
                                   + "): No such entry.");
              }
            }
          } catch (final Exception e) {
            Activator.log.error("Failed to load icon with name '" + iconName
                                + "' from bundle #" + bundle.getBundleId()
                                + " (" + Util.getBundleName(bundle) + "): "
                                + e.getMessage(), e);
          }
        }
      } catch (final IllegalArgumentException iae) {
        // Thrown by the manifest-header parser on a malformed header value.
        Activator.log.error("Failed to parse Bundle-Icon header for #"
                            + bundle.getBundleId() + " ("
                            + Util.getBundleName(bundle) + "): "
                            + iae.getMessage(), iae);
      }
    }
    return res;
  }

  /**
   * Get the bundle icon URL for icon with size 32 from the Knopflerfish defined
   * manifest header "Application-Icon".
   *
   * @param bundle
   *          the bundle to get an application icon for.
   * @return URL of the application icon entry, or {@code null} when the header
   *         is absent, blank, or the entry does not exist in the bundle.
   */
  private static URL getApplicationIconURL(final Bundle bundle)
  {
    URL res = null;
    String iconName = bundle.getHeaders().get("Application-Icon");
    if (iconName != null) {
      iconName = iconName.trim();
    }
    if (iconName != null && 0 < iconName.length()) {
      try {
        // getEntry() does not trigger a resolve of the bundle.
        res = bundle.getEntry(iconName);
        if (null == res) {
          Activator.log.warn("Failed to load icon with name '" + iconName
                             + "' from bundle #" + bundle.getBundleId() + " ("
                             + Util.getBundleName(bundle) + "): No such resource.");
        }
      } catch (final Exception e) {
        Activator.log.error("Failed to load icon with name '" + iconName
                            + "' from bundle #" + bundle.getBundleId() + " ("
                            + Util.getBundleName(bundle) + "): " + e.getMessage(), e);
      }
    }
    return res;
  }

  /**
   * Get the URL of the icon to use for the given bundle.
   *
   * Tries, in order: the "Bundle-Icon" header, the "Application-Icon" header,
   * then a default icon chosen from the bundle's characteristics.
   *
   * @param bundle
   *          the bundle to create a bundle image icon for.
   * @return URL to the icon for for the given bundle.
   */
  private static URL getIconUrl(Bundle bundle)
  {
    URL appURL = getBundleIconURL(bundle);
    if (null == appURL) {
      appURL = getApplicationIconURL(bundle);
    }
    if (appURL == null) {
      // Fall back to one of the desktop bundle's built-in icons.
      if (Util.hasMainClass(bundle)) {
        appURL = Util.class.getResource("/jarexec.png");
      } else if (Util.hasFragment(bundle)) {
        appURL = Util.class.getResource("/frag.png");
      } else if (Util.hasComponent(bundle)) {
        appURL = Util.class.getResource("/component.png");
      } else if (Util.hasActivator(bundle)) {
        appURL = Util.class.getResource("/bundle.png");
      } else {
        appURL = Util.class.getResource("/bundle-lib.png");
      }
    }
    return appURL;
  }

  /**
   * The bundle that this icon image belongs to.
   */
  Bundle bundle;

  /**
   * The last modified time stamp for the bundle. Used to detect when image
   * needs to be updated.
   */
  long lastModified;

  /**
   * Create a 32 by 32 image icon for a bundle using the icon specified in the
   * bundles manifest.
   *
   * <p>
   * If the bundles has not specified an icon image then the default images
   * defined by the desktop bundle will be used.
   * </p>
   *
   * @param bundle
   *          The bundle that this icon represents.
   */
  public BundleImageIcon(Bundle bundle)
  {
    this(bundle, getIconUrl(bundle));
  }

  /**
   * Image icon for a bundle. The paint method will draw different overlays
   * based on the bundles state.
   *
   * @param bundle
   *          The bundle that this icon represents.
   * @param url
   *          URL to icon image, should be 32x32. If the icon image has another
   *          size it will be scaled to 32x32.
   */
  public BundleImageIcon(Bundle bundle, URL url)
  {
    super(url);
    this.bundle = bundle;
    // Long.MIN_VALUE guarantees that the first updateIcon() call reloads.
    this.lastModified = Long.MIN_VALUE;
    // Load the icon and ensure size.
    updateIcon();
  }

  @Override
  public void paintIcon(Component c, Graphics g, int x, int y)
  {
    // Reload the base image first in case the bundle was updated.
    updateIcon();
    super.paintIcon(c, g, x, y);
    // Pick the overlay matching the bundle's current life-cycle state.
    Icon overlay = null;
    switch (bundle.getState()) {
    case Bundle.ACTIVE:
      overlay = OVERLAY_ACTIVE;
      break;
    case Bundle.INSTALLED:
      overlay = OVERLAY_INSTALLED;
      break;
    case Bundle.RESOLVED:
      overlay = OVERLAY_RESOLVED;
      break;
    case Bundle.STARTING:
      overlay = OVERLAY_STARTING;
      break;
    case Bundle.STOPPING:
      overlay = OVERLAY_STOPPING;
      break;
    case Bundle.UNINSTALLED:
      overlay = OVERLAY_UNINSTALLED;
      break;
    default:
      // Unknown state: no overlay.
    }
    if (overlay != null) {
      // Anchor the overlay in the bottom-right corner of the base icon.
      final int x1 = x + (getIconWidth() - overlay.getIconWidth());
      final int y1 = y + (getIconHeight() - overlay.getIconHeight());
      final int w = overlay.getIconWidth();
      final int h = overlay.getIconHeight();
      // Draw a raised white frame behind the overlay so it stands out
      // against the base icon.
      g.setColor(Color.white);
      g.fill3DRect(x1 - 1, y1 - 1, w + 2, h + 2, true);
      overlay.paintIcon(c, g, x1, y1);
    }
  }

  /**
   * If the bundle has been updated since the last call to this method we must
   * reload the icon image to ensure that the current image is used.
   */
  private void updateIcon()
  {
    if (lastModified < bundle.getLastModified()) {
      // Load new icon
      final URL iconUrl = getIconUrl(bundle);
      // NOTE(review): iconUrl is assumed non-null here (the default icon
      // resources are expected to exist); ImageIcon(URL) would throw NPE
      // otherwise — confirm the desktop bundle always ships the defaults.
      final ImageIcon ii = new ImageIcon(iconUrl);
      // Force image to load; so that we can check the size
      setImage(ii.getImage());
      lastModified = bundle.getLastModified();
      loadImage(getImage());
      if (32 < getIconWidth() || 32 < getIconHeight()) {
        // Image too large; scale it down to 32x32.
        setImage(getScaledImage(getImage(), 32, 32));
      }
    }
  }

  /**
   * Resizes an image using a Graphics2D object backed by a BufferedImage.
   *
   * @param srcImg
   *          - source image to scale
   * @param w
   *          - desired width
   * @param h
   *          - desired height
   * @return - the new resized image
   */
  private static Image getScaledImage(Image srcImg, int w, int h)
  {
    final BufferedImage resizedImg =
      new BufferedImage(w, h, BufferedImage.TYPE_INT_ARGB);
    final Graphics2D g2 = resizedImg.createGraphics();
    // Bilinear interpolation gives a reasonable quality/speed trade-off.
    g2.setRenderingHint(RenderingHints.KEY_INTERPOLATION,
                        RenderingHints.VALUE_INTERPOLATION_BILINEAR);
    g2.drawImage(srcImg, 0, 0, w, h, null);
    g2.dispose();
    return resizedImg;
  }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*/
package com.microsoft.azure.management.appservice.implementation;
import com.google.common.base.Function;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.microsoft.azure.management.apigeneration.LangDefinition;
import com.microsoft.azure.management.appservice.AppServiceCertificate;
import com.microsoft.azure.management.appservice.AppServiceDomain;
import com.microsoft.azure.management.appservice.AppSetting;
import com.microsoft.azure.management.appservice.AzureResourceType;
import com.microsoft.azure.management.appservice.CloningInfo;
import com.microsoft.azure.management.appservice.ConnStringValueTypePair;
import com.microsoft.azure.management.appservice.ConnectionString;
import com.microsoft.azure.management.appservice.ConnectionStringType;
import com.microsoft.azure.management.appservice.CustomHostNameDnsRecordType;
import com.microsoft.azure.management.appservice.HostNameBinding;
import com.microsoft.azure.management.appservice.HostNameSslState;
import com.microsoft.azure.management.appservice.HostNameType;
import com.microsoft.azure.management.appservice.JavaVersion;
import com.microsoft.azure.management.appservice.ManagedPipelineMode;
import com.microsoft.azure.management.appservice.NetFrameworkVersion;
import com.microsoft.azure.management.appservice.PhpVersion;
import com.microsoft.azure.management.appservice.PlatformArchitecture;
import com.microsoft.azure.management.appservice.PythonVersion;
import com.microsoft.azure.management.appservice.RemoteVisualStudioVersion;
import com.microsoft.azure.management.appservice.SiteAvailabilityState;
import com.microsoft.azure.management.appservice.SslState;
import com.microsoft.azure.management.appservice.UsageState;
import com.microsoft.azure.management.appservice.WebAppBase;
import com.microsoft.azure.management.appservice.WebContainer;
import com.microsoft.azure.management.resources.fluentcore.arm.models.implementation.GroupableResourceImpl;
import com.microsoft.azure.management.resources.fluentcore.utils.Utils;
import org.joda.time.DateTime;
import rx.Observable;
import rx.functions.Func1;
import rx.functions.Func3;
import rx.functions.FuncN;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* The implementation for {@link WebAppBase}.
* @param <FluentT> the fluent interface of the web app or deployment slot
* @param <FluentImplT> the fluent implementation of the web app or deployment slot
*/
@LangDefinition(ContainerName = "/Microsoft.Azure.Management.AppService.Fluent")
abstract class WebAppBaseImpl<
FluentT extends WebAppBase,
FluentImplT extends WebAppBaseImpl<FluentT, FluentImplT>>
extends GroupableResourceImpl<
FluentT,
SiteInner,
FluentImplT,
AppServiceManager>
implements
WebAppBase,
WebAppBase.Definition<FluentT>,
WebAppBase.Update<FluentT>,
WebAppBase.UpdateStages.WithWebContainer<FluentT> {
// REST clients used to talk to the App Service endpoints.
final WebAppsInner client;
final WebSiteManagementClientImpl serviceClient;
// Caches of app settings / connection strings; null until
// cacheAppSettingsAndConnectionStrings() has populated them.
Map<String, AppSetting> cachedAppSettings;
Map<String, ConnectionString> cachedConnectionStrings;
// Snapshots of the server-reported host name collections; only assigned when
// the corresponding inner value is non-null (see normalizeProperties()).
private Set<String> hostNamesSet;
private Set<String> enabledHostNamesSet;
private Set<String> trafficManagerHostNamesSet;
private Set<String> outboundIpAddressesSet;
private Map<String, HostNameSslState> hostNameSslStateMap;
// Pending changes accumulated by the fluent definition/update methods; they
// are applied in createResourceAsync() and reset by normalizeProperties().
private Map<String, HostNameBindingImpl<FluentT, FluentImplT>> hostNameBindingsToCreate;
private List<String> hostNameBindingsToDelete;
private Map<String, HostNameSslBindingImpl<FluentT, FluentImplT>> sslBindingsToCreate;
private Map<String, String> appSettingsToAdd;
private List<String> appSettingsToRemove;
private Map<String, Boolean> appSettingStickiness;
private Map<String, ConnStringValueTypePair> connectionStringsToAdd;
private List<String> connectionStringsToRemove;
private Map<String, Boolean> connectionStringStickiness;
private WebAppSourceControlImpl<FluentT, FluentImplT> sourceControl;
private boolean sourceControlToDelete;
/**
 * Creates the implementation wrapper: attaches the given site configuration
 * to the inner site model and resets all pending-change state.
 *
 * @param name the resource name
 * @param innerObject the inner site model
 * @param configObject the inner site configuration, attached to the site
 * @param client the web apps REST client
 * @param manager the app service manager
 * @param serviceClient the web site management REST client
 */
WebAppBaseImpl(String name, SiteInner innerObject, SiteConfigInner configObject, final WebAppsInner client, AppServiceManager manager, WebSiteManagementClientImpl serviceClient) {
    super(name, innerObject, manager);
    this.client = client;
    this.serviceClient = serviceClient;
    this.inner().withSiteConfig(configObject);
    normalizeProperties();
}
@SuppressWarnings("unchecked")
private FluentT normalizeProperties() {
    // Discard any pending (uncommitted) modifications.
    hostNameBindingsToCreate = new HashMap<>();
    hostNameBindingsToDelete = new ArrayList<>();
    sslBindingsToCreate = new HashMap<>();
    appSettingsToAdd = new HashMap<>();
    appSettingsToRemove = new ArrayList<>();
    appSettingStickiness = new HashMap<>();
    connectionStringsToAdd = new HashMap<>();
    connectionStringsToRemove = new ArrayList<>();
    connectionStringStickiness = new HashMap<>();
    sourceControl = null;
    sourceControlToDelete = false;
    // Snapshot the read-only host name collections reported by the service,
    // leaving each snapshot untouched (null) when the inner value is absent.
    final SiteInner site = inner();
    if (site.hostNames() != null) {
        hostNamesSet = Sets.newHashSet(site.hostNames());
    }
    if (site.enabledHostNames() != null) {
        enabledHostNamesSet = Sets.newHashSet(site.enabledHostNames());
    }
    if (site.trafficManagerHostNames() != null) {
        trafficManagerHostNamesSet = Sets.newHashSet(site.trafficManagerHostNames());
    }
    if (site.outboundIpAddresses() != null) {
        // The service returns the addresses as one comma-separated string.
        outboundIpAddressesSet = Sets.newHashSet(site.outboundIpAddresses().split(",[ ]*"));
    }
    hostNameSslStateMap = new HashMap<>();
    if (site.hostNameSslStates() != null) {
        for (HostNameSslState sslState : site.hostNameSslStates()) {
            // Server returns null sometimes, invalid on update, so we set default
            if (sslState.sslState() == null) {
                sslState.withSslState(SslState.DISABLED);
            }
            hostNameSslStateMap.put(sslState.name(), sslState);
        }
    }
    return (FluentT) this;
}
@Override
public String state() {
    // Straight delegation to the inner site model.
    return inner().state();
}
@Override
public Set<String> hostNames() {
    // The backing set is only initialized when the service reported host
    // names (see normalizeProperties()). Be null-safe — consistent with
    // enabledHostNames() — instead of throwing NullPointerException.
    if (hostNamesSet == null) {
        return null;
    }
    return Collections.unmodifiableSet(hostNamesSet);
}
@Override
public String repositorySiteName() {
    return inner().repositorySiteName();
}
@Override
public UsageState usageState() {
    return inner().usageState();
}
@Override
public boolean enabled() {
    return inner().enabled();
}
@Override
public Set<String> enabledHostNames() {
    // The snapshot is only populated when the service reported enabled host
    // names (see normalizeProperties()); null is a legitimate result here.
    if (enabledHostNamesSet == null) {
        return null;
    }
    return Collections.unmodifiableSet(enabledHostNamesSet);
}
@Override
public SiteAvailabilityState availabilityState() {
    return inner().availabilityState();
}
@Override
public Map<String, HostNameSslState> hostNameSslStates() {
    // Read-only view over the per-host-name SSL states built in
    // normalizeProperties().
    return Collections.unmodifiableMap(hostNameSslStateMap);
}
@Override
public String appServicePlanId() {
    // The app service plan is modeled as "server farm" in the REST API.
    return inner().serverFarmId();
}
@Override
public DateTime lastModifiedTime() {
    return inner().lastModifiedTimeUtc();
}
@Override
public Set<String> trafficManagerHostNames() {
    // The backing set is only initialized when the service reported traffic
    // manager host names (see normalizeProperties()). Be null-safe —
    // consistent with enabledHostNames() — instead of throwing NPE.
    if (trafficManagerHostNamesSet == null) {
        return null;
    }
    return Collections.unmodifiableSet(trafficManagerHostNamesSet);
}
@Override
public boolean isPremiumApp() {
    // Utils.toPrimitiveBoolean maps a null Boolean to false.
    return Utils.toPrimitiveBoolean(inner().premiumAppDeployed());
}
@Override
public boolean scmSiteAlsoStopped() {
    return inner().scmSiteAlsoStopped();
}
@Override
public String targetSwapSlot() {
    return inner().targetSwapSlot();
}
@Override
public String microService() {
    return inner().microService();
}
@Override
public String gatewaySiteName() {
    return inner().gatewaySiteName();
}
@Override
public boolean clientAffinityEnabled() {
    return inner().clientAffinityEnabled();
}
@Override
public boolean clientCertEnabled() {
    return inner().clientCertEnabled();
}
@Override
public boolean hostNamesDisabled() {
    return Utils.toPrimitiveBoolean(inner().hostNamesDisabled());
}
@Override
public Set<String> outboundIpAddresses() {
    // The backing set is only initialized when the service reported outbound
    // IP addresses (see normalizeProperties()). Be null-safe — consistent
    // with enabledHostNames() — instead of throwing NullPointerException.
    if (outboundIpAddressesSet == null) {
        return null;
    }
    return Collections.unmodifiableSet(outboundIpAddressesSet);
}
@Override
public int containerSize() {
    // Utils.toPrimitiveInt maps a null Integer to 0.
    return Utils.toPrimitiveInt(inner().containerSize());
}
@Override
public CloningInfo cloningInfo() {
    return inner().cloningInfo();
}
@Override
public boolean isDefaultContainer() {
    return inner().isDefaultContainer();
}
@Override
public String defaultHostName() {
    return inner().defaultHostName();
}
@Override
public List<String> defaultDocuments() {
    // Site config is fetched separately and may not be loaded at all.
    if (inner().siteConfig() == null) {
        return null;
    }
    // The inner list itself may also be absent;
    // Collections.unmodifiableList(null) would throw NullPointerException.
    if (inner().siteConfig().defaultDocuments() == null) {
        return null;
    }
    return Collections.unmodifiableList(inner().siteConfig().defaultDocuments());
}
@Override
public NetFrameworkVersion netFrameworkVersion() {
    // Site config is fetched separately and may not be loaded yet.
    if (inner().siteConfig() == null) {
        return null;
    }
    return new NetFrameworkVersion(inner().siteConfig().netFrameworkVersion());
}
@Override
public PhpVersion phpVersion() {
    // A missing config or missing value means PHP is not enabled.
    if (inner().siteConfig() == null || inner().siteConfig().phpVersion() == null) {
        return PhpVersion.OFF;
    }
    return new PhpVersion(inner().siteConfig().phpVersion());
}
@Override
public PythonVersion pythonVersion() {
    // A missing config or missing value means Python is not enabled.
    if (inner().siteConfig() == null || inner().siteConfig().pythonVersion() == null) {
        return PythonVersion.OFF;
    }
    return new PythonVersion(inner().siteConfig().pythonVersion());
}
@Override
public String nodeVersion() {
    if (inner().siteConfig() == null) {
        return null;
    }
    return inner().siteConfig().nodeVersion();
}
@Override
public boolean remoteDebuggingEnabled() {
    if (inner().siteConfig() == null) {
        return false;
    }
    return Utils.toPrimitiveBoolean(inner().siteConfig().remoteDebuggingEnabled());
}
@Override
public RemoteVisualStudioVersion remoteDebuggingVersion() {
    if (inner().siteConfig() == null) {
        return null;
    }
    return new RemoteVisualStudioVersion(inner().siteConfig().remoteDebuggingVersion());
}
@Override
public boolean webSocketsEnabled() {
    if (inner().siteConfig() == null) {
        return false;
    }
    return Utils.toPrimitiveBoolean(inner().siteConfig().webSocketsEnabled());
}
@Override
public boolean alwaysOn() {
    if (inner().siteConfig() == null) {
        return false;
    }
    return Utils.toPrimitiveBoolean(inner().siteConfig().alwaysOn());
}
@Override
public JavaVersion javaVersion() {
    // A missing config or missing value means Java is not enabled.
    if (inner().siteConfig() == null || inner().siteConfig().javaVersion() == null) {
        return JavaVersion.OFF;
    }
    return new JavaVersion(inner().siteConfig().javaVersion());
}
@Override
public String javaContainer() {
    if (inner().siteConfig() == null) {
        return null;
    }
    return inner().siteConfig().javaContainer();
}
@Override
public String javaContainerVersion() {
    if (inner().siteConfig() == null) {
        return null;
    }
    return inner().siteConfig().javaContainerVersion();
}
@Override
public ManagedPipelineMode managedPipelineMode() {
    if (inner().siteConfig() == null) {
        return null;
    }
    return inner().siteConfig().managedPipelineMode();
}
@Override
public String autoSwapSlotName() {
    if (inner().siteConfig() == null) {
        return null;
    }
    return inner().siteConfig().autoSwapSlotName();
}
@Override
public Map<String, AppSetting> appSettings() {
    // May be null until cacheAppSettingsAndConnectionStrings() has run.
    return cachedAppSettings;
}
@Override
public Map<String, ConnectionString> connectionStrings() {
    // May be null until cacheAppSettingsAndConnectionStrings() has run.
    return cachedConnectionStrings;
}
/**
 * Fetches app settings, connection strings and the slot ("sticky")
 * configuration names in parallel, then rebuilds both caches from the
 * results. Emits this same instance when done.
 */
@SuppressWarnings("unchecked")
Observable<FluentT> cacheAppSettingsAndConnectionStrings() {
    final FluentT self = (FluentT) this;
    return Observable.zip(listAppSettings(), listConnectionStrings(), listSlotConfigurations(), new Func3<StringDictionaryInner, ConnectionStringDictionaryInner, SlotConfigNamesResourceInner, FluentT>() {
        @Override
        public FluentT call(final StringDictionaryInner appSettingsInner, final ConnectionStringDictionaryInner connectionStringsInner, final SlotConfigNamesResourceInner slotConfigs) {
            // Start from empty maps so that missing inner data leaves empty
            // (not null) caches.
            cachedAppSettings = new HashMap<>();
            cachedConnectionStrings = new HashMap<>();
            if (appSettingsInner != null && appSettingsInner.properties() != null) {
                // Maps.asMap builds a lazy view over the key set; a setting is
                // "sticky" when its name appears in the slot config names.
                // NOTE(review): slotConfigs is assumed non-null here — confirm
                // listSlotConfigurations() never emits null.
                cachedAppSettings = Maps.asMap(appSettingsInner.properties().keySet(), new Function<String, AppSetting>() {
                    @Override
                    public AppSetting apply(String input) {
                        return new AppSettingImpl(input, appSettingsInner.properties().get(input),
                                slotConfigs.appSettingNames() != null && slotConfigs.appSettingNames().contains(input));
                    }
                });
            }
            if (connectionStringsInner != null && connectionStringsInner.properties() != null) {
                cachedConnectionStrings = Maps.asMap(connectionStringsInner.properties().keySet(), new Function<String, ConnectionString>() {
                    @Override
                    public ConnectionString apply(String input) {
                        return new ConnectionStringImpl(input, connectionStringsInner.properties().get(input),
                                slotConfigs.connectionStringNames() != null && slotConfigs.connectionStringNames().contains(input));
                    }
                });
            }
            return self;
        }
    });
}
// Template methods implemented by concrete subclasses to target the right
// REST endpoints (the subclass implementations are not visible in this file).
abstract Observable<SiteInner> createOrUpdateInner(SiteInner site);
abstract Observable<SiteInner> getInner();
abstract Observable<SiteConfigInner> getConfigInner();
abstract Observable<SiteConfigInner> createOrUpdateSiteConfig(SiteConfigInner siteConfig);
abstract Observable<Void> deleteHostNameBinding(String hostname);
abstract Observable<StringDictionaryInner> listAppSettings();
abstract Observable<StringDictionaryInner> updateAppSettings(StringDictionaryInner inner);
abstract Observable<ConnectionStringDictionaryInner> listConnectionStrings();
abstract Observable<ConnectionStringDictionaryInner> updateConnectionStrings(ConnectionStringDictionaryInner inner);
abstract Observable<SlotConfigNamesResourceInner> listSlotConfigurations();
abstract Observable<SlotConfigNamesResourceInner> updateSlotConfigurations(SlotConfigNamesResourceInner inner);
abstract Observable<SiteSourceControlInner> createOrUpdateSourceControl(SiteSourceControlInner inner);
abstract Observable<Void> deleteSourceControl();
    /**
     * Creates or updates the web app, then pushes every piece of staged state to
     * the service in one sequential reactive chain: hostname bindings, SSL
     * bindings, site config, app settings, connection strings, slot-config
     * stickiness, and source control. Each stage maps its result back to the
     * SiteInner so the pipeline stays linear.
     *
     * @return an observable emitting the fluent resource after all stages finish
     */
    @Override
    public Observable<FluentT> createResourceAsync() {
        if (hostNameSslStateMap.size() > 0) {
            inner().withHostNameSslStates(new ArrayList<>(hostNameSslStateMap.values()));
        }
        // Remember whether the caller supplied a site config: a transient empty
        // one is needed only for the create call and is cleared again below.
        final boolean emptyConfig = inner().siteConfig() == null;
        if (emptyConfig) {
            inner().withSiteConfig(new SiteConfigInner());
        }
        inner().siteConfig().withLocation(inner().location());
        // Construct web app observable
        return createOrUpdateInner(inner())
                .map(new Func1<SiteInner, SiteInner>() {
                    @Override
                    public SiteInner call(SiteInner siteInner) {
                        // Drop the placeholder config so later stages see "no config".
                        if (emptyConfig) {
                            inner().withSiteConfig(null);
                        }
                        return siteInner;
                    }
                })
                // Submit hostname bindings
                .flatMap(new Func1<SiteInner, Observable<SiteInner>>() {
                    @Override
                    public Observable<SiteInner> call(final SiteInner site) {
                        List<Observable<HostNameBinding>> bindingObservables = new ArrayList<>();
                        for (HostNameBindingImpl<FluentT, FluentImplT> binding: hostNameBindingsToCreate.values()) {
                            bindingObservables.add(Utils.<HostNameBinding>rootResource(binding.createAsync()));
                        }
                        // Deletions are folded into the same zip; their emitted value
                        // is irrelevant, so they map to null.
                        for (String binding: hostNameBindingsToDelete) {
                            bindingObservables.add(deleteHostNameBinding(binding).map(new Func1<Object, HostNameBinding>() {
                                @Override
                                public HostNameBinding call(Object o) {
                                    return null;
                                }
                            }));
                        }
                        if (bindingObservables.isEmpty()) {
                            return Observable.just(site);
                        } else {
                            // zip() waits for every binding create/delete to complete.
                            return Observable.zip(bindingObservables, new FuncN<SiteInner>() {
                                @Override
                                public SiteInner call(Object... args) {
                                    return site;
                                }
                            });
                        }
                    }
                })
                // refresh after hostname bindings
                .flatMap(new Func1<SiteInner, Observable<SiteInner>>() {
                    @Override
                    public Observable<SiteInner> call(SiteInner site) {
                        return getInner();
                    }
                })
                // Submit SSL bindings
                .flatMap(new Func1<SiteInner, Observable<SiteInner>>() {
                    @Override
                    public Observable<SiteInner> call(final SiteInner siteInner) {
                        List<Observable<AppServiceCertificate>> certs = new ArrayList<>();
                        for (final HostNameSslBindingImpl<FluentT, FluentImplT> binding : sslBindingsToCreate.values()) {
                            certs.add(binding.newCertificate());
                            hostNameSslStateMap.put(binding.inner().name(), binding.inner().withToUpdate(true));
                        }
                        siteInner.withHostNameSslStates(new ArrayList<>(hostNameSslStateMap.values()));
                        if (certs.isEmpty()) {
                            return Observable.just(siteInner);
                        } else {
                            // Wait for all certificates, then push the updated SSL
                            // states via a second create-or-update call.
                            return Observable.zip(certs, new FuncN<SiteInner>() {
                                @Override
                                public SiteInner call(Object... args) {
                                    return siteInner;
                                }
                            }).flatMap(new Func1<SiteInner, Observable<SiteInner>>() {
                                @Override
                                public Observable<SiteInner> call(SiteInner inner) {
                                    return createOrUpdateInner(inner);
                                }
                            });
                        }
                    }
                })
                // submit config
                .flatMap(new Func1<SiteInner, Observable<SiteInner>>() {
                    @Override
                    public Observable<SiteInner> call(final SiteInner siteInner) {
                        if (inner().siteConfig() == null) {
                            return Observable.just(siteInner);
                        }
                        return createOrUpdateSiteConfig(inner().siteConfig())
                                .flatMap(new Func1<SiteConfigInner, Observable<SiteInner>>() {
                                    @Override
                                    public Observable<SiteInner> call(SiteConfigInner siteConfigInner) {
                                        siteInner.withSiteConfig(siteConfigInner);
                                        return Observable.just(siteInner);
                                    }
                                });
                    }
                })
                // app settings
                .flatMap(new Func1<SiteInner, Observable<SiteInner>>() {
                    @Override
                    public Observable<SiteInner> call(final SiteInner inner) {
                        Observable<SiteInner> observable = Observable.just(inner);
                        if (!appSettingsToAdd.isEmpty() || !appSettingsToRemove.isEmpty()) {
                            // Read-modify-write: fetch the current settings, apply the
                            // staged adds/removes, then push the merged dictionary.
                            observable = listAppSettings()
                                    .flatMap(new Func1<StringDictionaryInner, Observable<StringDictionaryInner>>() {
                                        @Override
                                        public Observable<StringDictionaryInner> call(StringDictionaryInner stringDictionaryInner) {
                                            if (stringDictionaryInner == null) {
                                                stringDictionaryInner = new StringDictionaryInner();
                                                stringDictionaryInner.withLocation(regionName());
                                            }
                                            if (stringDictionaryInner.properties() == null) {
                                                stringDictionaryInner.withProperties(new HashMap<String, String>());
                                            }
                                            stringDictionaryInner.properties().putAll(appSettingsToAdd);
                                            for (String appSettingKey : appSettingsToRemove) {
                                                stringDictionaryInner.properties().remove(appSettingKey);
                                            }
                                            return updateAppSettings(stringDictionaryInner);
                                        }
                                    }).map(new Func1<StringDictionaryInner, SiteInner>() {
                                        @Override
                                        public SiteInner call(StringDictionaryInner stringDictionaryInner) {
                                            return inner;
                                        }
                                    });
                        }
                        return observable;
                    }
                })
                // connection strings
                .flatMap(new Func1<SiteInner, Observable<SiteInner>>() {
                    @Override
                    public Observable<SiteInner> call(final SiteInner inner) {
                        Observable<SiteInner> observable = Observable.just(inner);
                        if (!connectionStringsToAdd.isEmpty() || !connectionStringsToRemove.isEmpty()) {
                            // Same read-modify-write pattern as app settings.
                            observable = listConnectionStrings()
                                    .flatMap(new Func1<ConnectionStringDictionaryInner, Observable<ConnectionStringDictionaryInner>>() {
                                        @Override
                                        public Observable<ConnectionStringDictionaryInner> call(ConnectionStringDictionaryInner dictionaryInner) {
                                            if (dictionaryInner == null) {
                                                dictionaryInner = new ConnectionStringDictionaryInner();
                                                dictionaryInner.withLocation(regionName());
                                            }
                                            if (dictionaryInner.properties() == null) {
                                                dictionaryInner.withProperties(new HashMap<String, ConnStringValueTypePair>());
                                            }
                                            dictionaryInner.properties().putAll(connectionStringsToAdd);
                                            for (String connectionString : connectionStringsToRemove) {
                                                dictionaryInner.properties().remove(connectionString);
                                            }
                                            return updateConnectionStrings(dictionaryInner);
                                        }
                                    }).map(new Func1<ConnectionStringDictionaryInner, SiteInner>() {
                                        @Override
                                        public SiteInner call(ConnectionStringDictionaryInner stringDictionaryInner) {
                                            return inner;
                                        }
                                    });
                        }
                        return observable;
                    }
                })
                // app setting & connection string stickiness
                .flatMap(new Func1<SiteInner, Observable<SiteInner>>() {
                    @Override
                    public Observable<SiteInner> call(final SiteInner inner) {
                        Observable<SiteInner> observable = Observable.just(inner);
                        if (!appSettingStickiness.isEmpty() || !connectionStringStickiness.isEmpty()) {
                            observable = listSlotConfigurations()
                                    .flatMap(new Func1<SlotConfigNamesResourceInner, Observable<SlotConfigNamesResourceInner>>() {
                                        @Override
                                        public Observable<SlotConfigNamesResourceInner> call(SlotConfigNamesResourceInner slotConfigNamesResourceInner) {
                                            if (slotConfigNamesResourceInner == null) {
                                                slotConfigNamesResourceInner = new SlotConfigNamesResourceInner();
                                                slotConfigNamesResourceInner.withLocation(regionName());
                                            }
                                            if (slotConfigNamesResourceInner.appSettingNames() == null) {
                                                slotConfigNamesResourceInner.withAppSettingNames(new ArrayList<String>());
                                            }
                                            if (slotConfigNamesResourceInner.connectionStringNames() == null) {
                                                slotConfigNamesResourceInner.withConnectionStringNames(new ArrayList<String>());
                                            }
                                            // Sets give idempotent add/remove of sticky names.
                                            Set<String> stickyAppSettingKeys = new HashSet<>(slotConfigNamesResourceInner.appSettingNames());
                                            Set<String> stickyConnectionStringNames = new HashSet<>(slotConfigNamesResourceInner.connectionStringNames());
                                            for (Map.Entry<String, Boolean> stickiness : appSettingStickiness.entrySet()) {
                                                if (stickiness.getValue()) {
                                                    stickyAppSettingKeys.add(stickiness.getKey());
                                                } else {
                                                    stickyAppSettingKeys.remove(stickiness.getKey());
                                                }
                                            }
                                            for (Map.Entry<String, Boolean> stickiness : connectionStringStickiness.entrySet()) {
                                                if (stickiness.getValue()) {
                                                    stickyConnectionStringNames.add(stickiness.getKey());
                                                } else {
                                                    stickyConnectionStringNames.remove(stickiness.getKey());
                                                }
                                            }
                                            slotConfigNamesResourceInner.withAppSettingNames(new ArrayList<>(stickyAppSettingKeys));
                                            slotConfigNamesResourceInner.withConnectionStringNames(new ArrayList<>(stickyConnectionStringNames));
                                            return updateSlotConfigurations(slotConfigNamesResourceInner);
                                        }
                                    }).map(new Func1<SlotConfigNamesResourceInner, SiteInner>() {
                                        @Override
                                        public SiteInner call(SlotConfigNamesResourceInner slotConfigNamesResourceInner) {
                                            return inner;
                                        }
                                    });
                        }
                        return observable;
                    }
                })
                // create source control
                .flatMap(new Func1<SiteInner, Observable<SiteInner>>() {
                    @Override
                    public Observable<SiteInner> call(final SiteInner inner) {
                        if (sourceControl == null || sourceControlToDelete) {
                            return Observable.just(inner);
                        }
                        // The GitHub access token must be registered before the
                        // site source control can be created.
                        return sourceControl.registerGithubAccessToken()
                                .flatMap(new Func1<SourceControlInner, Observable<SiteSourceControlInner>>() {
                                    @Override
                                    public Observable<SiteSourceControlInner> call(SourceControlInner sourceControlInner) {
                                        return createOrUpdateSourceControl(sourceControl.inner());
                                    }
                                })
                                .map(new Func1<SiteSourceControlInner, SiteInner>() {
                                    @Override
                                    public SiteInner call(SiteSourceControlInner siteSourceControlInner) {
                                        return inner;
                                    }
                                });
                    }
                })
                // delete source control
                .flatMap(new Func1<SiteInner, Observable<SiteInner>>() {
                    @Override
                    public Observable<SiteInner> call(final SiteInner inner) {
                        if (!sourceControlToDelete) {
                            return Observable.just(inner);
                        }
                        return deleteSourceControl().map(new Func1<Void, SiteInner>() {
                            @Override
                            public SiteInner call(Void aVoid) {
                                return inner;
                            }
                        });
                    }
                })
                // convert from inner
                .map(new Func1<SiteInner, FluentT>() {
                    @Override
                    public FluentT call(SiteInner siteInner) {
                        setInner(siteInner);
                        return normalizeProperties();
                    }
                }).flatMap(new Func1<FluentT, Observable<FluentT>>() {
                    @Override
                    public Observable<FluentT> call(FluentT fluentT) {
                        // Finally warm the local app-settings/connection-strings cache.
                        return cacheAppSettingsAndConnectionStrings();
                    }
                });
    }
WebAppBaseImpl<FluentT, FluentImplT> withNewHostNameSslBinding(final HostNameSslBindingImpl<FluentT, FluentImplT> hostNameSslBinding) {
if (hostNameSslBinding.newCertificate() != null) {
sslBindingsToCreate.put(hostNameSslBinding.name(), hostNameSslBinding);
}
return this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withManagedHostnameBindings(AppServiceDomain domain, String... hostnames) {
for (String hostname : hostnames) {
if (hostname.equals("@") || hostname.equalsIgnoreCase(domain.name())) {
defineHostnameBinding()
.withAzureManagedDomain(domain)
.withSubDomain(hostname)
.withDnsRecordType(CustomHostNameDnsRecordType.A)
.attach();
} else {
defineHostnameBinding()
.withAzureManagedDomain(domain)
.withSubDomain(hostname)
.withDnsRecordType(CustomHostNameDnsRecordType.CNAME)
.attach();
}
}
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public HostNameBindingImpl<FluentT, FluentImplT> defineHostnameBinding() {
HostNameBindingInner inner = new HostNameBindingInner();
inner.withSiteName(name());
inner.withLocation(regionName());
inner.withAzureResourceType(AzureResourceType.WEBSITE);
inner.withAzureResourceName(name());
inner.withHostNameType(HostNameType.VERIFIED);
return new HostNameBindingImpl<>(inner, (FluentImplT) this, client);
}
    /**
     * Binds the given hostnames of an externally managed domain to this app.
     * Third-party domains always use CNAME records for their sub-domains.
     */
    @Override
    @SuppressWarnings("unchecked")
    public FluentImplT withThirdPartyHostnameBinding(String domain, String... hostnames) {
        for (String hostname : hostnames) {
            defineHostnameBinding()
                    .withThirdPartyDomain(domain)
                    .withSubDomain(hostname)
                    .withDnsRecordType(CustomHostNameDnsRecordType.CNAME)
                    .attach();
        }
        return (FluentImplT) this;
    }
@Override
@SuppressWarnings("unchecked")
public FluentImplT withoutHostnameBinding(String hostname) {
hostNameBindingsToDelete.add(hostname);
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withoutSslBinding(String hostname) {
if (hostNameSslStateMap.containsKey(hostname)) {
hostNameSslStateMap.get(hostname).withSslState(SslState.DISABLED).withToUpdate(true);
}
return (FluentImplT) this;
}
@SuppressWarnings("unchecked")
FluentImplT withHostNameBinding(final HostNameBindingImpl<FluentT, FluentImplT> hostNameBinding) {
this.hostNameBindingsToCreate.put(
hostNameBinding.name(),
hostNameBinding);
return (FluentImplT) this;
}
    /** Creates the app in the stopped (disabled) state. */
    @Override
    @SuppressWarnings("unchecked")
    public FluentImplT withAppDisabledOnCreation() {
        inner().withEnabled(false);
        return (FluentImplT) this;
    }

    /** Controls whether stopping the app also stops its SCM (Kudu) site. */
    @Override
    @SuppressWarnings("unchecked")
    public FluentImplT withScmSiteAlsoStopped(boolean scmSiteAlsoStopped) {
        inner().withScmSiteAlsoStopped(scmSiteAlsoStopped);
        return (FluentImplT) this;
    }

    /** Enables or disables client affinity (ARR session cookie). */
    @Override
    @SuppressWarnings("unchecked")
    public FluentImplT withClientAffinityEnabled(boolean enabled) {
        inner().withClientAffinityEnabled(enabled);
        return (FluentImplT) this;
    }

    /** Enables or disables client certificate authentication. */
    @Override
    @SuppressWarnings("unchecked")
    public FluentImplT withClientCertEnabled(boolean enabled) {
        inner().withClientCertEnabled(enabled);
        return (FluentImplT) this;
    }

    /** Starts a blank SSL binding definition; recorded on attach(). */
    @Override
    @SuppressWarnings("unchecked")
    public HostNameSslBindingImpl<FluentT, FluentImplT> defineSslBinding() {
        return new HostNameSslBindingImpl<>(new HostNameSslState(), (FluentImplT) this, myManager);
    }
@Override
@SuppressWarnings("unchecked")
public FluentImplT withNetFrameworkVersion(NetFrameworkVersion version) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
inner().siteConfig().withNetFrameworkVersion(version.toString());
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withPhpVersion(PhpVersion version) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
inner().siteConfig().withPhpVersion(version.toString());
return (FluentImplT) this;
}
@Override
public FluentImplT withoutPhp() {
return withPhpVersion(new PhpVersion(""));
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withJavaVersion(JavaVersion version) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
inner().siteConfig().withJavaVersion(version.toString());
return (FluentImplT) this;
}
@Override
public FluentImplT withoutJava() {
return withJavaVersion(new JavaVersion("")).withWebContainer(null);
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withWebContainer(WebContainer webContainer) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
if (webContainer == null) {
inner().siteConfig().withJavaContainer(null);
inner().siteConfig().withJavaContainerVersion(null);
} else {
String[] containerInfo = webContainer.toString().split(" ");
inner().siteConfig().withJavaContainer(containerInfo[0]);
inner().siteConfig().withJavaContainerVersion(containerInfo[1]);
}
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withPythonVersion(PythonVersion version) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
inner().siteConfig().withPythonVersion(version.toString());
return (FluentImplT) this;
}
@Override
public FluentImplT withoutPython() {
return withPythonVersion(new PythonVersion(""));
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withPlatformArchitecture(PlatformArchitecture platform) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
inner().siteConfig().withUse32BitWorkerProcess(platform.equals(PlatformArchitecture.X86));
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withWebSocketsEnabled(boolean enabled) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
inner().siteConfig().withWebSocketsEnabled(enabled);
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withWebAppAlwaysOn(boolean alwaysOn) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
inner().siteConfig().withAlwaysOn(alwaysOn);
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withManagedPipelineMode(ManagedPipelineMode managedPipelineMode) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
inner().siteConfig().withManagedPipelineMode(managedPipelineMode);
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withAutoSwapSlotName(String slotName) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
inner().siteConfig().withAutoSwapSlotName(slotName);
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withRemoteDebuggingEnabled(RemoteVisualStudioVersion remoteVisualStudioVersion) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
inner().siteConfig().withRemoteDebuggingEnabled(true);
inner().siteConfig().withRemoteDebuggingVersion(remoteVisualStudioVersion.toString());
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withRemoteDebuggingDisabled() {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
inner().siteConfig().withRemoteDebuggingEnabled(false);
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withDefaultDocument(String document) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
if (inner().siteConfig().defaultDocuments() == null) {
inner().siteConfig().withDefaultDocuments(new ArrayList<String>());
}
inner().siteConfig().defaultDocuments().add(document);
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withDefaultDocuments(List<String> documents) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
if (inner().siteConfig().defaultDocuments() == null) {
inner().siteConfig().withDefaultDocuments(new ArrayList<String>());
}
inner().siteConfig().defaultDocuments().addAll(documents);
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withoutDefaultDocument(String document) {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
if (inner().siteConfig().defaultDocuments() != null) {
inner().siteConfig().defaultDocuments().remove(document);
}
return (FluentImplT) this;
}
    /** Stages one app setting; pushed to the service during createResourceAsync(). */
    @Override
    @SuppressWarnings("unchecked")
    public FluentImplT withAppSetting(String key, String value) {
        appSettingsToAdd.put(key, value);
        return (FluentImplT) this;
    }

    /** Stages several app settings; pushed during createResourceAsync(). */
    @Override
    @SuppressWarnings("unchecked")
    public FluentImplT withAppSettings(Map<String, String> settings) {
        appSettingsToAdd.putAll(settings);
        return (FluentImplT) this;
    }

    /** A sticky (slot-pinned) setting is just a normal setting marked sticky. */
    @Override
    public FluentImplT withStickyAppSetting(String key, String value) {
        withAppSetting(key, value);
        return withAppSettingStickiness(key, true);
    }
@Override
@SuppressWarnings("unchecked")
public FluentImplT withStickyAppSettings(Map<String, String> settings) {
withAppSettings(settings);
appSettingStickiness.putAll(Maps.asMap(settings.keySet(), new Function<String, Boolean>() {
@Override
public Boolean apply(String input) {
return true;
}
}));
return (FluentImplT) this;
}
    /** Stages removal of an app setting and drops any stickiness recorded for it. */
    @Override
    @SuppressWarnings("unchecked")
    public FluentImplT withoutAppSetting(String key) {
        appSettingsToRemove.add(key);
        appSettingStickiness.remove(key);
        return (FluentImplT) this;
    }

    /** Records whether an app setting should be sticky to this slot. */
    @Override
    @SuppressWarnings("unchecked")
    public FluentImplT withAppSettingStickiness(String key, boolean sticky) {
        appSettingStickiness.put(key, sticky);
        return (FluentImplT) this;
    }

    /** Stages one connection string; pushed during createResourceAsync(). */
    @Override
    @SuppressWarnings("unchecked")
    public FluentImplT withConnectionString(String name, String value, ConnectionStringType type) {
        connectionStringsToAdd.put(name, new ConnStringValueTypePair().withValue(value).withType(type));
        return (FluentImplT) this;
    }
@Override
@SuppressWarnings("unchecked")
public FluentImplT withStickyConnectionString(String name, String value, ConnectionStringType type) {
connectionStringsToAdd.put(name, new ConnStringValueTypePair().withValue(value).withType(type));
connectionStringStickiness.put(name, true);
return (FluentImplT) this;
}
    /** Stages removal of a connection string and drops its stickiness flag. */
    @Override
    @SuppressWarnings("unchecked")
    public FluentImplT withoutConnectionString(String name) {
        connectionStringsToRemove.add(name);
        connectionStringStickiness.remove(name);
        return (FluentImplT) this;
    }

    /** Records whether a connection string should be sticky to this slot. */
    @Override
    @SuppressWarnings("unchecked")
    public FluentImplT withConnectionStringStickiness(String name, boolean stickiness) {
        connectionStringStickiness.put(name, stickiness);
        return (FluentImplT) this;
    }

    /** Records the source control definition attached via defineSourceControl(). */
    @SuppressWarnings("unchecked")
    FluentImplT withSourceControl(WebAppSourceControlImpl<FluentT, FluentImplT> sourceControl) {
        this.sourceControl = sourceControl;
        return (FluentImplT) this;
    }
@Override
public WebAppSourceControlImpl<FluentT, FluentImplT> defineSourceControl() {
SiteSourceControlInner sourceControlInner = new SiteSourceControlInner();
sourceControlInner.withLocation(regionName());
return new WebAppSourceControlImpl<>(sourceControlInner, this, serviceClient);
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withLocalGitSourceControl() {
if (inner().siteConfig() == null) {
inner().withSiteConfig(new SiteConfigInner());
}
inner().siteConfig().withScmType("LocalGit");
return (FluentImplT) this;
}
@Override
@SuppressWarnings("unchecked")
public FluentImplT withoutSourceControl() {
sourceControlToDelete = true;
return (FluentImplT) this;
}
@Override
public FluentT refresh() {
SiteInner inner = getInner().toBlocking().single();
inner.withSiteConfig(getConfigInner().toBlocking().single());
setInner(inner);
return this.cacheAppSettingsAndConnectionStrings().toBlocking().single();
}
}
| |
package main.java.Ionex;
import java.awt.*;
/**
 * Animated canvas for the ion-exchange chromatography simulation. Draws the
 * protein bands moving through the column plus the detector trace and the
 * solvent-concentration trace, all into a double-buffered off-screen image.
 *
 * <p>The animation runs on a dedicated thread created in {@link #start()} and
 * stopped cooperatively: {@link #pause()}/{@link #stop()} null out
 * {@code m_animator}, which makes the loop in {@link #run()} exit.
 */
class ImageCanvas extends Canvas implements Runnable {
    // Pixel bounds of the column: top/bottom Y and left/right X.
    final int COLLOY = 25;
    final int COLHIY = 226;
    final int COLLOX = 280;
    final int COLHIX = 376;
    final int delay = 100; //milliseconds between frames
    // Origin and vertical limits of the detector/concentration graph.
    final int DETECTORIGINX = 6;
    final int DETECTORIGINY = 309;
    final int DETECTPEAK = 259;
    final int DETECTTOP = 275;
    Thread m_animator;      // animation thread; null when not running
    Image m_offscreen;      // double buffer, lazily created in update()
    Graphics m_offgraphics; // graphics context of the double buffer
    Image m_imgBack;        // static background image supplied via setImage()
    int m_nTime = 0;        // current frame / simulation time step (0..459)
    Ionex m_theExp;         // owning experiment: proteins, colors, concentrations
    Point m_pLastConcen;    // previous point of the concentration trace
    Point m_pNewConcen;     // current point of the concentration trace
    Point m_pNewDetect;     // current point of the detector trace
    Point m_pLastDetect;    // previous point of the detector trace
    double m_dTopConc; // concentration entering column
    double m_dBottomConc; // concentration leaving column

    public ImageCanvas(Ionex theExp) {
        m_theExp = theExp;
        // All four trace points start at the graph origin.
        m_pLastConcen = new Point(DETECTORIGINX, DETECTORIGINY);
        m_pLastDetect = new Point(DETECTORIGINX, DETECTORIGINY);
        m_pNewConcen = new Point(DETECTORIGINX, DETECTORIGINY);
        m_pNewDetect = new Point(DETECTORIGINX, DETECTORIGINY);
    }

    /**
     * Starts the animation thread; a no-op if the animation is already running.
     *
     * <p>Bug fix: previously {@code m_animator.start()} was invoked
     * unconditionally, so calling start() while a thread was already running
     * threw {@link IllegalThreadStateException}.
     */
    public void start() {
        if (m_animator == null) {
            m_animator = new Thread(this);
            m_animator.start();
        }
    }

    /** Pauses the animation: the run() loop exits once it sees the null. */
    public void pause() {
        m_animator = null;
    }

    /** Stops the animation and resets the clock and all trace points. */
    public void stop() {
        m_animator = null;
        //reinitialize
        m_nTime = 0;
        m_pLastConcen.move(DETECTORIGINX, DETECTORIGINY);
        m_pLastDetect.move(DETECTORIGINX, DETECTORIGINY);
        m_pNewConcen.move(DETECTORIGINX, DETECTORIGINY);
        m_pNewDetect.move(DETECTORIGINX, DETECTORIGINY);
    }

    /** Animation loop: one frame roughly every {@code delay} milliseconds. */
    public void run() {
        //Just to be nice, lower this thread's priority
        //so it can't interfere with other processing going on.
        Thread.currentThread().setPriority(Thread.MIN_PRIORITY);
        //Remember the starting time.
        long startTime = System.currentTimeMillis();
        //This is the animation loop; it exits when pause()/stop() null the field.
        while (Thread.currentThread() == m_animator) {
            //Advance the animation frame.
            animate();
            //Display it.
            repaint();
            //Delay depending on how far we are behind schedule.
            try {
                startTime += delay;
                Thread.sleep(Math.max(0,
                        startTime - System.currentTimeMillis()));
            } catch (InterruptedException e) {
                break;
            }
        }
    }

    /** Delegates to update() so paint never clears the canvas (no flicker). */
    public void paint(Graphics g) {
        update(g);
    }

    /** Composites background, protein bands and both traces, then blits. */
    public synchronized void update(Graphics g) {
        //lazily create the double buffer on the first paint
        if (m_offscreen == null) {
            m_offscreen = createImage(488, 395);
            m_offgraphics = m_offscreen.getGraphics();
            // reset the background image
            m_offgraphics.drawImage(m_imgBack, 0, 0, null);
        }
        //now draw the protein bands
        drawProteins(m_offgraphics);
        //now draw the detector trace segment
        m_offgraphics.setColor(Color.blue);
        m_offgraphics.drawLine(m_pLastDetect.x, m_pLastDetect.y,
                m_pNewDetect.x, m_pNewDetect.y);
        //now draw the concentration trace segment
        m_offgraphics.setColor(Color.red);
        m_offgraphics.drawLine(m_pLastConcen.x, m_pLastConcen.y,
                m_pNewConcen.x, m_pNewConcen.y);
        // now draw the concentrations at the top and bottom of the column
        // (white rectangles blank out the previous readings first)
        m_offgraphics.setColor(Color.white);
        m_offgraphics.fillRect(COLHIX + 5, COLLOY, 50, 10);
        m_offgraphics.fillRect(COLHIX + 5, COLHIY - 10, 50, 10);
        String strConc = String.valueOf(m_dTopConc);
        m_offgraphics.setColor(Color.gray);
        m_offgraphics.drawString(formatFloat(strConc), COLHIX + 5, COLLOY + 10);
        strConc = String.valueOf(m_dBottomConc);
        m_offgraphics.drawString(formatFloat(strConc), COLHIX + 5, COLHIY);
        //now actually draw to the screen
        g.drawImage(m_offscreen, 0, 0, null);
    }

    /**
     * Truncates a decimal string to two digits after the decimal point;
     * returns the input unchanged when it has no dot or too few digits.
     */
    private String formatFloat(String strF) {
        int nPos = strF.indexOf('.');
        if ((nPos < 0) || (nPos + 3 > strF.length())) {
            return strF;
        }
        return strF.substring(0, nPos + 3);
    }

    /**
     * Repaints the bare background image.
     * NOTE(review): assumes update() ran at least once so m_offgraphics is
     * non-null — confirm callers respect that ordering.
     */
    public void resetBackground() {
        // reset the background image
        m_offgraphics.drawImage(m_imgBack, 0, 0, null);
        //call repaint to draw to the screen
        repaint();
    }

    /** Repaints the background and re-labels the protein names. */
    public void prepareBackground() {
        // reset the background image
        m_offgraphics.drawImage(m_imgBack, 0, 0, null);
        //redraw the names of the proteins
        for (int i = 0; i < m_theExp.m_arrProteins.length; i++) {
            if (m_theExp.m_arrProteins[i] == null) {
                continue;
            }
            if (m_theExp.m_arrProteins[i].m_bMix) {
                //use the color reserved for mixed proteins
                m_offgraphics.setColor(m_theExp.m_colors[5]);
            } else {
                m_offgraphics.setColor(m_theExp.m_colors[i]);
            }
            m_offgraphics.drawString(m_theExp.m_arrProteins[i].getName(), 10, 344 + (i * 11));
        }
        //call repaint to draw to the screen
        repaint();
    }

    /** Advances the simulation by one time step and schedules a repaint. */
    public void animate() {
        m_nTime++;
        if (m_nTime >= 460) {
            // we're done — tell the experiment to stop the run
            m_theExp.processStop();
            return;
        }
        //move the proteins in the column
        moveProteins();
        //advance the concentration trace (last point <- current, then recompute)
        m_pLastConcen.move(m_pNewConcen.x, m_pNewConcen.y);
        calcConc();
        //advance the detector trace the same way
        m_pLastDetect.move(m_pNewDetect.x, m_pNewDetect.y);
        calcDetect();
        repaint();
    }

    /**
     * Releases proteins whose charge can no longer hold against the local
     * solvent concentration, and moves every unbound protein down the column.
     */
    void moveProteins() {
        double dConc; // concentration at protein location
        CProtein protein;
        int i;
        //loop through the proteins and determine their positions
        for (i = 0; i < m_theExp.m_arrProteins.length; i++) {
            if (m_theExp.m_arrProteins[i] == null) {
                continue;
            }
            protein = m_theExp.m_arrProteins[i];
            if (protein.m_bBound) {
                // find the concentration at the location of the protein;
                // the gradient front reaches position m_nPos with a lag.
                if (m_nTime <= (150 + protein.m_nPos)) {
                    // the time for the initial wash to move through the column
                    dConc = m_theExp.m_dConc1;
                } else {
                    if (m_nTime <= (300 + protein.m_nPos)) {
                        // concentration entering the column ramps linearly
                        dConc = m_theExp.m_dConc1 +
                                (float) (((m_theExp.m_dConc2 - m_theExp.m_dConc1) / 150) *
                                        (m_nTime - 150 - protein.m_nPos));
                    } else {
                        // only the high concentration now
                        dConc = m_theExp.m_dConc2;
                    }
                }
                // determine if the protein is still bound or not
                if (Math.abs(protein.m_dCharge) < (dConc * 100)) {
                    protein.m_bBound = false;
                }
            }
            // protein is not bound, move it
            if (!protein.m_bBound) {
                (m_theExp.m_arrProteins[i]).moveProtein();
            }
        }
    }

    /**
     * Computes the solvent concentration entering and leaving the column for
     * the current time step and moves the concentration trace point.
     */
    void calcConc() {
        //find the concentration of the solvent entering and leaving the column
        if (m_nTime <= 150) {
            // initial wash throughout
            m_dTopConc = m_theExp.m_dConc1;
            m_dBottomConc = m_theExp.m_dConc1;
        } else {
            if (m_nTime <= 300) {
                // gradient is entering; the bottom still sees the initial wash
                // (150 steps is the transit time through the column)
                m_dTopConc = (m_theExp.m_dConc1 +
                        (float) (((m_theExp.m_dConc2 - m_theExp.m_dConc1) / 150) * (m_nTime - 150)));
                m_dBottomConc = m_theExp.m_dConc1;
            } else {
                //only final wash entering column; gradient exits the bottom
                if (m_nTime <= 450) {
                    m_dTopConc = m_theExp.m_dConc2;
                    m_dBottomConc = (m_theExp.m_dConc1 +
                            (float) (((m_theExp.m_dConc2 - m_theExp.m_dConc1) / 150) * (m_nTime - 300)));
                } else {
                    // only the high concentration now
                    m_dTopConc = m_theExp.m_dConc2;
                    m_dBottomConc = m_theExp.m_dConc2;
                }
            }
        }
        //move the graph point: x tracks time, y scales with the bottom concentration
        m_pNewConcen.move(DETECTORIGINX + m_nTime,
                DETECTORIGINY - 1 - (int) (m_dBottomConc * (DETECTORIGINY - DETECTTOP)));
    }

    /**
     * Computes the detector trace point: the peak height is proportional to
     * the eluting protein's amount (relative to the largest loaded amount) and
     * shaped by how close the band is to the column outlet.
     */
    void calcDetect() {
        int i;
        int nPos;
        int nMaxAmount = 0, nMixAmount = 0;
        int nAmount;
        int nHeight;
        int nNewPoint;
        //move the x; y defaults to the baseline
        m_pNewDetect.move(DETECTORIGINX + m_nTime, DETECTORIGINY);
        // loop through the proteins, to find the largest amount entered
        for (i = 0; i < m_theExp.m_arrProteins.length; i++) {
            if (m_theExp.m_arrProteins[i] == null) {
                continue;
            }
            nMaxAmount = Math.max(nMaxAmount, m_theExp.m_arrProteins[i].m_nAmount);
        }
        //now see if there are any that are being eluted together and calculate that amount
        for (i = 0; i < m_theExp.m_arrProteins.length; i++) {
            if (m_theExp.m_arrProteins[i] == null) {
                continue;
            }
            if (m_theExp.m_arrProteins[i].m_bMix) {
                nMixAmount += m_theExp.m_arrProteins[i].m_nAmount;
            }
        }
        nMaxAmount = Math.max(nMaxAmount, nMixAmount);
        // loop through the proteins, see if there's any near the bottom
        for (i = 0; i < m_theExp.m_arrProteins.length; i++) {
            if (m_theExp.m_arrProteins[i] == null) {
                continue;
            }
            //calculate the detector position for this protein's peak
            nHeight = DETECTORIGINY - DETECTPEAK;
            if (m_theExp.m_arrProteins[i].m_bMix) {
                nAmount = nMixAmount;
            } else {
                nAmount = m_theExp.m_arrProteins[i].m_nAmount;
            }
            nHeight = (int) (nHeight * (float) ((float) nAmount / (float) nMaxAmount)); //force it to use floats!
            nPos = m_theExp.m_arrProteins[i].m_nPos;
            // taper the peak by the band's distance from the column outlet
            switch (Math.abs((COLHIY - COLLOY + 2) - (nPos + 1))) {
                case 2:
                    nHeight = (int) (nHeight * (4.0 / 7.0));
                    break;
                case 1:
                    nHeight = (int) (nHeight * (6.0 / 7.0));
                    break;
                case 0:
                    break;
                default:
                    nHeight = 0;
            }
            nNewPoint = DETECTORIGINY - nHeight;
            // if the point is already set, only set it if
            // the new setting shows a higher detector response
            if (m_pNewDetect.y > nNewPoint) {
                m_pNewDetect.y = nNewPoint;
            }
        }
    }

    /** Supplies the static background image drawn behind everything else. */
    public void setImage(Image img) {
        m_imgBack = img;
    }

    /** Erases the column interior and redraws bound, then mobile, bands. */
    void drawProteins(Graphics g) {
        int i;
        //erase the column
        g.setColor(Color.white);
        g.fillRect(COLLOX + 1, COLLOY + 1, COLHIX - COLLOX - 1, COLHIY - COLLOY - 1);
        //draw bound proteins first
        for (i = 0; i < m_theExp.m_arrProteins.length; i++) {
            if (m_theExp.m_arrProteins[i] == null) {
                continue;
            }
            if (m_theExp.m_arrProteins[i].m_bBound) {
                drawProteinBand(g, i);
            }
        }
        // now draw mobile ones on top
        for (i = 0; i < m_theExp.m_arrProteins.length; i++) {
            if (m_theExp.m_arrProteins[i] == null) {
                continue;
            }
            if (!m_theExp.m_arrProteins[i].m_bBound) {
                drawProteinBand(g, i);
            }
        }
    }

    /** Draws one protein band, clipped to the column, in its display color. */
    void drawProteinBand(Graphics g, int nProtein) {
        Rectangle rect;
        int nPos, nWidth; //placement and width of protein band
        nPos = m_theExp.m_arrProteins[nProtein].m_nPos;
        nWidth = m_theExp.m_arrProteins[nProtein].m_nBandwidth;
        //draw the protein only if it's still in the column
        if (nPos >= (COLHIY - COLLOY)) {
            return;
        }
        rect = new Rectangle(COLLOX + 1, COLLOY + nPos,
                COLHIX - COLLOX - 1, nWidth);
        // don't draw anything outside of the column
        if ((rect.y + rect.height) >= COLHIY) {
            rect.height = COLHIY - rect.y;
        }
        // draw the protein with the correct color
        if (m_theExp.m_arrProteins[nProtein].m_bMix) {
            g.setColor(m_theExp.m_colors[5]);
        } else {
            g.setColor(m_theExp.m_colors[nProtein]);
        }
        g.fillRect(rect.x, rect.y, rect.width, rect.height);
    }
}
| |
package com.lean56.andplug.app.activity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.inputmethod.EditorInfo;
import android.widget.Button;
import android.widget.TextView;
import com.lean56.andplug.activity.BaseActivity;
import com.lean56.andplug.app.AppContext;
import com.lean56.andplug.app.R;
import com.rengwuxian.materialedittext.MaterialEditText;
/**
 * A login screen that offers login via username/password.
 *
 * @author Charles
 */
public class LoginActivity extends BaseActivity {
    /**
     * Keep track of the login task to ensure we can cancel it if requested.
     */
private UserLoginTask mAuthTask = null;
// UI references.
private MaterialEditText mUsernameEdit;
private MaterialEditText mPwdEdit;
private Button mLoginBtn;
private TextView mForgetPwdText;
private TextView mRegisterText;
    /** Supplies the layout resource that BaseActivity inflates in onCreate(). */
    @Override
    protected int getContentView() {
        return R.layout.login;
    }
    /**
     * Wires up the login form: IME "Next" on the username field moves focus to
     * the password field, IME "Done" on the password field submits, and the
     * login button submits as well.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // set up the login form.
        mUsernameEdit = (MaterialEditText) findViewById(R.id.et_username);
        mUsernameEdit.setOnEditorActionListener(new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
                if (actionId == EditorInfo.IME_ACTION_NEXT) {
                    mPwdEdit.requestFocus();
                }
                // NOTE(review): returning true unconditionally consumes every
                // editor action on this field, not just IME_ACTION_NEXT —
                // confirm that is intended.
                return true;
            }
        });
        mPwdEdit = (MaterialEditText) findViewById(R.id.et_pwd);
        mPwdEdit.setOnEditorActionListener(new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView textView, int actionId, KeyEvent keyEvent) {
                // "Done" on the keyboard submits the form.
                if (actionId == EditorInfo.IME_ACTION_DONE) {
                    attemptLogin();
                    return true;
                }
                return false;
            }
        });
        mLoginBtn = (Button) findViewById(R.id.btn_login);
        mLoginBtn.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {
                attemptLogin();
            }
        });
    }
    /** Adds a single always-visible "register" action to the action bar. */
    @Override
    protected void addMenuItem(Menu menu) {
        // add menu like this
        menu.add(Menu.NONE, Menu.FIRST, Menu.NONE, R.string.register).setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS);
    }

    /** Handles the register action added above. */
    @Override
    protected boolean onFirstMenuSelected(MenuItem item) {
        // NOTE(review): "zhuce" looks like placeholder pinyin for "register" —
        // replace with a localized string resource (and real navigation) before release.
        AppContext.showToast("zhuce");
        return true;
    }
/**
 * Validates the credential form and, when it passes, launches the background
 * login task. A no-op while a previous attempt is still running.
 */
public void attemptLogin() {
    if (mAuthTask != null) {
        // An authentication attempt is already in flight.
        return;
    }
    // Snapshot the form values at the moment of the attempt.
    String user = mUsernameEdit.getText().toString();
    String pwd = mPwdEdit.getText().toString();

    MaterialEditText firstInvalidField = null;
    boolean hasError = false;

    // The password is only validated when the user actually entered one.
    if (!TextUtils.isEmpty(pwd) && !isPasswordValid(pwd)) {
        mPwdEdit.setError(getString(R.string.error_invalid_password));
        firstInvalidField = mPwdEdit;
        hasError = true;
    }

    // Username is required and must be valid. Because this check runs last,
    // a username error takes focus priority over a password error.
    if (TextUtils.isEmpty(user)) {
        mUsernameEdit.setError(getString(R.string.error_field_required));
        firstInvalidField = mUsernameEdit;
        hasError = true;
    } else if (!isUsernameValid(user)) {
        mUsernameEdit.setError(getString(R.string.error_invalid_username));
        firstInvalidField = mUsernameEdit;
        hasError = true;
    }

    if (hasError) {
        // Do not attempt the login; focus the highest-priority invalid field.
        firstInvalidField.requestFocus();
    } else {
        // Show the spinner and run the authentication attempt off the UI thread.
        showProgress(true);
        mAuthTask = new UserLoginTask(user, pwd);
        mAuthTask.execute((Void) null);
    }
}
/** A username is acceptable as long as it is non-empty. */
private boolean isUsernameValid(String username) {
    return !username.isEmpty();
}
/** A password is acceptable as long as it is non-empty. */
private boolean isPasswordValid(String password) {
    return !password.isEmpty();
}
/**
 * Shows the progress UI and hides the login_bg form.
 * Currently a no-op: the cross-fade animation below is commented out, so
 * toggling {@code show} has no visible effect. TODO confirm whether the
 * animation should be restored (it needs mLoginFormView/mProgressView,
 * which are not referenced anywhere else in this view of the file).
 *
 * @param show true to show the progress spinner, false to show the form
 */
public void showProgress(final boolean show) {
/*int shortAnimTime = getResources().getInteger(android.R.integer.config_shortAnimTime);
mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE);
mLoginFormView.animate().setDuration(shortAnimTime).alpha(
show ? 0 : 1).setListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE);
}
});
mProgressView.setVisibility(show ? View.VISIBLE : View.GONE);
mProgressView.animate().setDuration(shortAnimTime).alpha(
show ? 1 : 0).setListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
mProgressView.setVisibility(show ? View.VISIBLE : View.GONE);
}
});*/
}
/**
 * Asynchronous login/registration task used to authenticate the user off the
 * UI thread. Created and executed by {@code attemptLogin()}; clears
 * {@code mAuthTask} and hides the progress UI when it finishes or is
 * cancelled.
 */
public class UserLoginTask extends AsyncTask<Void, Void, Boolean> {
    // Credentials captured when the task was created. Named "username"
    // rather than "email" because attemptLogin() passes the username field.
    private final String mUsername;
    private final String mPassword;

    UserLoginTask(String username, String password) {
        mUsername = username;
        mPassword = password;
    }

    /**
     * Performs the authentication attempt in the background.
     *
     * @return true when authentication succeeds; currently a stub that
     *         always succeeds.
     */
    @Override
    protected Boolean doInBackground(Void... params) {
        // TODO: attempt authentication against a network service.
        /*try {
            // Simulate network access.
            Thread.sleep(2000);
        } catch (InterruptedException e) {
            return false;
        }
        for (String credential : DUMMY_CREDENTIALS) {
            String[] pieces = credential.split(":");
            if (pieces[0].equals(mUsername)) {
                // Account exists, return true if the password matches.
                return pieces[1].equals(mPassword);
            }
        }*/
        // TODO: register the new account here.
        return true;
    }

    @Override
    protected void onPostExecute(final Boolean success) {
        mAuthTask = null;
        showProgress(false);
        // Boolean.TRUE.equals guards against auto-unboxing an NPE if a
        // future doInBackground implementation ever returns null.
        if (Boolean.TRUE.equals(success)) {
            finish();
        } else {
            mPwdEdit.setError(getString(R.string.error_invalid_password));
            mPwdEdit.requestFocus();
        }
    }

    @Override
    protected void onCancelled() {
        mAuthTask = null;
        showProgress(false);
    }
}
}
| |
/*
* Copyright 2015 Rhythm & Hues Studios.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rhythm.louie.request;
import java.io.*;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.google.common.base.Joiner;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.Message;
import org.joda.time.DateTime;
import org.joda.time.Instant;
import org.joda.time.Interval;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.PeriodFormatter;
import org.joda.time.format.PeriodFormatterBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.rhythm.louie.email.EmailService;
import com.rhythm.louie.request.data.DataType;
import com.rhythm.louie.request.data.Result;
import com.rhythm.louie.server.*;
import com.rhythm.louie.services.auth.*;
import com.rhythm.pb.RequestProtos.ErrorPB;
import com.rhythm.pb.RequestProtos.IdentityPB;
import com.rhythm.pb.RequestProtos.RequestHeaderPB;
import com.rhythm.pb.RequestProtos.RequestPB;
import com.rhythm.pb.RequestProtos.ResponseHeaderPB;
import com.rhythm.pb.RequestProtos.ResponsePB;
import com.rhythm.pb.RequestProtos.RoutePB;
import com.rhythm.pb.RequestProtos.SessionKey;
/**
*
* @author cjohnson
*/
public class ProtoProcessor implements ProtoProcess {
private final Logger LOGGER = LoggerFactory.getLogger(ProtoProcessor.class);
// Extracts the CN component from a certificate subject DN, e.g. "CN=Jane Doe,OU=...".
private final Pattern userCN;
// True when the local server is configured for secure (authenticated) operation.
private final boolean secured;
// Thread id -> request currently executing on that thread; consulted by the
// RequestMonitor and getActiveRequests() to inspect in-flight work.
private static final Map<Long, RequestContext> currentRequestMap = new ConcurrentHashMap<>();
// Reads the security configuration and, when alert properties are present,
// schedules a recurring RequestMonitor that reports long-running requests.
public ProtoProcessor() {
secured = Server.getLocal().isSecure();
userCN = Pattern.compile(".*CN=([\\w\\s]+),*.*");
AlertProperties prop = AlertProperties.getProperties(AlertProperties.REQUEST);
if (prop != null) {
int cycle = prop.getMonitorPollCycle();
TaskScheduler.getInstance().scheduleWithFixedDelay(new RequestMonitor(prop), cycle, cycle, TimeUnit.SECONDS);
LOGGER.info("Request Monitor started");
} else {
LOGGER.info("No Request Monitor configured");
}
}
// Reads a delimited RequestHeaderPB plus its request(s) from input, executes
// each request, and writes delimited response protos to output. Batching
// (header count > 1) is explicitly rejected, although the per-request loop
// below is already written to handle multiple requests.
@Override
public void processRequest(InputStream input, OutputStream output, RequestProperties props) throws UnauthorizedSessionException, IOException, Exception {
long start = System.nanoTime();
RequestHeaderPB header = RequestHeaderPB.parseDelimitedFrom(input);
if (header.getCount()>1) {
throw new Exception("Batching Requests is not supported!");
}
IdentityPB identity = null;
SessionKey sessionKey = null;
if (header.hasKey()) {
// Established session: resolve the caller's identity from the session key.
SessionStat session = AuthUtils.accessSession(header.getKey());
identity = session.getIdentity();
} else { //initial request, we will handle creating and returning a key
if (header.hasIdentity()) { //to make backwards compatible
identity = header.getIdentity();
if (secured) {
Matcher match;
try {
match = userCN.matcher(props.getRemoteUser());
} catch (NullPointerException ex) {
// getRemoteUser() is null when the container performed no authentication.
LOGGER.error("IMPROPERLY CONFIGURED DEPLOYMENT. This instance is configured to be secure, "
+ "but the container has not provided authorization/validation. Please check your web.xml");
throw new Exception("Improperly configured secure server. Please contact your server admin.");
}
if (match.matches()) {
// Only issue a key when the claimed identity matches the certificate CN.
if (identity.getUser().equalsIgnoreCase(match.group(1))) {
sessionKey = AuthUtils.createKey(identity);
}
}
} else {
sessionKey = AuthUtils.createKey(identity);
}
}
if (sessionKey == null && secured) {
throw new UnauthenticatedException("Unable to create a Session Key, likely due to authentication failure");
}
}
ResponseHeaderPB.Builder responseHeader = ResponseHeaderPB.newBuilder();
responseHeader.setCount(header.getCount());
if (sessionKey != null) {
// Echo the newly created key back so the client can reuse the session.
responseHeader.setKey(sessionKey);
}
responseHeader.build().writeDelimitedTo(output);
for (int r = 0; r < header.getCount(); r++) {
RequestPB request = RequestPB.parseDelimitedFrom(input);
if (request == null) {
throw new Exception("Improper Request format! Reached EOF prematurely! @ProtoProcessor.processRequest()");
}
if (request.hasRouteUser() && identity == null) {
throw new Exception("User Route Permission Denied!");
}
RequestContext requestContext = null;
Result result = null;
RoutePB localRoute = props.createRoute(request.getService());
// Reject requests that have already passed through this host/gateway.
for (RoutePB route : request.getRouteList()) {
if (route.equals(localRoute)) {
throw new Exception("Route Loop Detected! "+route.getHostIp()+"/"+route.getGateway()+" visited twice!");
}
}
try {
requestContext = new RequestContext(header, request, DataType.PB, props);
if (header.hasIdentity()) {
requestContext.setSessionKey(sessionKey);
}
requestContext.setIdentity(identity);
requestContext.readPBParams(input);
requestContext.setRoute(localRoute);
requestContext.setThreadId(Thread.currentThread().getId());
// Register for monitoring while the request executes on this thread.
currentRequestMap.put(requestContext.getThreadID(), requestContext);
result = RequestHandler.processSingleRequest(requestContext);
result.setExecTime((System.nanoTime() - start) / 1000000);
handleResult(requestContext, result, output);
} catch (Exception e) {
String errorMessage = e.getMessage() == null ? e.toString() : e.getMessage();
LOGGER.error("ProtoProcessor caught error: "+errorMessage,e);
if (result != null) {
result.addError(e);
} else {
result = Result.errorResult(e);
}
} finally {
long end = System.nanoTime();
if (requestContext == null) {
LOGGER.error("Unknown Error, Request is null");
} else {
if (result == null) {
result = Result.errorResult(null);
}
// Durations are reported in milliseconds.
result.setDuration((end - start) / 1000000);
try {
RequestHandler.logRequest(requestContext, result);
} catch (Exception le) {
LOGGER.error("Error Logging: {}", le.getMessage());
}
}
currentRequestMap.remove(Thread.currentThread().getId());
// Reset the clock so any subsequent request in the loop is timed on its own.
start = end;
}
}
}
// Serializes the ResponsePB envelope plus any result messages to the output
// stream. Streaming results are flushed after every message.
private void handleResult(RequestContext requestContext,Result result,OutputStream output) throws Exception {
CodedOutputStream codedOutput = CodedOutputStream.newInstance(output);
ResponsePB.Builder responseBuilder = ResponsePB.newBuilder();
responseBuilder.setId(requestContext.getRequest().getId());
if (result.isError()) {
// Errors are reported inline in the response with a generic 500 code.
ErrorPB.Builder error = ErrorPB.newBuilder();
error.setCode(500);
Exception ex = result.getException();
if (ex!=null) {
error.setType(ex.getClass().getSimpleName());
if (ex.getMessage()!=null) {
error.setDescription(ex.getMessage());
}
} else {
error.setType("Unknown Exception");
}
responseBuilder.setError(error);
}
// NOTE(review): getDesinationRoutes is a pre-existing typo ("Desination")
// in the RequestContext API; left as-is since it is declared elsewhere.
responseBuilder.addRouteBuilder()
.setRoute(requestContext.getRoute())
.addAllPath(requestContext.getDesinationRoutes());
if (result.getMessages().isEmpty()) {
responseBuilder.setCount(0);
ResponsePB response = responseBuilder.build();
codedOutput.writeRawVarint32(response.getSerializedSize());
response.writeTo(codedOutput);
} else {
responseBuilder.setCount(result.getMessages().size());
boolean first = true;
long totalSize = 0;
for (Object oMessage : result.getMessages()) {
Message message = (Message) oMessage;
if (first) {
// The envelope is written once, typed by the first message.
responseBuilder.setType(message.getDescriptorForType().getFullName());
ResponsePB response = responseBuilder.build();
codedOutput.writeRawVarint32(response.getSerializedSize());
response.writeTo(codedOutput);
first = false;
}
// Each message is length-prefixed with a raw varint.
int serializedSize = message.getSerializedSize();
codedOutput.writeRawVarint32(serializedSize);
message.writeTo(codedOutput);
if (result.isStreaming()) {
codedOutput.flush();
output.flush();
}
totalSize+=serializedSize;
}
result.setSize(totalSize);
}
codedOutput.flush();
output.flush();
}
// Returns a snapshot of the proto requests currently executing on any thread.
public static List<RequestPB> getActiveRequests() {
List<RequestPB> reqs = new ArrayList<>();
for (RequestContext ctx : currentRequestMap.values()) {
reqs.add(ctx.getRequestThreadContext());
}
return reqs;
}
// Returns requests that have been running longer than msDuration milliseconds.
// NOTE(review): compares System.nanoTime()/1e6 against ctx.getCreateInstant();
// nanoTime has an arbitrary origin, so this is only meaningful if
// getCreateInstant() is derived from the same clock — TODO confirm.
private List<RequestContext> getLongRunningRequests(long msDuration) {
List<RequestContext> longrunning = new ArrayList<>();
long currentTime = System.nanoTime()/1000000;
for (RequestContext ctx : currentRequestMap.values()) {
if (currentTime - ctx.getCreateInstant() > msDuration) {
longrunning.add(ctx);
}
}
return longrunning;
}
// Periodic task that reports long-running requests via log and email: each
// thread is warned once, plus a full summary at the configured hour of day.
private class RequestMonitor implements Runnable {
// Thread ids already reported, so each long-running request warns only once.
private Set<Long> trackedThreads = new HashSet<>();
private final DateTimeFormatter fmt;
// Threshold in milliseconds before a request counts as long-running.
private long duration = 120000L;
// Hour of day (0-23) at which a full summary is emitted.
private final int summaryHour;
private final String email;
// Hour observed on the previous run; -1 until the first run.
private int hour;
public RequestMonitor(AlertProperties prop) {
// NOTE(review): "yyy" is likely a typo for "yyyy"; Joda-Time still prints
// the full year for modern dates, so output is unaffected.
fmt = DateTimeFormat.forPattern("MM/dd/yyy HH:mm:ss");
duration = prop.getDuration();
summaryHour = prop.getSummaryHour();
email = prop.getEmail();
hour = -1;
}
// Renders request durations as "H hours M minutes S seconds".
private final PeriodFormatter timeFmt = new PeriodFormatterBuilder()
.appendHours()
.appendSuffix(" hour", " hours")
.appendSeparator(" ")
.appendMinutes()
.appendSuffix(" minute", " minutes")
.appendSeparator(" ")
.appendSeconds()
.appendSuffix(" second", " seconds")
.toFormatter();
@Override
public void run() {
//hour threshold passing logic
// Emit the summary once, when the clock first enters the summary hour.
boolean genSummary = false;
int currentHour = new DateTime(System.currentTimeMillis()).getHourOfDay();
if (currentHour == summaryHour && currentHour != hour) {
genSummary = true;
}
hour = currentHour;
List<RequestContext> requests = getLongRunningRequests(duration);
Server local = Server.getLocal();
String subject = local.getHostName() +" ("+ local.getIp() +"/"+ local.getGateway()
+") ["+ local.getName() +"] Louie Request Monitor";
if (!requests.isEmpty()) {
Set<Long> foundThreads = new HashSet<>();
StringBuilder sb = new StringBuilder();
for (RequestContext ctx : requests) {
if (!trackedThreads.contains(ctx.getThreadID()) || genSummary) { //warn only once per request or for summary
sb.append("Thread ID: ").append(ctx.getThreadID()).append("\n");
sb.append("SessionKey: ").append(ctx.getSessionKey()).append("\n");
sb.append("User: ").append(ctx.getWho()).append("\n");
sb.append("IP: ").append(ctx.getRequestProperties().getRemoteAddress()).append("\n");
sb.append("Module: ").append(ctx.getModule()).append("\n");
sb.append("Language: ").append(ctx.getLanguage()).append("\n");
DateTime create = new DateTime(ctx.getCreateTime());
sb.append("Start time: ").append(fmt.print(create)).append("\n");
sb.append("Duration: ");
sb.append(timeFmt.print(new Interval(create, new Instant()).toPeriod())).append("\n");
sb.append("Request: ");
sb.append(ctx.getRequest().getService()).append(":");
sb.append(ctx.getRequest().getMethod()).append("(");
if (ctx.getRequest().getTypeCount() > 0) {
Joiner.on(",").appendTo(sb, ctx.getRequest().getTypeList());
}
sb.append(")");
if (!ctx.getParams().isEmpty()) {
sb.append(" - ");
sb.append("(");
RequestHandler.appendListString(sb,ctx.getParams());
sb.append(")");
}
sb.append("\n").append("Stacktrace: \n");
sb.append(ThreadInspector.INSTANCE.dumpStack(ctx.getThreadID(), 15));
sb.append("\n\n");
}
foundThreads.add(ctx.getThreadID());
}
// Report thread ids that were long-running last cycle but have since cleared.
Set<Long> cleared = new HashSet<>(trackedThreads);
cleared.removeAll(foundThreads);
if (!cleared.isEmpty()) {
sb.append("Cleared thread IDs:\t");
for (long l : cleared) {
sb.append(l).append("\t");
}
}
trackedThreads = foundThreads;
if (sb.length() > 0) {
LOGGER.info("Request Monitor Update:\n{}",sb.toString());
try {
EmailService.getInstance().sendMail(email, email, subject, sb.toString());
} catch (Exception ex) {
LOGGER.error(ex.toString());
}
}
} else {
// Nothing long-running now; if anything was tracked, report it cleared.
if (!trackedThreads.isEmpty()) {
StringBuilder sb = new StringBuilder();
sb.append("Cleared thread IDs:\t");
for (long l : trackedThreads) {
sb.append(l).append("\t");
}
trackedThreads.clear();
LOGGER.info("Request Monitor Update:\n{}",sb.toString());
try {
EmailService.getInstance().sendMail(email, email, subject, sb.toString());
// TODO drive addresses via properties
} catch (Exception ex) {
LOGGER.error(ex.toString());
}
}
}
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gateway.local;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.gateway.Gateway;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.InternalTestCluster.RestartCallback;
import org.junit.Test;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.ElasticsearchIntegrationTest.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
/**
*
*/
@ClusterScope(scope= Scope.TEST, numDataNodes =0)
@Slow
public class LocalGatewayIndexStateTests extends ElasticsearchIntegrationTest {
private final ESLogger logger = Loggers.getLogger(LocalGatewayIndexStateTests.class);
@Test
// Verifies that a mapping's _routing "required" flag survives a full cluster
// restart when persisted through the local gateway.
public void testMappingMetaDataParsed() throws Exception {
logger.info("--> starting 1 nodes");
internalCluster().startNode(settingsBuilder().put("gateway.type", "local"));
logger.info("--> creating test index, with meta routing");
client().admin().indices().prepareCreate("test")
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_routing").field("required", true).endObject().endObject().endObject())
.execute().actionGet();
logger.info("--> waiting for yellow status");
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForActiveShards(5).setWaitForYellowStatus().execute().actionGet();
if (health.isTimedOut()) {
// Dump the full cluster state to aid debugging before the assert fails.
ClusterStateResponse response = client().admin().cluster().prepareState().execute().actionGet();
System.out.println("" + response);
}
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify meta _routing required exists");
MappingMetaData mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test").mapping("type1");
assertThat(mappingMd.routing().required(), equalTo(true));
logger.info("--> restarting nodes...");
internalCluster().fullRestart();
logger.info("--> waiting for yellow status");
health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForActiveShards(5).setWaitForYellowStatus().execute().actionGet();
if (health.isTimedOut()) {
ClusterStateResponse response = client().admin().cluster().prepareState().execute().actionGet();
System.out.println("" + response);
}
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify meta _routing required exists");
// The flag must still be present after recovery from the local gateway.
mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test").mapping("type1");
assertThat(mappingMd.routing().required(), equalTo(true));
}
@Test
// Exercises the index open/close lifecycle: closed indices reject writes,
// keep their metadata (state CLOSE, no routing table), survive a full
// restart, and recover their documents when reopened.
public void testSimpleOpenClose() throws Exception {
logger.info("--> starting 2 nodes");
internalCluster().startNodesAsync(2, settingsBuilder().put("gateway.type", "local").build()).get();
logger.info("--> creating test index");
createIndex("test");
NumShards test = getNumShards("test");
logger.info("--> waiting for green status");
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
ClusterStateResponse stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metaData().index("test").state(), equalTo(IndexMetaData.State.OPEN));
assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries));
assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(), equalTo(test.totalNumShards));
logger.info("--> indexing a simple document");
client().prepareIndex("test", "type1", "1").setSource("field1", "value1").execute().actionGet();
logger.info("--> closing test index...");
client().admin().indices().prepareClose("test").execute().actionGet();
// A closed index keeps its metadata but drops out of the routing table.
stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metaData().index("test").state(), equalTo(IndexMetaData.State.CLOSE));
assertThat(stateResponse.getState().routingTable().index("test"), nullValue());
logger.info("--> verifying that the state is green");
health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
assertThat(health.getStatus(), equalTo(ClusterHealthStatus.GREEN));
logger.info("--> trying to index into a closed index ...");
try {
// Writes to a closed index must be rejected with a cluster block.
client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setTimeout("1s").execute().actionGet();
fail();
} catch (ClusterBlockException e) {
// all is well
}
logger.info("--> creating another index (test2) by indexing into it");
client().prepareIndex("test2", "type1", "1").setSource("field1", "value1").execute().actionGet();
logger.info("--> verifying that the state is green");
health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
assertThat(health.getStatus(), equalTo(ClusterHealthStatus.GREEN));
logger.info("--> opening the first index again...");
client().admin().indices().prepareOpen("test").execute().actionGet();
logger.info("--> verifying that the state is green");
health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
assertThat(health.getStatus(), equalTo(ClusterHealthStatus.GREEN));
stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metaData().index("test").state(), equalTo(IndexMetaData.State.OPEN));
assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries));
assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(), equalTo(test.totalNumShards));
logger.info("--> trying to get the indexed document on the first index");
// The document written before the close must be readable after reopen.
GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
logger.info("--> closing test index...");
client().admin().indices().prepareClose("test").execute().actionGet();
stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metaData().index("test").state(), equalTo(IndexMetaData.State.CLOSE));
assertThat(stateResponse.getState().routingTable().index("test"), nullValue());
logger.info("--> restarting nodes...");
internalCluster().fullRestart();
logger.info("--> waiting for two nodes and green status");
health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
// The CLOSE state must survive the restart.
stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metaData().index("test").state(), equalTo(IndexMetaData.State.CLOSE));
assertThat(stateResponse.getState().routingTable().index("test"), nullValue());
logger.info("--> trying to index into a closed index ...");
try {
client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setTimeout("1s").execute().actionGet();
fail();
} catch (ClusterBlockException e) {
// all is well
}
logger.info("--> opening index...");
client().admin().indices().prepareOpen("test").execute().actionGet();
logger.info("--> waiting for green status");
health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metaData().index("test").state(), equalTo(IndexMetaData.State.OPEN));
assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries));
assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(), equalTo(test.totalNumShards));
logger.info("--> trying to get the indexed document on the first round (before close and shutdown)");
getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
logger.info("--> indexing a simple document");
client().prepareIndex("test", "type1", "2").setSource("field1", "value1").execute().actionGet();
}
@Test
// Verifies that index metadata persisted by a master-only (non-data) node is
// recovered after that node is restarted.
public void testJustMasterNode() throws Exception {
logger.info("--> cleaning nodes");
logger.info("--> starting 1 master node non data");
internalCluster().startNode(settingsBuilder().put("node.data", false).put("gateway.type", "local").build());
logger.info("--> create an index");
client().admin().indices().prepareCreate("test").execute().actionGet();
logger.info("--> closing master node");
internalCluster().closeNonSharedNodes(false);
logger.info("--> starting 1 master node non data again");
internalCluster().startNode(settingsBuilder().put("node.data", false).put("gateway.type", "local").build());
logger.info("--> waiting for test index to be created");
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setIndices("test").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify we have an index");
// The index metadata must have been restored from the local gateway.
ClusterStateResponse clusterStateResponse = client().admin().cluster().prepareState().setIndices("test").execute().actionGet();
assertThat(clusterStateResponse.getState().metaData().hasIndex("test"), equalTo(true));
}
@Test
// Starts one master-only and one data-only node and checks that an index can
// be created and written to in that split-role topology.
public void testJustMasterNodeAndJustDataNode() throws Exception {
logger.info("--> cleaning nodes");
logger.info("--> starting 1 master node non data");
internalCluster().startNode(settingsBuilder().put("node.data", false).put("gateway.type", "local").build());
internalCluster().startNode(settingsBuilder().put("node.master", false).put("gateway.type", "local").build());
logger.info("--> create an index");
client().admin().indices().prepareCreate("test").execute().actionGet();
logger.info("--> waiting for test index to be created");
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setIndices("test").setWaitForYellowStatus().execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
// NOTE(review): the index response is not asserted; the call presumably only
// checks that indexing does not throw within the 100ms timeout — TODO confirm.
client().prepareIndex("test", "type1").setSource("field1", "value1").setTimeout("100ms").execute().actionGet();
}
@Test
// Indexes a single document on a two-node cluster and verifies it remains
// searchable across an index close/open cycle.
public void testTwoNodesSingleDoc() throws Exception {
logger.info("--> cleaning nodes");
logger.info("--> starting 2 nodes");
internalCluster().startNode(settingsBuilder().put("gateway.type", "local").build());
internalCluster().startNode(settingsBuilder().put("gateway.type", "local").build());
logger.info("--> indexing a simple document");
client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefresh(true).execute().actionGet();
logger.info("--> waiting for green status");
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify 1 doc in the index");
// Repeat the count so every shard copy gets queried at least once.
// Long literals use uppercase L ("1l" is easily misread as 11).
for (int i = 0; i < 10; i++) {
assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(1L));
}
logger.info("--> closing test index...");
client().admin().indices().prepareClose("test").execute().actionGet();
ClusterStateResponse stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metaData().index("test").state(), equalTo(IndexMetaData.State.CLOSE));
assertThat(stateResponse.getState().routingTable().index("test"), nullValue());
logger.info("--> opening the index...");
client().admin().indices().prepareOpen("test").execute().actionGet();
logger.info("--> waiting for green status");
health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify 1 doc in the index");
assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(1L));
for (int i = 0; i < 10; i++) {
assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(1L));
}
}
@Test
// With gateway.local.auto_import_dangled=yes, an index whose metadata was
// wiped from one node must be re-imported as a dangling index (open, with
// its document intact) after a full restart.
public void testDanglingIndicesAutoImportYes() throws Exception {
Settings settings = settingsBuilder()
.put("gateway.type", "local").put("gateway.local.auto_import_dangled", "yes")
.build();
logger.info("--> starting two nodes");
final String node_1 = internalCluster().startNode(settings);
internalCluster().startNode(settings);
logger.info("--> indexing a simple document");
client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefresh(true).execute().actionGet();
logger.info("--> waiting for green status");
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify 1 doc in the index");
// Long literals use uppercase L ("1l" is easily misread as 11).
for (int i = 0; i < 10; i++) {
assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(1L));
}
assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true));
logger.info("--> restarting the nodes");
final Gateway gateway1 = internalCluster().getInstance(Gateway.class, node_1);
internalCluster().fullRestart(new RestartCallback() {
@Override
public Settings onNodeStopped(String nodeName) throws Exception {
if (node_1.equals(nodeName)) {
// Wipe node 1's gateway state so its copy of the index dangles.
logger.info("--> deleting the data for the first node");
gateway1.reset();
}
return null;
}
});
logger.info("--> waiting for green status");
health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
// Poll (rather than hot-spin) for up to 10s for the dangling index import.
long time = System.currentTimeMillis();
while ((System.currentTimeMillis() - time) < TimeValue.timeValueSeconds(10).millis()) {
if (client().admin().indices().prepareExists("test").execute().actionGet().isExists()) {
break;
}
Thread.sleep(10);
}
logger.info("--> verify that the dangling index exists");
assertThat(client().admin().indices().prepareExists("test").execute().actionGet().isExists(), equalTo(true));
logger.info("--> waiting for green status");
health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify the doc is there");
assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true));
}
@Test
public void testDanglingIndicesAutoImportClose() throws Exception {
    // With auto_import_dangled=closed, a dangling index found on disk is imported
    // into the cluster state in the CLOSED state and must be opened explicitly
    // before its documents become reachable again.
    Settings settings = settingsBuilder()
            .put("gateway.type", "local").put("gateway.local.auto_import_dangled", "closed")
            .build();
    logger.info("--> starting two nodes");
    final String node_1 = internalCluster().startNode(settings);
    internalCluster().startNode(settings);
    logger.info("--> indexing a simple document");
    client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefresh(true).execute().actionGet();
    logger.info("--> waiting for green status");
    ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
    assertThat(health.isTimedOut(), equalTo(false));
    logger.info("--> verify 1 doc in the index");
    for (int i = 0; i < 10; i++) {
        // Uppercase 'L' suffix: lowercase '1l' is easily misread as '11'.
        assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(1L));
    }
    assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true));
    logger.info("--> restarting the nodes");
    final Gateway gateway1 = internalCluster().getInstance(Gateway.class, node_1);
    internalCluster().fullRestart(new RestartCallback() {
        @Override
        public Settings onNodeStopped(String nodeName) throws Exception {
            if (node_1.equals(nodeName)) {
                // Wipe the first node's local state so the index survives only
                // as a dangling copy on the second node.
                logger.info("--> deleting the data for the first node");
                gateway1.reset();
            }
            return null;
        }
    });
    logger.info("--> waiting for green status");
    health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
    assertThat(health.isTimedOut(), equalTo(false));
    // Poll (bounded at 10s) until the dangling index shows up in the cluster state;
    // sleep briefly between polls instead of busy-spinning.
    long time = System.currentTimeMillis();
    while ((System.currentTimeMillis() - time) < TimeValue.timeValueSeconds(10).millis()) {
        if (client().admin().indices().prepareExists("test").execute().actionGet().isExists()) {
            break;
        }
        Thread.sleep(10);
    }
    logger.info("--> verify that the dangling index exists");
    assertThat(client().admin().indices().prepareExists("test").execute().actionGet().isExists(), equalTo(true));
    logger.info("--> waiting for green status");
    health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
    assertThat(health.isTimedOut(), equalTo(false));
    logger.info("--> verify the index state is closed");
    assertThat(client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test").state(), equalTo(IndexMetaData.State.CLOSE));
    logger.info("--> open the index");
    client().admin().indices().prepareOpen("test").execute().actionGet();
    logger.info("--> waiting for green status");
    health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
    assertThat(health.isTimedOut(), equalTo(false));
    logger.info("--> verify the doc is there");
    assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true));
}
@Test
public void testDanglingIndicesNoAutoImport() throws Exception {
    // With auto_import_dangled=no, a dangling index is NOT imported into the
    // cluster state; it only comes back when a full-cluster recovery picks it up.
    Settings settings = settingsBuilder()
            .put("gateway.type", "local").put("gateway.local.auto_import_dangled", "no")
            .build();
    logger.info("--> starting two nodes");
    final String node_1 = internalCluster().startNode(settings);
    internalCluster().startNode(settings);
    logger.info("--> indexing a simple document");
    client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefresh(true).execute().actionGet();
    logger.info("--> waiting for green status");
    ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
    assertThat(health.isTimedOut(), equalTo(false));
    logger.info("--> verify 1 doc in the index");
    for (int i = 0; i < 10; i++) {
        // Uppercase 'L' suffix: lowercase '1l' is easily misread as '11'.
        assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(1L));
    }
    assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true));
    logger.info("--> restarting the nodes");
    final Gateway gateway1 = internalCluster().getInstance(Gateway.class, node_1);
    internalCluster().fullRestart(new RestartCallback() {
        @Override
        public Settings onNodeStopped(String nodeName) throws Exception {
            if (node_1.equals(nodeName)) {
                // Wipe the first node's local state so the index survives only
                // as a dangling copy on the second node.
                logger.info("--> deleting the data for the first node");
                gateway1.reset();
            }
            return null;
        }
    });
    logger.info("--> waiting for green status");
    health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
    assertThat(health.isTimedOut(), equalTo(false));
    // we need to wait for the allocate dangled to kick in (even though in this case its disabled)
    // just to make sure
    Thread.sleep(500);
    logger.info("--> verify that the dangling index does not exists");
    assertThat(client().admin().indices().prepareExists("test").execute().actionGet().isExists(), equalTo(false));
    logger.info("--> restart start the nodes, but make sure we do recovery only after we have 2 nodes in the cluster");
    internalCluster().fullRestart(new RestartCallback() {
        @Override
        public Settings onNodeStopped(String nodeName) throws Exception {
            // Delay recovery until both nodes are back, so the node still holding
            // the index data participates in the full-cluster recovery.
            return settingsBuilder().put("gateway.recover_after_nodes", 2).build();
        }
    });
    logger.info("--> waiting for green status");
    health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
    assertThat(health.isTimedOut(), equalTo(false));
    logger.info("--> verify that the dangling index does exists now!");
    assertThat(client().admin().indices().prepareExists("test").execute().actionGet().isExists(), equalTo(true));
    logger.info("--> verify the doc is there");
    assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true));
}
@Test
public void testDanglingIndicesNoAutoImportStillDanglingAndCreatingSameIndex() throws Exception {
    // A dangling (non-imported) index must not leak its old documents into a
    // freshly created index of the same name.
    Settings settings = settingsBuilder()
            .put("gateway.type", "local").put("gateway.local.auto_import_dangled", "no")
            .build();
    logger.info("--> starting two nodes");
    final String node_1 = internalCluster().startNode(settings);
    internalCluster().startNode(settings);
    logger.info("--> indexing a simple document");
    client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefresh(true).execute().actionGet();
    logger.info("--> waiting for green status");
    ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
    assertThat(health.isTimedOut(), equalTo(false));
    logger.info("--> verify 1 doc in the index");
    for (int i = 0; i < 10; i++) {
        // Uppercase 'L' suffix: lowercase '1l' is easily misread as '11'.
        assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(1L));
    }
    logger.info("--> restarting the nodes");
    final Gateway gateway1 = internalCluster().getInstance(Gateway.class, node_1);
    internalCluster().fullRestart(new RestartCallback() {
        @Override
        public Settings onNodeStopped(String nodeName) throws Exception {
            if (node_1.equals(nodeName)) {
                // Wipe the first node's local state so the index survives only
                // as a dangling copy on the second node.
                logger.info("--> deleting the data for the first node");
                gateway1.reset();
            }
            return null;
        }
    });
    logger.info("--> waiting for green status");
    health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").execute().actionGet();
    assertThat(health.isTimedOut(), equalTo(false));
    logger.info("--> verify that the dangling index does not exists");
    assertThat(client().admin().indices().prepareExists("test").execute().actionGet().isExists(), equalTo(false));
    logger.info("--> close the first node, so we remain with the second that has the dangling index");
    internalCluster().stopRandomNode(InternalTestCluster.nameFilter(node_1));
    logger.info("--> index a different doc");
    client().prepareIndex("test", "type1", "2").setSource("field1", "value2").setRefresh(true).execute().actionGet();
    logger.info("--> verify that doc 2 does exist");
    assertThat(client().prepareGet("test", "type1", "2").execute().actionGet().isExists(), equalTo(true));
    // Need an ensure yellow here, since the index gets created (again) when we index doc2, so the shard that doc
    // with id 1 is assigned to might not be in a started state. We don't need to do this when verifying if doc 2
    // exists, because we index into the shard that doc gets assigned to.
    ensureYellow("test");
    logger.info("--> verify that doc 1 doesn't exist");
    assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false));
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.test.api.runtime.changestate;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assertions.tuple;
import java.util.List;
import org.flowable.common.engine.api.FlowableException;
import org.flowable.common.engine.api.scope.ScopeTypes;
import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.runtime.Execution;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.flowable.entitylink.api.EntityLink;
import org.flowable.entitylink.api.EntityLinkType;
import org.flowable.entitylink.api.HierarchyType;
import org.flowable.task.api.Task;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
* @author Frederik Heremans
* @author Joram Barrez
* @author Dennis Federico
*/
public class ChangeStateForCallActivityTest extends PluggableFlowableTestCase {

    // Registered before and removed after every test; never reassigned, so keep it final.
    private final ChangeStateEventListener changeStateEventListener = new ChangeStateEventListener();

    @BeforeEach
    protected void setUp() {
        processEngine.getRuntimeService().addEventListener(changeStateEventListener);
    }

    @AfterEach
    protected void tearDown() {
        processEngine.getRuntimeService().removeEventListener(changeStateEventListener);
    }

    @Test
    @Deployment(resources = { "org/flowable/engine/test/api/twoTasksParentProcess.bpmn20.xml", "org/flowable/engine/test/api/oneTaskProcess.bpmn20.xml" })
    public void testSetCurrentActivityInParentProcess() {
        // Advance the parent process into the call activity's subprocess.
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("twoTasksParentProcess");
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("firstTask");
        taskService.complete(task.getId());
        ProcessInstance subProcessInstance = runtimeService.createProcessInstanceQuery().superProcessInstanceId(processInstance.getId()).singleResult();
        assertThat(subProcessInstance).isNotNull();
        task = taskService.createTaskQuery().processInstanceId(subProcessInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("theTask");
        // Move the execution from the subprocess task back up into the parent process.
        runtimeService.createChangeActivityStateBuilder()
                .processInstanceId(subProcessInstance.getId())
                .moveActivityIdToParentActivityId("theTask", "secondTask")
                .changeState();
        task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("secondTask");
        // The subprocess instance must be gone once its only activity moved out.
        assertThat(runtimeService.createProcessInstanceQuery().superProcessInstanceId(processInstance.getId()).count()).isZero();
        assertThat(runtimeService.createProcessInstanceQuery().processInstanceId(subProcessInstance.getId()).count()).isZero();
        List<Execution> executions = runtimeService.createExecutionQuery().processInstanceId(processInstance.getId()).onlyChildExecutions().list();
        assertThat(executions).hasSize(1);
        taskService.complete(task.getId());
        assertProcessEnded(processInstance.getId());
    }

    @Test
    @Deployment(resources = { "org/flowable/engine/test/api/twoTasksParentProcessV2.bpmn20.xml", "org/flowable/engine/test/api/twoTasksProcess.bpmn20.xml" })
    public void testSetCurrentActivityInParentProcessV2() {
        // Advance into the subprocess and through its first task.
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("twoTasksParentProcess");
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("firstTask");
        taskService.complete(task.getId());
        ProcessInstance subProcessInstance = runtimeService.createProcessInstanceQuery().superProcessInstanceId(processInstance.getId()).singleResult();
        assertThat(subProcessInstance).isNotNull();
        task = taskService.createTaskQuery().processInstanceId(subProcessInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("firstTask");
        taskService.complete(task.getId());
        task = taskService.createTaskQuery().processInstanceId(subProcessInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("secondTask");
        // Both processes define an activity called "secondTask"; move from the
        // subprocess one to the parent one.
        runtimeService.createChangeActivityStateBuilder()
                .processInstanceId(subProcessInstance.getId())
                .moveActivityIdToParentActivityId("secondTask", "secondTask")
                .changeState();
        task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("secondTask");
        assertThat(runtimeService.createProcessInstanceQuery().superProcessInstanceId(processInstance.getId()).count()).isZero();
        assertThat(runtimeService.createProcessInstanceQuery().processInstanceId(subProcessInstance.getId()).count()).isZero();
        List<Execution> executions = runtimeService.createExecutionQuery().processInstanceId(processInstance.getId()).onlyChildExecutions().list();
        assertThat(executions).hasSize(1);
        taskService.complete(task.getId());
        assertProcessEnded(processInstance.getId());
    }

    @Test
    @Deployment(resources = { "org/flowable/engine/test/api/twoTasksParentProcess.bpmn20.xml", "org/flowable/engine/test/api/oneTaskProcess.bpmn20.xml" })
    public void testSetCurrentActivityInSubProcessInstance() {
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("twoTasksParentProcess");
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("firstTask");
        // Move the execution from the parent task down into the call activity's
        // subprocess, starting the subprocess instance at "theTask".
        runtimeService.createChangeActivityStateBuilder()
                .processInstanceId(processInstance.getId())
                .moveActivityIdToSubProcessInstanceActivityId("firstTask", "theTask", "callActivity")
                .changeState();
        ProcessInstance subProcessInstance = runtimeService.createProcessInstanceQuery().superProcessInstanceId(processInstance.getId()).singleResult();
        assertThat(subProcessInstance).isNotNull();
        assertThat(taskService.createTaskQuery().processInstanceId(processInstance.getId()).count()).isZero();
        assertThat(taskService.createTaskQuery().processInstanceId(subProcessInstance.getId()).count()).isEqualTo(1);
        assertThat(runtimeService.createExecutionQuery().processInstanceId(processInstance.getId()).onlyChildExecutions().count()).isEqualTo(1);
        assertThat(runtimeService.createExecutionQuery().processInstanceId(subProcessInstance.getId()).onlyChildExecutions().count()).isEqualTo(1);
        task = taskService.createTaskQuery().processInstanceId(subProcessInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("theTask");
        taskService.complete(task.getId());
        // Completing the subprocess returns control to the parent's second task.
        assertThat(runtimeService.createProcessInstanceQuery().processInstanceId(subProcessInstance.getId()).count()).isZero();
        task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("secondTask");
        taskService.complete(task.getId());
        assertProcessEnded(processInstance.getId());
    }

    @Test
    @Deployment(resources = { "org/flowable/engine/test/api/twoTasksParentProcessV2.bpmn20.xml", "org/flowable/engine/test/api/twoTasksProcess.bpmn20.xml" })
    public void testSetCurrentActivityInSubProcessInstanceV2() {
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("twoTasksParentProcess");
        Task firstTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(firstTask.getTaskDefinitionKey()).isEqualTo("firstTask");
        // Before the state change the root process only links to its own task.
        assertThat(runtimeService.getEntityLinkChildrenForProcessInstance(processInstance.getId()))
                .extracting(EntityLink::getHierarchyType, EntityLink::getReferenceScopeId,
                        EntityLink::getReferenceScopeType, EntityLink::getLinkType)
                .as("hierarchyType, referenceScopeId, referenceScopeType, linkType")
                .containsExactlyInAnyOrder(
                        tuple(HierarchyType.ROOT, firstTask.getId(), ScopeTypes.TASK, EntityLinkType.CHILD)
                );
        assertThat(runtimeService.getEntityLinkChildrenForProcessInstance(processInstance.getId()))
                .extracting(EntityLink::getRootScopeId, EntityLink::getRootScopeType)
                .containsOnly(
                        tuple(processInstance.getId(), ScopeTypes.BPMN)
                );
        // Jump directly into the subprocess's second task via the call activity.
        runtimeService.createChangeActivityStateBuilder()
                .processInstanceId(processInstance.getId())
                .moveActivityIdToSubProcessInstanceActivityId("firstTask", "secondTask", "callActivity")
                .changeState();
        ProcessInstance subProcessInstance = runtimeService.createProcessInstanceQuery().superProcessInstanceId(processInstance.getId()).singleResult();
        assertThat(subProcessInstance).isNotNull();
        assertThat(taskService.createTaskQuery().processInstanceId(processInstance.getId()).count()).isZero();
        assertThat(taskService.createTaskQuery().processInstanceId(subProcessInstance.getId()).count()).isEqualTo(1);
        assertThat(runtimeService.createExecutionQuery().processInstanceId(processInstance.getId()).onlyChildExecutions().count()).isEqualTo(1);
        assertThat(runtimeService.createExecutionQuery().processInstanceId(subProcessInstance.getId()).onlyChildExecutions().count()).isEqualTo(1);
        Task firstSecondTask = taskService.createTaskQuery().processInstanceId(subProcessInstance.getId()).singleResult();
        assertThat(firstSecondTask.getTaskDefinitionKey()).isEqualTo("secondTask");
        // The root now also links to the subprocess instance and its task.
        assertThat(runtimeService.getEntityLinkChildrenForProcessInstance(processInstance.getId()))
                .extracting(EntityLink::getHierarchyType, EntityLink::getReferenceScopeId,
                        EntityLink::getReferenceScopeType, EntityLink::getLinkType)
                .as("hierarchyType, referenceScopeId, referenceScopeType, linkType")
                .containsExactlyInAnyOrder(
                        tuple(HierarchyType.ROOT, firstTask.getId(), ScopeTypes.TASK, EntityLinkType.CHILD),
                        tuple(HierarchyType.ROOT, firstSecondTask.getId(), ScopeTypes.TASK, EntityLinkType.CHILD),
                        tuple(HierarchyType.ROOT, subProcessInstance.getId(), ScopeTypes.BPMN, EntityLinkType.CHILD)
                );
        assertThat(runtimeService.getEntityLinkChildrenForProcessInstance(processInstance.getId()))
                .extracting(EntityLink::getRootScopeId, EntityLink::getRootScopeType)
                .containsOnly(
                        tuple(processInstance.getId(), ScopeTypes.BPMN)
                );
        // The subprocess instance links its own task as PARENT hierarchy.
        assertThat(runtimeService.getEntityLinkChildrenForProcessInstance(subProcessInstance.getId()))
                .extracting(EntityLink::getHierarchyType, EntityLink::getReferenceScopeId,
                        EntityLink::getReferenceScopeType, EntityLink::getLinkType)
                .as("hierarchyType, referenceScopeId, referenceScopeType, linkType")
                .containsExactlyInAnyOrder(
                        tuple(HierarchyType.PARENT, firstSecondTask.getId(), ScopeTypes.TASK, EntityLinkType.CHILD)
                );
        assertThat(runtimeService.getEntityLinkChildrenForProcessInstance(subProcessInstance.getId()))
                .extracting(EntityLink::getRootScopeId, EntityLink::getRootScopeType)
                .containsOnly(
                        tuple(processInstance.getId(), ScopeTypes.BPMN)
                );
        taskService.complete(firstSecondTask.getId());
        assertThat(runtimeService.createProcessInstanceQuery().processInstanceId(subProcessInstance.getId()).count()).isZero();
        Task secondTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(secondTask.getTaskDefinitionKey()).isEqualTo("secondTask");
        taskService.complete(secondTask.getId());
        assertProcessEnded(processInstance.getId());
    }

    @Test
    @Deployment(resources = { "org/flowable/engine/test/api/variables/callActivityWithCalledElementExpression.bpmn20.xml" })
    public void testSetCurrentActivityInSubProcessInstanceWithCalledElementExpression() {
        //Deploy second version of the process definition
        deployProcessDefinition("my deploy", "org/flowable/engine/test/api/oneTaskProcess.bpmn20.xml");
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("calledElementExpression");
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("firstTask");
        //First change state attempt fails as the calledElement expression cannot be evaluated
        assertThatThrownBy(() -> runtimeService.createChangeActivityStateBuilder()
                .processInstanceId(processInstance.getId())
                .moveActivityIdToSubProcessInstanceActivityId("firstTask", "theTask", "callActivity")
                .changeState())
                .isExactlyInstanceOf(FlowableException.class)
                .hasMessage("Cannot resolve calledElement expression '${subProcessDefId}' of callActivity 'callActivity'");
        //Change state specifying the variable with the value
        runtimeService.createChangeActivityStateBuilder()
                .processInstanceId(processInstance.getId())
                .moveActivityIdToSubProcessInstanceActivityId("firstTask", "theTask", "callActivity", 1)
                .processVariable("subProcessDefId", "oneTaskProcess")
                .changeState();
        ProcessInstance subProcessInstance = runtimeService.createProcessInstanceQuery().superProcessInstanceId(processInstance.getId()).singleResult();
        assertThat(subProcessInstance).isNotNull();
        assertThat(taskService.createTaskQuery().processInstanceId(processInstance.getId()).count()).isZero();
        assertThat(taskService.createTaskQuery().processInstanceId(subProcessInstance.getId()).count()).isEqualTo(1);
        assertThat(runtimeService.createExecutionQuery().processInstanceId(processInstance.getId()).onlyChildExecutions().count()).isEqualTo(1);
        assertThat(runtimeService.createExecutionQuery().processInstanceId(subProcessInstance.getId()).onlyChildExecutions().count()).isEqualTo(1);
        task = taskService.createTaskQuery().processInstanceId(subProcessInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("theTask");
        taskService.complete(task.getId());
        assertThat(runtimeService.createProcessInstanceQuery().processInstanceId(subProcessInstance.getId()).count()).isZero();
        task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("lastTask");
        taskService.complete(task.getId());
        assertProcessEnded(processInstance.getId());
        deleteDeployments();
    }

    @Test
    @Deployment(resources = { "org/flowable/engine/test/api/twoTasksParentProcess.bpmn20.xml", "org/flowable/engine/test/api/oneTaskProcess.bpmn20.xml" })
    public void testSetCurrentActivityInSubProcessInstanceSpecificVersion() {
        //Deploy second version of the process definition
        deployProcessDefinition("my deploy", "org/flowable/engine/test/api/oneTaskProcessV2.bpmn20.xml");
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("twoTasksParentProcess");
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("firstTask");
        // Without a version, the latest definition (V2, no "theTask") is used.
        assertThatThrownBy(() -> runtimeService.createChangeActivityStateBuilder()
                .processInstanceId(processInstance.getId())
                .moveActivityIdToSubProcessInstanceActivityId("firstTask", "theTask", "callActivity")
                .changeState())
                .isExactlyInstanceOf(FlowableException.class)
                .hasMessage("Cannot find activity 'theTask' in process definition with id 'oneTaskProcess'");
        //Invalid "unExistent" process definition version
        assertThatThrownBy(() -> runtimeService.createChangeActivityStateBuilder()
                .processInstanceId(processInstance.getId())
                .moveActivityIdToSubProcessInstanceActivityId("firstTask", "theTask", "callActivity", 5)
                .changeState())
                .isExactlyInstanceOf(FlowableException.class)
                .hasMessage("Cannot find activity 'theTask' in process definition with id 'oneTaskProcess'");
        //Change state specifying the first version
        runtimeService.createChangeActivityStateBuilder()
                .processInstanceId(processInstance.getId())
                .moveActivityIdToSubProcessInstanceActivityId("firstTask", "theTask", "callActivity", 1)
                .changeState();
        ProcessInstance subProcessInstance = runtimeService.createProcessInstanceQuery().superProcessInstanceId(processInstance.getId()).singleResult();
        assertThat(subProcessInstance).isNotNull();
        assertThat(taskService.createTaskQuery().processInstanceId(processInstance.getId()).count()).isZero();
        assertThat(taskService.createTaskQuery().processInstanceId(subProcessInstance.getId()).count()).isEqualTo(1);
        assertThat(runtimeService.createExecutionQuery().processInstanceId(processInstance.getId()).onlyChildExecutions().count()).isEqualTo(1);
        assertThat(runtimeService.createExecutionQuery().processInstanceId(subProcessInstance.getId()).onlyChildExecutions().count()).isEqualTo(1);
        task = taskService.createTaskQuery().processInstanceId(subProcessInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("theTask");
        taskService.complete(task.getId());
        assertThat(runtimeService.createProcessInstanceQuery().processInstanceId(subProcessInstance.getId()).count()).isZero();
        task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task.getTaskDefinitionKey()).isEqualTo("secondTask");
        taskService.complete(task.getId());
        assertProcessEnded(processInstance.getId());
        deleteDeployments();
    }
}
| |
package org.hl7.fhir.instance.model;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Tue, Oct 21, 2014 07:07+1100 for FHIR v0.3.0
import java.util.*;
/**
* Information about a person that is involved in the care for a patient, but who is not the target of healthcare, nor has a formal responsibility in the care process.
*/
public class RelatedPerson extends Resource {
public enum AdministrativeGender {
    MALE, // Male
    FEMALE, // Female
    OTHER, // Other
    UNKNOWN, // Unknown
    NULL; // added to help the parsers

    /**
     * Parses a FHIR code string into an AdministrativeGender constant.
     * Returns null for a null or empty code; throws for an unrecognised one.
     */
    public static AdministrativeGender fromCode(String codeString) throws Exception {
        if (codeString == null || "".equals(codeString)) {
            return null;
        }
        if ("male".equals(codeString)) {
            return MALE;
        }
        if ("female".equals(codeString)) {
            return FEMALE;
        }
        if ("other".equals(codeString)) {
            return OTHER;
        }
        if ("unknown".equals(codeString)) {
            return UNKNOWN;
        }
        throw new Exception("Unknown AdministrativeGender code '"+codeString+"'");
    }

    /** Returns the FHIR wire-format code for this constant ("?" for NULL). */
    public String toCode() {
        if (this == MALE) return "male";
        if (this == FEMALE) return "female";
        if (this == OTHER) return "other";
        if (this == UNKNOWN) return "unknown";
        return "?";
    }

    /** Returns the formal definition text for this constant ("?" for NULL). */
    public String getDefinition() {
        if (this == MALE) return "Male";
        if (this == FEMALE) return "Female";
        if (this == OTHER) return "Other";
        if (this == UNKNOWN) return "Unknown";
        return "?";
    }

    /** Returns the display text ("?" for NULL). */
    public String getDisplay() {
        // The code generator emitted empty display strings for every concrete code
        // in this FHIR build; only the parser helper NULL maps to "?".
        return this == NULL ? "?" : "";
    }
}
public static class AdministrativeGenderEnumFactory implements EnumFactory {
    /**
     * Parses a code string into an {@link AdministrativeGender} constant.
     * Returns null for a null or empty code; throws for an unrecognised one.
     */
    public Enum<?> fromCode(String codeString) throws Exception {
      // Fixed: the null/empty guard was accidentally duplicated on two
      // consecutive lines; a single check is sufficient (behavior unchanged).
      if (codeString == null || "".equals(codeString))
            return null;
        if ("male".equals(codeString))
          return AdministrativeGender.MALE;
        if ("female".equals(codeString))
          return AdministrativeGender.FEMALE;
        if ("other".equals(codeString))
          return AdministrativeGender.OTHER;
        if ("unknown".equals(codeString))
          return AdministrativeGender.UNKNOWN;
        throw new Exception("Unknown AdministrativeGender code '"+codeString+"'");
    }

    /**
     * Serialises an {@link AdministrativeGender} constant back to its code
     * string; "?" for anything unrecognised (including NULL).
     */
    public String toCode(Enum<?> code) throws Exception {
      if (code == AdministrativeGender.MALE)
        return "male";
      if (code == AdministrativeGender.FEMALE)
        return "female";
      if (code == AdministrativeGender.OTHER)
        return "other";
      if (code == AdministrativeGender.UNKNOWN)
        return "unknown";
      return "?";
    }
}
    /**
     * Identifier for a person within a particular scope.
     */
    protected List<Identifier> identifier = new ArrayList<Identifier>();

    /**
     * The patient this person is related to. Required element (see the
     * convenience constructor).
     */
    protected Reference patient;

    /**
     * The actual object that is the target of the reference (The patient this person is related to.)
     * Not populated by the reference library; holds the resolved resource if the caller resolves it.
     */
    protected Patient patientTarget;

    /**
     * The nature of the relationship between a patient and the related person.
     */
    protected CodeableConcept relationship;

    /**
     * A name associated with the person.
     */
    protected HumanName name;

    /**
     * A contact detail for the person, e.g. a telephone number or an email address.
     */
    protected List<ContactPoint> telecom = new ArrayList<ContactPoint>();

    /**
     * Administrative Gender - the gender that the person is considered to have for administration and record keeping purposes.
     */
    protected Enumeration<AdministrativeGender> gender;

    /**
     * Address where the related person can be contacted or visited.
     */
    protected Address address;

    /**
     * Image of the person.
     */
    protected List<Attachment> photo = new ArrayList<Attachment>();

    // Generated serialisation id; keep stable across releases of this resource.
    private static final long serialVersionUID = 152396611L;
    /**
     * Plain constructor: creates an empty RelatedPerson resource.
     */
    public RelatedPerson() {
      super();
    }
    /**
     * Convenience constructor setting the one required element.
     * @param patient the patient this person is related to
     */
    public RelatedPerson(Reference patient) {
      super();
      this.patient = patient;
    }
/**
* @return {@link #identifier} (Identifier for a person within a particular scope.)
*/
public List<Identifier> getIdentifier() {
return this.identifier;
}
// syntactic sugar
/**
* @return {@link #identifier} (Identifier for a person within a particular scope.)
*/
public Identifier addIdentifier() {
Identifier t = new Identifier();
this.identifier.add(t);
return t;
}
/**
* @return {@link #patient} (The patient this person is related to.)
*/
public Reference getPatient() {
return this.patient;
}
/**
* @param value {@link #patient} (The patient this person is related to.)
*/
public RelatedPerson setPatient(Reference value) {
this.patient = value;
return this;
}
/**
* @return {@link #patient} The actual object that is the target of the reference. The reference library doesn't populate this, but you can use it to hold the resource if you resolve it. (The patient this person is related to.)
*/
public Patient getPatientTarget() {
return this.patientTarget;
}
/**
 * @param value {@link #patient} The actual object that is the target of the reference. The reference library doesn't use these, but you can use it to hold the resource if you resolve it. (The patient this person is related to.)
 * @return this object, to allow chained (fluent) calls
 */
public RelatedPerson setPatientTarget(Patient value) {
this.patientTarget = value;
return this;
}
/**
 * @return {@link #relationship} (The nature of the relationship between a patient and the related person.)
 * May be null if the element has not been populated.
 */
public CodeableConcept getRelationship() {
return this.relationship;
}
/**
 * @param value {@link #relationship} (The nature of the relationship between a patient and the related person.)
 * @return this object, to allow chained (fluent) calls
 */
public RelatedPerson setRelationship(CodeableConcept value) {
this.relationship = value;
return this;
}
/**
 * @return {@link #name} (A name associated with the person.)
 * May be null if the element has not been populated.
 */
public HumanName getName() {
return this.name;
}
/**
 * @param value {@link #name} (A name associated with the person.)
 * @return this object, to allow chained (fluent) calls
 */
public RelatedPerson setName(HumanName value) {
this.name = value;
return this;
}
/**
 * @return {@link #telecom} (A contact detail for the person, e.g. a telephone number or an email address.)
 * Note: returns the live backing list, not a defensive copy; mutations affect this resource.
 */
public List<ContactPoint> getTelecom() {
return this.telecom;
}
// syntactic sugar
/**
 * Creates a new, empty {@link ContactPoint}, appends it to {@link #telecom}
 * and returns it so the caller can populate it in place.
 *
 * @return the freshly added {@link ContactPoint}
 */
public ContactPoint addTelecom() {
ContactPoint newContact = new ContactPoint();
telecom.add(newContact);
return newContact;
}
/**
 * @return {@link #gender} (Administrative Gender - the gender that the person is considered to have for administration and record keeping purposes.). This is the underlying object with id, value and extensions. The accessor "getGender" gives direct access to the value
 */
public Enumeration<AdministrativeGender> getGenderElement() {
return this.gender;
}
/**
 * @param value {@link #gender} (Administrative Gender - the gender that the person is considered to have for administration and record keeping purposes.). This is the underlying object with id, value and extensions. The accessor "getGender" gives direct access to the value
 * @return this object, to allow chained (fluent) calls
 */
public RelatedPerson setGenderElement(Enumeration<AdministrativeGender> value) {
this.gender = value;
return this;
}
/**
 * @return the plain Administrative Gender value, unwrapped from its
 * {@link Enumeration} element; null when the element is absent.
 */
public AdministrativeGender getGender() {
if (this.gender == null) {
return null;
}
return this.gender.getValue();
}
/**
 * Sets the plain Administrative Gender value, lazily creating the wrapping
 * {@link Enumeration} element when needed; passing null clears the element.
 *
 * @param value the gender to record, or null to clear it
 * @return this object, to allow chained (fluent) calls
 */
public RelatedPerson setGender(AdministrativeGender value) {
if (value == null) {
this.gender = null;
return this;
}
if (this.gender == null) {
this.gender = new Enumeration<AdministrativeGender>();
}
this.gender.setValue(value);
return this;
}
/**
 * @return {@link #address} (Address where the related person can be contacted or visited.)
 * May be null if the element has not been populated.
 */
public Address getAddress() {
return this.address;
}
/**
 * @param value {@link #address} (Address where the related person can be contacted or visited.)
 * @return this object, to allow chained (fluent) calls
 */
public RelatedPerson setAddress(Address value) {
this.address = value;
return this;
}
/**
 * @return {@link #photo} (Image of the person.)
 * Note: returns the live backing list, not a defensive copy; mutations affect this resource.
 */
public List<Attachment> getPhoto() {
return this.photo;
}
// syntactic sugar
/**
 * Creates a new, empty {@link Attachment}, appends it to {@link #photo}
 * and returns it so the caller can populate it in place.
 *
 * @return the freshly added {@link Attachment}
 */
public Attachment addPhoto() {
Attachment newPhoto = new Attachment();
photo.add(newPhoto);
return newPhoto;
}
/**
 * Appends a {@link Property} descriptor for each element of this resource to
 * the supplied list, after the superclass's own elements; used by the generic
 * property-reflection machinery.
 */
protected void listChildren(List<Property> childrenList) {
super.listChildren(childrenList);
childrenList.add(new Property("identifier", "Identifier", "Identifier for a person within a particular scope.", 0, java.lang.Integer.MAX_VALUE, identifier));
childrenList.add(new Property("patient", "Reference(Patient)", "The patient this person is related to.", 0, java.lang.Integer.MAX_VALUE, patient));
childrenList.add(new Property("relationship", "CodeableConcept", "The nature of the relationship between a patient and the related person.", 0, java.lang.Integer.MAX_VALUE, relationship));
childrenList.add(new Property("name", "HumanName", "A name associated with the person.", 0, java.lang.Integer.MAX_VALUE, name));
childrenList.add(new Property("telecom", "ContactPoint", "A contact detail for the person, e.g. a telephone number or an email address.", 0, java.lang.Integer.MAX_VALUE, telecom));
childrenList.add(new Property("gender", "code", "Administrative Gender - the gender that the person is considered to have for administration and record keeping purposes.", 0, java.lang.Integer.MAX_VALUE, gender));
childrenList.add(new Property("address", "Address", "Address where the related person can be contacted or visited.", 0, java.lang.Integer.MAX_VALUE, address));
childrenList.add(new Property("photo", "Attachment", "Image of the person.", 0, java.lang.Integer.MAX_VALUE, photo));
}
/**
 * @return a deep copy of this resource: each element is itself copied and the
 * list-valued elements are rebuilt into freshly allocated lists.
 */
public RelatedPerson copy() {
RelatedPerson dst = new RelatedPerson();
dst.identifier = new ArrayList<Identifier>();
for (Identifier item : this.identifier) {
dst.identifier.add(item.copy());
}
dst.patient = this.patient == null ? null : this.patient.copy();
dst.relationship = this.relationship == null ? null : this.relationship.copy();
dst.name = this.name == null ? null : this.name.copy();
dst.telecom = new ArrayList<ContactPoint>();
for (ContactPoint item : this.telecom) {
dst.telecom.add(item.copy());
}
dst.gender = this.gender == null ? null : this.gender.copy();
dst.address = this.address == null ? null : this.address.copy();
dst.photo = new ArrayList<Attachment>();
for (Attachment item : this.photo) {
dst.photo.add(item.copy());
}
return dst;
}
/**
 * Covariant copy hook; simply delegates to {@link #copy()}.
 */
protected RelatedPerson typedCopy() {
return copy();
}
/**
 * @return the fixed resource type constant for this class, {@link ResourceType#RelatedPerson}
 */
@Override
public ResourceType getResourceType() {
return ResourceType.RelatedPerson;
}
}
| |
package org.apache.hadoop.hive.cassandra;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.TypeParser;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.exceptions.SyntaxException;
import org.apache.cassandra.thrift.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.Hex;
import org.apache.hadoop.hive.cassandra.serde.AbstractCassandraSerDe;
import org.apache.hadoop.hive.ql.exec.ExprNodeConstantEvaluator;
import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer;
import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.udf.generic.*;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
import org.apache.hadoop.hive.serde2.lazy.LazyCassandraUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class CassandraPushdownPredicate {

  private static final Logger logger = LoggerFactory.getLogger(CassandraPushdownPredicate.class);

  /**
   * Get metadata for the columns which have secondary indexes.
   *
   * @param host   Cassandra node to contact
   * @param port   Thrift port of that node
   * @param ksName keyspace name
   * @param cfName column family name
   * @return a set of ColumnDefs representing the indexed columns of the cf.
   *         Only the name of the column and its validation class are required, at the
   *         moment, so all other fields are left unset
   * @throws CassandraException if a problem is encountered communicating with Cassandra
   * @throws IllegalArgumentException if the column family does not exist in the keyspace
   *         (the previous implementation failed with a NullPointerException here)
   */
  public static Set<ColumnDef> getIndexedColumns(String host, int port, String ksName, String cfName) throws CassandraException {
    // NOTE(review): CassandraProxyClient is never closed here -- confirm whether the
    // underlying Thrift connection needs explicit cleanup.
    final CassandraProxyClient client = new CassandraProxyClient(host, port, true, true);
    Set<ColumnDef> indexedColumns = new HashSet<ColumnDef>();
    try {
      KsDef ks = client.getProxyConnection().describe_keyspace(ksName);
      CfDef cfDef = null;
      for (CfDef thisCf : ks.getCf_defs()) {
        if (thisCf.getName().equalsIgnoreCase(cfName)) {
          cfDef = thisCf;
          break;
        }
      }
      // Fail fast with a descriptive message instead of dereferencing null below.
      if (cfDef == null) {
        throw new IllegalArgumentException(
            "Column family " + cfName + " not found in keyspace " + ksName);
      }
      for (ColumnDef thisColumn : cfDef.getColumn_metadata()) {
        if (thisColumn.isSetIndex_type()) {
          // Copy only the fields the pushdown machinery needs.
          ColumnDef indexed = new ColumnDef();
          indexed.setName(thisColumn.getName());
          indexed.setValidation_class(thisColumn.getValidation_class());
          indexedColumns.add(indexed);
        }
      }
    } catch (TException e) {
      throw new CassandraException(e);
    }
    return indexedColumns;
  }

  /**
   * Serialize a set of ColumnDefs for indexed columns, so that it can be
   * written to Job configuration.
   *
   * @param columns column metadata
   * @return hex-encoded Thrift-binary blobs joined with the SerDe delimiter
   */
  public static String serializeIndexedColumns(Set<ColumnDef> columns) {
    TSerializer serializer = new TSerializer(new TBinaryProtocol.Factory());
    try {
      List<String> hexStrings = new ArrayList<String>();
      for (ColumnDef column : columns) {
        String encoded = Hex.bytesToHex(serializer.serialize(column));
        logger.info("Encoded column def: " + encoded);
        hexStrings.add(encoded);
      }
      return Joiner.on(AbstractCassandraSerDe.DELIMITER).join(hexStrings);
    } catch (TException e) {
      // Serialization of a well-formed ColumnDef should never fail; treat as a bug.
      throw new RuntimeException(e);
    }
  }

  /**
   * Deserialize a set of ColumnDefs for indexed columns, read from Job
   * configuration (the inverse of {@link #serializeIndexedColumns}).
   *
   * @param serialized column metadata; may be null
   * @return set of column metadata objects which may be empty, but not null.
   *         Entries that fail to decode or lack a name/validation class are skipped.
   */
  public static Set<ColumnDef> deserializeIndexedColumns(String serialized) {
    Set<ColumnDef> columns = new HashSet<ColumnDef>();
    if (null == serialized) {
      return columns;
    }
    Iterable<String> strings = Splitter.on(AbstractCassandraSerDe.DELIMITER).omitEmptyStrings().trimResults().split(serialized);
    TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
    for (String encoded : strings) {
      ColumnDef column = new ColumnDef();
      try {
        logger.info("Encoded column def: " + encoded);
        deserializer.deserialize(column, Hex.hexToBytes(encoded));
      } catch (TException e) {
        // Best-effort: a corrupt entry is logged and skipped rather than failing the job.
        logger.warn("Error deserializing indexed column definition", e);
      }
      if (null == column.getName() || null == column.validation_class) {
        continue;
      }
      columns.add(column);
    }
    return columns;
  }

  /**
   * Given a set of indexed columns, return an IndexPredicateAnalyzer that
   * accepts only those columns and only the comparison operators Cassandra's
   * secondary indexes support.
   *
   * @param indexedColumns metadata of indexed columns
   * @return configured IndexPredicateAnalyzer
   */
  public static IndexPredicateAnalyzer newIndexPredicateAnalyzer(Set<ColumnDef> indexedColumns) {
    IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();
    // we only support C*'s set of comparisons = > >= =< <
    analyzer.addComparisonOp(GenericUDFOPEqual.class.getName());
    analyzer.addComparisonOp(GenericUDFOPEqualOrGreaterThan.class.getName());
    analyzer.addComparisonOp(GenericUDFOPGreaterThan.class.getName());
    analyzer.addComparisonOp(GenericUDFOPEqualOrLessThan.class.getName());
    analyzer.addComparisonOp(GenericUDFOPLessThan.class.getName());
    for (ColumnDef column : indexedColumns) {
      // Decode explicitly as UTF-8; the platform default charset is not portable.
      analyzer.allowColumnName(new String(column.getName(), StandardCharsets.UTF_8));
    }
    return analyzer;
  }

  /**
   * An IndexClause in C* must always include at least 1 EQ condition.
   * Validate this constraint is satisfied by the list of
   * IndexSearchConditions.
   *
   * @param conditions the candidate pushdown conditions
   * @return true if there is an EQ operator present, otherwise false
   */
  public static boolean verifySearchConditions(List<IndexSearchCondition> conditions) {
    for (IndexSearchCondition thisCon : conditions) {
      if (thisCon.getComparisonOp().equals(GenericUDFOPEqual.class.getName())) {
        return true;
      }
    }
    return false;
  }

  /**
   * Translate the list of Hive SearchConditions into C* IndexExpressions.
   *
   * @param conditions     a list of index search conditions
   * @param indexedColumns metadata used to locate each column's validator
   * @return list of IndexExpressions, which may be empty but not null
   * @throws IOException if a condition's operator or constant cannot be translated
   */
  public static List<IndexExpression> translateSearchConditions(List<IndexSearchCondition> conditions, Set<ColumnDef> indexedColumns) throws IOException {
    List<IndexExpression> exps = new ArrayList<IndexExpression>();
    for (IndexSearchCondition thisCond : conditions) {
      exps.add(translateSearchCondition(thisCond, indexedColumns));
    }
    return exps;
  }

  /**
   * Translates a single Hive search condition into a Cassandra IndexExpression:
   * column name, index operator, and the constant value serialized via the
   * column's Cassandra validator.
   */
  private static IndexExpression translateSearchCondition(IndexSearchCondition condition, Set<ColumnDef> columnInfos) throws IOException {
    IndexExpression expr = new IndexExpression();
    String columnName = condition.getColumnDesc().getColumn();
    // Encode explicitly as UTF-8 rather than relying on the platform default.
    expr.setColumn_name(columnName.getBytes(StandardCharsets.UTF_8));
    expr.setOp(getIndexOperator(condition.getComparisonOp()));
    ExprNodeConstantEvaluator eval = new ExprNodeConstantEvaluator(condition.getConstantDesc());
    byte[] value;
    try {
      ObjectInspector objInspector = eval.initialize(null);
      Object writable = eval.evaluate(null);
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) objInspector;
      AbstractType validator = getValidator(columnInfos, columnName);
      ByteBuffer bytes = getIndexExpressionValue(condition.getConstantDesc(), poi, writable, validator);
      // ByteBufferUtil.getArray already returns a fresh copy of the buffer's
      // remaining bytes, so the old intermediate ByteStream round-trip is unnecessary.
      value = ByteBufferUtil.getArray(bytes);
    } catch (HiveException e) {
      throw new IOException(e);
    }
    expr.setValue(value);
    logger.info("IndexExpression.value : {}", new String(expr.getValue(), StandardCharsets.UTF_8));
    return expr;
  }

  /**
   * Looks up the Cassandra {@link AbstractType} validator for the named column.
   *
   * @throws RuntimeException if the validator string cannot be parsed or the
   *         column is not present in {@code columnInfos}
   */
  private static AbstractType getValidator(Set<ColumnDef> columnInfos, String columnName) {
    for (ColumnDef column : columnInfos) {
      if (new String(column.getName(), StandardCharsets.UTF_8).equals(columnName)) {
        try {
          return TypeParser.parse(column.validation_class);
        } catch (ConfigurationException e) {
          logger.error("Error creating validator from string {}", column.validation_class);
          throw new RuntimeException(e);
        } catch (SyntaxException e) {
          logger.error("Syntax exception in parsing: \n {}", e.getMessage());
          throw new RuntimeException(e);
        }
      }
    }
    logger.error("Error finding validator class for column {}", columnName);
    throw new RuntimeException("Error finding validator class for column " + columnName);
  }

  /**
   * Converts the evaluated Hive constant into the ByteBuffer form expected by
   * the column's Cassandra validator.
   */
  private static ByteBuffer getIndexExpressionValue(ExprNodeConstantDesc constantDesc, PrimitiveObjectInspector poi, Object writable, AbstractType validator) {
    logger.info("Primitive Category: {}, Validation class: {}, CassandraType: {}",
        new Object[]{poi.getPrimitiveCategory(), validator.getClass().getName(), LazyCassandraUtils.getCassandraType(poi)});
    switch (poi.getPrimitiveCategory()) {
      case TIMESTAMP:
        // Timestamps are reduced to a date string before handing to the validator.
        String dateString = new java.sql.Date(
            ((java.sql.Timestamp) poi.getPrimitiveJavaObject(writable)).getTime())
            .toString();
        return validator.fromString(dateString);
      case BINARY:
        byte[] bytes = ((ByteArrayRef) poi.getPrimitiveJavaObject(writable)).getData();
        // this will only work if the value has been cast using one of the UDFs
        // UDFHexToBytes, UDFUuid, UDFDecimal, UDFVarint
        return ByteBuffer.wrap(bytes);
      default:
        return validator.fromString(constantDesc.getValue().toString());
    }
  }

  /**
   * Maps a Hive generic-UDF comparison class name to the corresponding
   * Cassandra {@link IndexOperator}.
   *
   * @throws IOException if the operator is not one Cassandra supports
   */
  private static IndexOperator getIndexOperator(String str) throws IOException {
    if (str.equals(GenericUDFOPEqual.class.getName())) {
      return IndexOperator.EQ;
    } else if (str.equals(GenericUDFOPEqualOrGreaterThan.class.getName())) {
      return IndexOperator.GTE;
    } else if (str.equals(GenericUDFOPGreaterThan.class.getName())) {
      return IndexOperator.GT;
    } else if (str.equals(GenericUDFOPEqualOrLessThan.class.getName())) {
      return IndexOperator.LTE;
    } else if (str.equals(GenericUDFOPLessThan.class.getName())) {
      return IndexOperator.LT;
    } else {
      throw new IOException("Unable to get index operator matches " + str);
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.