gt stringclasses 1 value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.pinpointemail.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Represents the email message that you're sending. The <code>Message</code> object consists of a subject line and a
* message body.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-email-2018-07-26/Message" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Message implements Serializable, Cloneable, StructuredPojo {

    /**
     * The subject line of the email. Subjects are limited to 7-bit ASCII; non-ASCII text must be
     * expressed with the encoded-word syntax of <a href="https://tools.ietf.org/html/rfc2047">RFC
     * 2047</a>.
     */
    private Content subject;

    /**
     * The body of the message: an HTML version, a text-only version, or both.
     */
    private Body body;

    /**
     * Sets the subject line of the email.
     *
     * @param subject
     *        The subject line. Only 7-bit ASCII characters are allowed directly; use the
     *        encoded-word syntax of <a href="https://tools.ietf.org/html/rfc2047">RFC 2047</a> for
     *        anything else.
     */
    public void setSubject(Content subject) {
        this.subject = subject;
    }

    /**
     * Returns the subject line of the email.
     *
     * @return The subject line. Only 7-bit ASCII characters are allowed directly; non-ASCII content
     *         uses <a href="https://tools.ietf.org/html/rfc2047">RFC 2047</a> encoded-word syntax.
     */
    public Content getSubject() {
        return this.subject;
    }

    /**
     * Fluent variant of {@link #setSubject(Content)}.
     *
     * @param subject
     *        The subject line of the email.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Message withSubject(Content subject) {
        setSubject(subject);
        return this;
    }

    /**
     * Sets the body of the message.
     *
     * @param body
     *        The message body: an HTML version, a text-only version, or both.
     */
    public void setBody(Body body) {
        this.body = body;
    }

    /**
     * Returns the body of the message.
     *
     * @return The message body: an HTML version, a text-only version, or both.
     */
    public Body getBody() {
        return this.body;
    }

    /**
     * Fluent variant of {@link #setBody(Body)}.
     *
     * @param body
     *        The message body: an HTML version, a text-only version, or both.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Message withBody(Body body) {
        setBody(body);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging. Sensitive
     * data will be redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getSubject() != null) {
            buf.append("Subject: ").append(getSubject()).append(",");
        }
        if (getBody() != null) {
            buf.append("Body: ").append(getBody());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Message)) {
            return false;
        }
        Message that = (Message) obj;
        // Objects.equals is true iff both are null or both are non-null and equal — the same
        // semantics as the generated null-XOR / equals pair this replaces.
        return java.util.Objects.equals(getSubject(), that.getSubject())
                && java.util.Objects.equals(getBody(), that.getBody());
    }

    @Override
    public int hashCode() {
        // Objects.hash folds the members with prime 31 and treats null as 0, producing exactly
        // the same value as the hand-rolled 31 * h + field chain it replaces.
        return java.util.Objects.hash(getSubject(), getBody());
    }

    @Override
    public Message clone() {
        try {
            return (Message) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.pinpointemail.model.transform.MessageMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Copyright 2015 Fabien Renaud.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lympid.core.behaviorstatemachines.composite;
import com.lympid.core.basicbehaviors.StringEvent;
import com.lympid.core.behaviorstatemachines.AbstractStateMachineTest;
import com.lympid.core.behaviorstatemachines.ActiveStateTree;
import com.lympid.core.behaviorstatemachines.FinalStateTest;
import com.lympid.core.behaviorstatemachines.InitialPseudoStateTest;
import com.lympid.core.behaviorstatemachines.Region;
import com.lympid.core.behaviorstatemachines.RegionTest;
import com.lympid.core.behaviorstatemachines.SequentialContext;
import com.lympid.core.behaviorstatemachines.SimpleStateTest;
import com.lympid.core.behaviorstatemachines.State;
import com.lympid.core.behaviorstatemachines.StateMachineExecutor;
import com.lympid.core.behaviorstatemachines.StateMachineTester;
import com.lympid.core.behaviorstatemachines.TransitionTest;
import com.lympid.core.behaviorstatemachines.Vertex;
import com.lympid.core.behaviorstatemachines.VertexTest;
import com.lympid.core.behaviorstatemachines.builder.CompositeStateBuilder;
import com.lympid.core.behaviorstatemachines.builder.ExitPointBuilder;
import com.lympid.core.behaviorstatemachines.builder.StateMachineBuilder;
import com.lympid.core.behaviorstatemachines.composite.Test15.Context;
import org.junit.Test;
import static com.lympid.core.behaviorstatemachines.StateMachineProcessorTester.assertSnapshotEquals;
import static org.junit.Assert.assertEquals;
/**
 * Tests an external transition which has the exit point of a composite state as
 * source and which targets the composite state itself.
 *
 * @author Fabien Renaud
 */
public class Test15 extends AbstractStateMachineTest<Context> {
// Checks the static model: the top-level region holds an initial pseudo state, composite
// state "A" and a final state, wired by t0 (#5), the "go" transition (#12), and the
// external transition #14 from A's exit point back to A itself.
@Test
public void model() {
assertEquals(getClass().getSimpleName(), topLevelStateMachine().getName());
Region region = StateMachineTester.assertTopLevelStateMachine(topLevelStateMachine());
StateMachineTester.assertRegion(region, 3, 3,
new VertexTest[]{
new InitialPseudoStateTest("#4"),
new VertexTest("A", this::verifyA),
new FinalStateTest("end")
},
new TransitionTest[]{
new TransitionTest("#5", "#4", "A"),
new TransitionTest("#12", "Aa", "end"),
new TransitionTest("#14", "A_exitPoint", "A")
}
);
}
// Verifies the interior of composite state "A": a single region containing its own
// initial pseudo state and simple state "Aa", plus the transition to the exit point.
private void verifyA(Vertex v) {
State s = StateMachineTester.assertComposite(v);
StateMachineTester.assertRegions(s.region(), 1,
new RegionTest("7", null, 2, 2,
new VertexTest[]{
new InitialPseudoStateTest("#8"),
new SimpleStateTest("Aa")
},
new TransitionTest[]{
new TransitionTest("#9", "#8", "Aa"),
new TransitionTest("#11", "Aa", "A_exitPoint")
}
)
);
}
// Immediate "go" with no "let" round trips through the exit point.
@Test
public void run_go() {
run_letN_go(0);
}
// One "let" round trip (exit A via its exit point, re-enter A) before "go".
@Test
public void run_let_go() {
run_letN_go(1);
}
// Five "let" round trips before "go".
@Test
public void run_let5_go() {
run_letN_go(5);
}
// Fires "let" n times — each pass exits Aa and A, runs t3 from the exit point, and
// re-enters A/Aa — then fires "go" and checks both the final active-state snapshot and
// the ordered enter/exit/effect trace accumulated in the context.
private void run_letN_go(final int n) {
SequentialContext expected = new SequentialContext()
.effect("t0").enter("A")
.effect("t1").enter("Aa");
for (int i = 0; i < n; i++) {
expected
.exit("Aa").effect("t2")
.exit("A").effect("t3").enter("A").effect("t1").enter("Aa");
}
expected
.exit("Aa").exit("A").effect("t4");
Context ctx = new Context();
StateMachineExecutor<Context> fsm = fsm(ctx);
fsm.go();
assertSnapshotEquals(fsm, new ActiveStateTree(this).branch("A", "Aa"));
for (int i = 0; i < n; i++) {
fsm.take(new StringEvent("let"));
assertSnapshotEquals(fsm, new ActiveStateTree(this).branch("A", "Aa"));
}
fsm.take(new StringEvent("go"));
assertSnapshotEquals(fsm, new ActiveStateTree(this).branch("end"));
assertSequentialContextEquals(expected, fsm);
}
// Builds the machine under test. NOTE: the order of builder calls determines the
// auto-assigned element ids (#4, #5, #8, ...) asserted in model() and STDOUT, so the
// call sequence must not be reordered.
@Override
public StateMachineBuilder<Context> topLevelMachineBuilder() {
StateMachineBuilder<Context> builder = new StateMachineBuilder<>(name());
builder
.region()
.finalState("end");
builder
.region()
.initial()
.transition("t0")
.target("A");
builder
.region()
.state(compositeA("A"));
return builder;
}
// Composite state "A": inner state "Aa" reacts to "let" (t2, towards the exit point)
// and "go" (t4, towards the top-level final state); the exit point itself carries the
// external transition t3 back to "A".
private CompositeStateBuilder<Context> compositeA(final String name) {
CompositeStateBuilder<Context> builder = new CompositeStateBuilder<>(name);
builder
.region()
.initial()
.transition("t1")
.target("Aa");
builder
.region()
.state("Aa")
.transition("t2")
.on("let")
.target("A_exitPoint")
.transition("t4")
.on("go")
.target("end");
builder
.connectionPoint()
.exitPoint(new ExitPointBuilder<Context>("A_exitPoint")
.transition("t3")
.target("A")
);
return builder;
}
@Override
public String stdOut() {
return STDOUT;
}
// Test-local context; only the sequential-trace recording inherited from the parent is used.
public static final class Context extends SequentialContext {
}
// Expected textual dump of the machine structure, compared by the base class.
private static final String STDOUT = "StateMachine: \"" + Test15.class.getSimpleName() + "\"\n" +
" Region: #2\n" +
" FinalState: \"end\"\n" +
" PseudoState: #4 kind: INITIAL\n" +
" State: \"A\"\n" +
" ExitPoint: \"A_exitPoint\"\n" +
" Region: #7\n" +
" PseudoState: #8 kind: INITIAL\n" +
" State: \"Aa\"\n" +
" Transition: \"t1\" --- #8 -> \"Aa\"\n" +
" Transition: \"t2\" --- \"Aa\" -> \"A_exitPoint\"\n" +
" Transition: \"t0\" --- #4 -> \"A\"\n" +
" Transition: \"t4\" --- \"Aa\" -> \"end\"\n" +
" Transition: \"t3\" --- \"A_exitPoint\" -> \"A\"";
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.AllocateTableWriteIdsRequest;
import org.apache.hadoop.hive.metastore.api.OpenTxnRequest;
import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse;
import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
import org.apache.hadoop.hive.metastore.txn.TxnStore;
import org.apache.hadoop.hive.ql.DriverFactory;
import org.apache.hadoop.hive.ql.IDriver;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.shims.Utils;
import org.junit.rules.TestName;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.AfterClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Collections;
import java.util.Map;
import static org.apache.hadoop.hive.metastore.ReplChangeManager.SOURCE_OF_REPLICATION;
/**
 * BaseReplicationScenariosAcidTables - base class for replication tests on ACID tables.
 */
public class BaseReplicationScenariosAcidTables {

  @Rule
  public final TestName testName = new TestName();

  // NOTE(review): the logger is bound to TestReplicationScenarios.class rather than this class —
  // presumably a copy/paste left-over. Left as-is because log-category names may be referenced by
  // logging configuration; confirm before changing.
  protected static final Logger LOG = LoggerFactory.getLogger(TestReplicationScenarios.class);

  // Source warehouse and the two replication targets (ACID-enabled and non-ACID).
  static WarehouseInstance primary;
  static WarehouseInstance replica, replicaNonAcid;
  static HiveConf conf;

  // Per-test database names, regenerated in setup() for isolation.
  String primaryDbName, replicatedDbName;

  // Tables created by the prepare* helpers; consumed by the verify* methods.
  List<String> acidTableNames = new LinkedList<>();
  private List<String> nonAcidTableNames = new LinkedList<>();

  /**
   * Boots a single-node MiniDFS cluster and creates the three warehouse instances used by the
   * tests: an ACID-enabled source ({@code primary}), an ACID-enabled target ({@code replica}),
   * and a non-ACID target ({@code replicaNonAcid}).
   *
   * @param overrides extra HiveConf settings layered on top of the ACID defaults
   * @param clazz     class whose resources seed the HiveConf
   */
  static void internalBeforeClassSetup(Map<String, String> overrides, Class clazz)
      throws Exception {
    conf = new HiveConf(clazz);
    conf.set("dfs.client.use.datanode.hostname", "true");
    conf.set("hadoop.proxyuser." + Utils.getUGI().getShortUserName() + ".hosts", "*");
    MiniDFSCluster miniDFSCluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(1).format(true).build();
    HashMap<String, String> acidEnableConf = new HashMap<String, String>() {{
      put("fs.defaultFS", miniDFSCluster.getFileSystem().getUri().toString());
      put("hive.support.concurrency", "true");
      put("hive.txn.manager", "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
      put("hive.metastore.client.capability.check", "false");
      put("hive.repl.bootstrap.dump.open.txn.timeout", "1s");
      put("hive.exec.dynamic.partition.mode", "nonstrict");
      put("hive.strict.checks.bucketing", "false");
      put("hive.mapred.mode", "nonstrict");
      put("mapred.input.dir.recursive", "true");
      put("hive.metastore.disallow.incompatible.col.type.changes", "false");
      put("hive.in.repl.test", "true");
    }};
    acidEnableConf.putAll(overrides);
    primary = new WarehouseInstance(LOG, miniDFSCluster, acidEnableConf);
    replica = new WarehouseInstance(LOG, miniDFSCluster, acidEnableConf);
    // The non-ACID replica runs with concurrency disabled and the dummy txn manager.
    HashMap<String, String> overridesForHiveConf1 = new HashMap<String, String>() {{
      put("fs.defaultFS", miniDFSCluster.getFileSystem().getUri().toString());
      put("hive.support.concurrency", "false");
      put("hive.txn.manager", "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
      put("hive.metastore.client.capability.check", "false");
    }};
    replicaNonAcid = new WarehouseInstance(LOG, miniDFSCluster, overridesForHiveConf1);
  }

  /**
   * Closes all warehouse instances opened by {@link #internalBeforeClassSetup}. Null guards keep
   * a failed setup from masking the original error with an NPE here.
   */
  @AfterClass
  public static void classLevelTearDown() throws IOException {
    if (primary != null) {
      primary.close();
    }
    if (replica != null) {
      replica.close();
    }
    // Bug fix: replicaNonAcid was previously never closed, leaking its resources.
    if (replicaNonAcid != null) {
      replicaNonAcid.close();
    }
  }

  /**
   * Creates a fresh, uniquely-named source database (plus an "_extra" sibling) before each test.
   */
  @Before
  public void setup() throws Throwable {
    // Timestamp suffix keeps database names unique across test methods and reruns.
    // (Dropped the stray unary '+' that used to precede System.currentTimeMillis().)
    primaryDbName = testName.getMethodName() + "_" + System.currentTimeMillis();
    replicatedDbName = "replicated_" + primaryDbName;
    primary.run("create database " + primaryDbName + " WITH DBPROPERTIES ( '" +
        SOURCE_OF_REPLICATION + "' = '1,2,3')");
    String primaryDbNameExtra = primaryDbName + "_extra";
    primary.run("create database " + primaryDbNameExtra + " WITH DBPROPERTIES ( '" +
        SOURCE_OF_REPLICATION + "' = '1,2,3')");
  }

  /**
   * Drops every database created by {@link #setup()} so each test starts clean.
   */
  @After
  public void tearDown() throws Throwable {
    primary.run("drop database if exists " + primaryDbName + " cascade");
    replica.run("drop database if exists " + replicatedDbName + " cascade");
    replicaNonAcid.run("drop database if exists " + replicatedDbName + " cascade");
    primary.run("drop database if exists " + primaryDbName + "_extra cascade");
  }

  // Creates the ACID fixtures: full-ACID t1, partitioned t2, insert-only t3, and t5.
  private void prepareAcidData(String primaryDbName) throws Throwable {
    primary.run("use " + primaryDbName)
        .run("create table t1 (id int) clustered by(id) into 3 buckets stored as orc " +
            "tblproperties (\"transactional\"=\"true\")")
        .run("insert into t1 values(1)")
        .run("insert into t1 values(2)")
        .run("create table t2 (place string) partitioned by (country string) clustered by(place) " +
            "into 3 buckets stored as orc tblproperties (\"transactional\"=\"true\")")
        .run("insert into t2 partition(country='india') values ('bangalore')")
        .run("insert into t2 partition(country='us') values ('austin')")
        .run("insert into t2 partition(country='france') values ('paris')")
        .run("alter table t2 add partition(country='italy')")
        .run("create table t3 (rank int) tblproperties(\"transactional\"=\"true\", " +
            "\"transactional_properties\"=\"insert_only\")")
        .run("insert into t3 values(11)")
        .run("insert into t3 values(22)")
        .run("create table t5 (id int) stored as orc tblproperties (\"transactional\"=\"true\")")
        .run("insert into t5 values(1111), (2222), (3333)");
    acidTableNames.add("t1");
    acidTableNames.add("t2");
    acidTableNames.add("t3");
    acidTableNames.add("t5");
  }

  // Creates the single non-ACID fixture table t4.
  private void prepareNonAcidData(String primaryDbName) throws Throwable {
    primary.run("use " + primaryDbName)
        .run("create table t4 (id int)")
        .run("insert into t4 values(111), (222)");
    nonAcidTableNames.add("t4");
  }

  /**
   * Populates both ACID and non-ACID fixtures and performs a dump of the source database.
   *
   * @param primaryDbName source database to populate and dump
   * @param fromReplId    replication id to dump from (null for bootstrap)
   * @param withClause    extra WITH-clause options; may be null for none
   * @return the dump result tuple (dump location and last replication id)
   */
  WarehouseInstance.Tuple prepareDataAndDump(String primaryDbName, String fromReplId,
                                             List<String> withClause) throws Throwable {
    prepareAcidData(primaryDbName);
    prepareNonAcidData(primaryDbName);
    return primary.run("use " + primaryDbName)
        .dump(primaryDbName, fromReplId, withClause != null ?
            withClause : Collections.emptyList());
  }

  // Checks that t4's rows arrived on the replica.
  private void verifyNonAcidTableLoad(String replicatedDbName) throws Throwable {
    replica.run("use " + replicatedDbName)
        .run("select id from t4 order by id")
        .verifyResults(new String[] {"111", "222"});
  }

  // Checks that the bootstrap contents of t1, t2, t3 and t5 arrived on the replica.
  private void verifyAcidTableLoad(String replicatedDbName) throws Throwable {
    replica.run("use " + replicatedDbName)
        .run("select id from t1 order by id")
        .verifyResults(new String[]{"1", "2"})
        .run("select country from t2 order by country")
        .verifyResults(new String[] {"france", "india", "us"})
        .run("select rank from t3 order by rank")
        .verifyResults(new String[] {"11", "22"})
        .run("select id from t5 order by id")
        .verifyResults(new String[] {"1111", "2222", "3333"});
  }

  /**
   * Verifies a bootstrap load: table list, repl status, and (optionally) ACID table contents.
   *
   * @param replicatedDbName target database to inspect
   * @param lastReplId       expected replication id reported by "repl status"
   * @param includeAcid      whether ACID tables were expected to be replicated
   */
  void verifyLoadExecution(String replicatedDbName, String lastReplId, boolean includeAcid)
      throws Throwable {
    List<String> tableNames = new LinkedList<>(nonAcidTableNames);
    if (includeAcid) {
      tableNames.addAll(acidTableNames);
    }
    replica.run("use " + replicatedDbName)
        .run("show tables")
        .verifyResults(tableNames)
        .run("repl status " + replicatedDbName)
        .verifyResult(lastReplId);
    verifyNonAcidTableLoad(replicatedDbName);
    if (includeAcid) {
      verifyAcidTableLoad(replicatedDbName);
    }
  }

  // First incremental ACID changes: CTAS table t6, schema change + update on t2.
  void prepareIncAcidData(String dbName) throws Throwable {
    primary.run("use " + dbName)
        .run("create table t6 stored as orc tblproperties (\"transactional\"=\"true\")" +
            " as select * from t1")
        .run("alter table t2 add columns (placetype string)")
        .run("update t2 set placetype = 'city'");
    acidTableNames.add("t6");
  }

  // Verifies the state after the first incremental ACID load.
  private void verifyIncAcidLoad(String dbName) throws Throwable {
    replica.run("use " + dbName)
        .run("select id from t6 order by id")
        .verifyResults(new String[]{"1", "2"})
        .run("select country from t2 order by country")
        .verifyResults(new String[] {"france", "india", "us"})
        .run("select distinct placetype from t2")
        .verifyResult("city")
        .run("select id from t1 order by id")
        .verifyResults(new String[]{"1", "2"})
        .run("select rank from t3 order by rank")
        .verifyResults(new String[] {"11", "22"})
        .run("select id from t5 order by id")
        .verifyResults(new String[] {"1111", "2222", "3333"});
  }

  // Runs one statement through the given driver, rethrowing any failure it recorded.
  private void runUsingDriver(IDriver driver, String command) throws Throwable {
    CommandProcessorResponse ret = driver.run(command);
    if (ret.getException() != null) {
      throw ret.getException();
    }
  }

  // Second incremental ACID changes, executed through an explicit driver/session.
  void prepareInc2AcidData(String dbName, HiveConf hiveConf) throws Throwable {
    IDriver driver = DriverFactory.newDriver(hiveConf);
    SessionState.start(new CliSessionState(hiveConf));
    runUsingDriver(driver, "use " + dbName);
    runUsingDriver(driver, "insert into t1 values (3)");
    runUsingDriver(driver, "insert into t5 values (4444)");
  }

  // Verifies the state after the second incremental ACID load.
  private void verifyInc2AcidLoad(String dbName) throws Throwable {
    replica.run("use " + dbName)
        .run("select id from t6 order by id")
        .verifyResults(new String[]{"1", "2"})
        .run("select country from t2 order by country")
        .verifyResults(new String[] {"france", "india", "us"})
        .run("select distinct placetype from t2")
        .verifyResult("city")
        .run("select id from t1 order by id")
        .verifyResults(new String[]{"1", "2", "3"})
        .run("select rank from t3 order by rank")
        .verifyResults(new String[] {"11", "22"})
        .run("select id from t5 order by id")
        .verifyResults(new String[] {"1111", "2222", "3333", "4444"});
  }

  // First incremental non-ACID changes: append to t4, create t7.
  void prepareIncNonAcidData(String dbName) throws Throwable {
    primary.run("use " + dbName)
        .run("insert into t4 values (333)")
        .run("create table t7 (str string)")
        .run("insert into t7 values ('aaa')");
    nonAcidTableNames.add("t7");
  }

  // Verifies the state after the first incremental non-ACID load.
  private void verifyIncNonAcidLoad(String dbName) throws Throwable {
    replica.run("use " + dbName)
        .run("select * from t4 order by id")
        .verifyResults(new String[] {"111", "222", "333"})
        .run("select * from t7")
        .verifyResult("aaa");
  }

  // Second incremental non-ACID changes, executed through an explicit driver/session.
  void prepareInc2NonAcidData(String dbName, HiveConf hiveConf) throws Throwable {
    IDriver driver = DriverFactory.newDriver(hiveConf);
    SessionState.start(new CliSessionState(hiveConf));
    runUsingDriver(driver, "use " + dbName);
    runUsingDriver(driver, "insert into t4 values (444)");
    runUsingDriver(driver, "insert into t7 values ('bbb')");
  }

  // Verifies the state after the second incremental non-ACID load.
  private void verifyInc2NonAcidLoad(String dbName) throws Throwable {
    replica.run("use " + dbName)
        .run("select * from t4 order by id")
        .verifyResults(new String[] {"111", "222", "333", "444"})
        .run("select * from t7")
        .verifyResults(new String[] {"aaa", "bbb"});
  }

  /**
   * Verifies the first incremental load: table list, repl status, then both table families.
   */
  void verifyIncLoad(String dbName, String lastReplId)
      throws Throwable {
    List<String> tableNames = new LinkedList<>(nonAcidTableNames);
    tableNames.addAll(acidTableNames);
    replica.run("use " + dbName)
        .run("show tables")
        .verifyResults(tableNames)
        .run("repl status " + dbName)
        .verifyResult(lastReplId);
    verifyIncNonAcidLoad(dbName);
    verifyIncAcidLoad(dbName);
  }

  /**
   * Verifies the second incremental load: table list, repl status, then both table families.
   */
  void verifyInc2Load(String dbName, String lastReplId)
      throws Throwable {
    List<String> tableNames = new LinkedList<>(nonAcidTableNames);
    tableNames.addAll(acidTableNames);
    replica.run("use " + dbName)
        .run("show tables")
        .verifyResults(tableNames)
        .run("repl status " + dbName)
        .verifyResult(lastReplId);
    verifyInc2NonAcidLoad(dbName);
    verifyInc2AcidLoad(dbName);
  }

  /**
   * Opens {@code numTxns} transactions on the metastore and asserts they all show as open.
   *
   * @return the ids of the opened transactions
   */
  List<Long> openTxns(int numTxns, TxnStore txnHandler, HiveConf primaryConf) throws Throwable {
    OpenTxnsResponse otResp = txnHandler.openTxns(new OpenTxnRequest(numTxns, "u1", "localhost"));
    List<Long> txns = otResp.getTxn_ids();
    String txnIdRange = " txn_id >= " + txns.get(0) + " and txn_id <= " + txns.get(numTxns - 1);
    Assert.assertEquals(TxnDbUtil.queryToString(primaryConf, "select * from TXNS"),
        numTxns, TxnDbUtil.countQueryAgent(primaryConf,
            "select count(*) from TXNS where txn_state = 'o' and " + txnIdRange));
    return txns;
  }

  /**
   * Allocates write ids for every given table under every given transaction, then verifies the
   * TXN_TO_WRITE_ID table reflects the expected per-table counts.
   *
   * @param tables map of table name to expected TXN_TO_WRITE_ID row count
   */
  void allocateWriteIdsForTables(String primaryDbName, Map<String, Long> tables,
                                 TxnStore txnHandler,
                                 List<Long> txns, HiveConf primaryConf) throws Throwable {
    AllocateTableWriteIdsRequest rqst = new AllocateTableWriteIdsRequest();
    rqst.setDbName(primaryDbName);
    for (Map.Entry<String, Long> entry : tables.entrySet()) {
      rqst.setTableName(entry.getKey());
      rqst.setTxnIds(txns);
      txnHandler.allocateTableWriteIds(rqst);
    }
    verifyWriteIdsForTables(tables, primaryConf, primaryDbName);
  }

  /**
   * Asserts that TXN_TO_WRITE_ID holds the expected number of rows per table.
   */
  void verifyWriteIdsForTables(Map<String, Long> tables, HiveConf conf, String dbName)
      throws Throwable {
    for (Map.Entry<String, Long> entry : tables.entrySet()) {
      Assert.assertEquals(TxnDbUtil.queryToString(conf, "select * from TXN_TO_WRITE_ID"),
          entry.getValue().longValue(),
          TxnDbUtil.countQueryAgent(conf,
              "select count(*) from TXN_TO_WRITE_ID where t2w_database = '"
                  + dbName.toLowerCase()
                  + "' and t2w_table = '" + entry.getKey() + "'"));
    }
  }

  /**
   * Asserts that none of the given transactions remain open and that all of them are aborted.
   */
  void verifyAllOpenTxnsAborted(List<Long> txns, HiveConf primaryConf) throws Throwable {
    int numTxns = txns.size();
    String txnIdRange = " txn_id >= " + txns.get(0) + " and txn_id <= " + txns.get(numTxns - 1);
    Assert.assertEquals(TxnDbUtil.queryToString(primaryConf, "select * from TXNS"),
        0, TxnDbUtil.countQueryAgent(primaryConf,
            "select count(*) from TXNS where txn_state = 'o' and " + txnIdRange));
    Assert.assertEquals(TxnDbUtil.queryToString(primaryConf, "select * from TXNS"),
        numTxns, TxnDbUtil.countQueryAgent(primaryConf,
            "select count(*) from TXNS where txn_state = 'a' and " + txnIdRange));
  }

  /**
   * Verifies the next write id recorded for each table is one past the expected high water mark.
   */
  void verifyNextId(Map<String, Long> tables, String dbName, HiveConf conf) throws Throwable {
    for (Map.Entry<String, Long> entry : tables.entrySet()) {
      // queryToString returns a header line followed by the value row, hence index [1].
      String[] nextWriteId =
          TxnDbUtil.queryToString(conf,
              "select nwi_next from NEXT_WRITE_ID where nwi_database = '"
                  + dbName.toLowerCase() + "' and nwi_table = '"
                  + entry.getKey() + "'").split("\n");
      Assert.assertEquals(Long.parseLong(nextWriteId[1].trim()), entry.getValue() + 1);
    }
  }

  /**
   * Asserts that COMPACTION_QUEUE holds the expected number of entries per table.
   */
  void verifyCompactionQueue(Map<String, Long> tables, String dbName, HiveConf conf)
      throws Throwable {
    for (Map.Entry<String, Long> entry : tables.entrySet()) {
      Assert.assertEquals(TxnDbUtil.queryToString(conf, "select * from COMPACTION_QUEUE"),
          entry.getValue().longValue(),
          TxnDbUtil.countQueryAgent(conf,
              "select count(*) from COMPACTION_QUEUE where cq_database = '" + dbName
                  + "' and cq_table = '" + entry.getKey() + "'"));
    }
  }
}
| |
package org.lemsml.jlems.viz.plot;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import javax.swing.JFrame;
import org.lemsml.jlems.core.logging.E;
public class DataDisplay extends BasePanel implements ModeSettable, Repaintable, PaintListener {

    static final long serialVersionUID = 1001;

    XAxisCanvas xAxisCanvas;
    YAxisCanvas yAxisCanvas;
    PickWorldCanvas pwCanvas;
    CornerPanel cornerPanel;
    BasePanel controlPanel;
    AboveBelowSplitPanel ab1;
    AboveBelowSplitPanel ab2;
    LeftRightSplitPanel lr;
    GraphColors gcols;
    RangeWatcher rangeWatcher;

    // When false, the pick-world canvas is constructed in non-interactive (batch) mode.
    static boolean interactive = true;

    /** Switches subsequently-created displays into non-interactive (batch) mode. */
    public static void setBatch() {
        interactive = false;
    }

    /**
     * Builds the display: a central plot canvas with an x axis below it, a y axis to its left
     * and a corner filler, all joined by split panels so the margins track resizes; the canvas
     * notifies the axes of range changes and this panel of repaints.
     */
    public DataDisplay() {
        super();
        gcols = new GraphColors();

        final int axisWidth = 64;   // left margin reserved for the y axis
        final int axisHeight = 32;  // bottom margin reserved for the x axis

        xAxisCanvas = new XAxisCanvas();
        yAxisCanvas = new YAxisCanvas();
        xAxisCanvas.setMinimumSize(new Dimension(200, axisHeight));
        xAxisCanvas.setPreferredSize(new Dimension(200, axisHeight));
        yAxisCanvas.setMinimumSize(new Dimension(axisWidth, 200));
        yAxisCanvas.setPreferredSize(new Dimension(axisWidth, 200));

        pwCanvas = new PickWorldCanvas(interactive);
        cornerPanel = new CornerPanel();

        ab1 = new AboveBelowSplitPanel(yAxisCanvas, cornerPanel, gcols);
        ab2 = new AboveBelowSplitPanel(pwCanvas, xAxisCanvas, gcols);
        ab1.setResizeWeight(0.95);
        ab2.setResizeWeight(0.95);

        lr = new LeftRightSplitPanel(ab1, ab2, gcols);
        lr.setResizeWeight(0.0);

        setLayout(new BorderLayout(0, 0));
        add(lr, BorderLayout.CENTER);

        pwCanvas.addRangeListener(xAxisCanvas);
        pwCanvas.addRangeListener(yAxisCanvas);
        pwCanvas.addPaintListener(this);
    }

    /** Invalidates both axes, revalidates the component tree, and repaints everything. */
    public void repaintAll() {
        xAxisCanvas.invalidate();
        yAxisCanvas.invalidate();
        validateTree();
        xAxisCanvas.repaint();
        yAxisCanvas.repaint();
        repaint();
    }

    /** Paint callback: keeps the y-axis panel height in sync with the plot canvas. */
    public void painted() {
        int canvasHeight = pwCanvas.getHeight();
        if (yAxisCanvas.getHeight() != canvasHeight) {
            ab1.setDividerLocation(canvasHeight);
        }
    }

    /** Sets the plot background and a slightly brighter border background. */
    public void setBg(Color c) {
        setDataBg(c);
        setBorderBg(c.brighter());
    }

    /** Sets the background of the data (plot) area. */
    public void setDataBg(Color c) {
        gcols.setGraphBg(c);
        pwCanvas.setBg(c);
    }

    /** Sets the background of the axis and corner border panels. */
    public void setBorderBg(Color c) {
        gcols.setBorderBg(c);
        xAxisCanvas.setBg(c);
        yAxisCanvas.setBg(c);
        cornerPanel.setBg(c);
    }

    public void setMode(String dom, String mod) {
        pwCanvas.setMode(dom, mod);
    }

    public void setMode(String dom, boolean b) {
        pwCanvas.setMode(dom, b);
    }

    public void setPaintInstructor(PaintInstructor pi) {
        pwCanvas.setPaintInstructor(pi);
    }

    public void setBuildPaintInstructor(BuildPaintInstructor bpi) {
        pwCanvas.setBuildPaintInstructor(bpi);
    }

    public void setPickListener(PickListener pl) {
        pwCanvas.setPickListener(pl);
    }

    /**
     * Registers obj with the canvas according to the interfaces it implements
     * (BuildPaintInstructor takes precedence over PaintInstructor; PickListener is
     * independent); logs an error if none match.
     */
    public void attach(Object obj) {
        boolean handled = false;
        if (obj instanceof BuildPaintInstructor) {
            setBuildPaintInstructor((BuildPaintInstructor) obj);
            handled = true;
        } else if (obj instanceof PaintInstructor) {
            setPaintInstructor((PaintInstructor) obj);
            handled = true;
        }
        if (obj instanceof PickListener) {
            setPickListener((PickListener) obj);
            handled = true;
        }
        if (!handled) {
            E.error("cant attach " + obj + " to a data XDisplay");
        }
    }

    public void setXAxisLabel(String lab) {
        xAxisCanvas.setLabel(lab);
    }

    public void setYAxisLabel(String lab) {
        yAxisCanvas.setLabel(lab);
    }

    /** Sets the x-axis label and range in one call. */
    public void setXAxis(String lab, double min, double max) {
        setXAxisLabel(lab);
        setXRange(min, max);
    }

    /** Sets the y-axis label and range in one call. */
    public void setYAxis(String lab, double min, double max) {
        setYAxisLabel(lab);
        setYRange(min, max);
    }

    public void setYRange(double min, double max) {
        pwCanvas.setYRange(min, max);
    }

    /** Accepts limits as {xmin, xmax, ymin, ymax} and forwards them in xyxy order. */
    public void setXXYYLimits(double[] d) {
        setLimits(new double[] {d[0], d[2], d[1], d[3]});
    }

    /** Applies limits given as {xmin, ymin, xmax, ymax} and requests a repaint. */
    public void setLimits(double[] xyxy) {
        pwCanvas.syncSize();
        pwCanvas.setXRange(xyxy[0], xyxy[2]);
        pwCanvas.setYRange(xyxy[1], xyxy[3]);
        pwCanvas.requestRepaint();
    }

    public void setXRange(double low, double high) {
        pwCanvas.setXRange(low, high);
    }

    public double[] getXRange() {
        return pwCanvas.getXRange();
    }

    public double[] getYRange() {
        return pwCanvas.getYRange();
    }

    public void setFixedAspectRatio(double ar) {
        pwCanvas.setFixedAspectRatio(ar);
    }

    /** Repaints the canvas when the view changes (no-op before construction completes). */
    public void viewChanged() {
        if (pwCanvas != null) {
            pwCanvas.repaint();
        }
    }

    public void reframe() {
        pwCanvas.reframe();
    }

    /** Small manual demo: shows the display in a frame with a Demo1 paint instructor. */
    public static void main(String[] argv) {
        JFrame frame = new JFrame();
        DataDisplay display = new DataDisplay();
        frame.setPreferredSize(new Dimension(500, 300));
        display.setPaintInstructor(new Demo1());
        frame.getContentPane().add(display);
        frame.pack();
        frame.setVisible(true);
    }

    public void setColorRange(double cmin, double cmax) {
        pwCanvas.setColorRange(cmin, cmax);
    }

    public void setColorTable(Color[] ac) {
        pwCanvas.setColorTable(ac);
    }

    public void syncSizes() {
        pwCanvas.syncSize();
    }

    /** Alias for {@link #reframe()}. */
    public void frameData() {
        pwCanvas.reframe();
    }

    public void requestRepaint() {
        pwCanvas.requestRepaint();
    }

    public void setCursor(String string) {
        // TODO Auto-generated method stub
    }
}
| |
package org.apache.lucene.expressions;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.expressions.js.JavascriptCompiler;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.CheckHits;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Simple demo of using expressions: compiles javascript-like expressions with
 * {@link JavascriptCompiler}, maps their variables to {@link SortField}s via
 * {@link SimpleBindings}, and sorts search results by the computed value.
 */
@SuppressCodecs("Lucene3x")
public class TestDemoExpressions extends LuceneTestCase {
  IndexSearcher searcher;
  DirectoryReader reader;
  Directory dir;
  @Override
  public void setUp() throws Exception {
    super.setUp();
    dir = newDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
    // Three-document fixture: each doc has an id, body text, a numeric
    // "popularity" doc value, and a latitude/longitude pair.
    Document doc = new Document();
    doc.add(newStringField("id", "1", Field.Store.YES));
    doc.add(newTextField("body", "some contents and more contents", Field.Store.NO));
    doc.add(new NumericDocValuesField("popularity", 5));
    doc.add(new DoubleField("latitude", 40.759011, Field.Store.NO));
    doc.add(new DoubleField("longitude", -73.9844722, Field.Store.NO));
    iw.addDocument(doc);
    doc = new Document();
    doc.add(newStringField("id", "2", Field.Store.YES));
    doc.add(newTextField("body", "another document with different contents", Field.Store.NO));
    doc.add(new NumericDocValuesField("popularity", 20));
    doc.add(new DoubleField("latitude", 40.718266, Field.Store.NO));
    doc.add(new DoubleField("longitude", -74.007819, Field.Store.NO));
    iw.addDocument(doc);
    doc = new Document();
    doc.add(newStringField("id", "3", Field.Store.YES));
    doc.add(newTextField("body", "crappy contents", Field.Store.NO));
    doc.add(new NumericDocValuesField("popularity", 2));
    doc.add(new DoubleField("latitude", 40.7051157, Field.Store.NO));
    doc.add(new DoubleField("longitude", -74.0088305, Field.Store.NO));
    iw.addDocument(doc);
    // Close the writer only after opening the reader/searcher over it.
    reader = iw.getReader();
    searcher = new IndexSearcher(reader);
    iw.close();
  }
  @Override
  public void tearDown() throws Exception {
    reader.close();
    dir.close();
    super.tearDown();
  }
  /** an example of how to rank by an expression */
  public void test() throws Exception {
    // compile an expression:
    Expression expr = JavascriptCompiler.compile("sqrt(_score) + ln(popularity)");
    // we use SimpleBindings: which just maps variables to SortField instances
    SimpleBindings bindings = new SimpleBindings();
    bindings.add(new SortField("_score", SortField.Type.SCORE));
    bindings.add(new SortField("popularity", SortField.Type.INT));
    // create a sort field and sort by it (reverse order)
    Sort sort = new Sort(expr.getSortField(bindings, true));
    Query query = new TermQuery(new Term("body", "contents"));
    searcher.search(query, null, 3, sort);
  }
  /** tests the returned sort values are correct */
  public void testSortValues() throws Exception {
    Expression expr = JavascriptCompiler.compile("sqrt(_score)");
    SimpleBindings bindings = new SimpleBindings();
    bindings.add(new SortField("_score", SortField.Type.SCORE));
    Sort sort = new Sort(expr.getSortField(bindings, true));
    Query query = new TermQuery(new Term("body", "contents"));
    // trackDocScores=true so d.score is populated for the expected-value check below
    TopFieldDocs td = searcher.search(query, null, 3, sort, true, true);
    for (int i = 0; i < 3; i++) {
      FieldDoc d = (FieldDoc) td.scoreDocs[i];
      float expected = (float) Math.sqrt(d.score);
      float actual = ((Double)d.fields[0]).floatValue();
      assertEquals(expected, actual, CheckHits.explainToleranceDelta(expected, actual));
    }
  }
  /** tests same binding used more than once in an expression */
  public void testTwoOfSameBinding() throws Exception {
    Expression expr = JavascriptCompiler.compile("_score + _score");
    SimpleBindings bindings = new SimpleBindings();
    bindings.add(new SortField("_score", SortField.Type.SCORE));
    Sort sort = new Sort(expr.getSortField(bindings, true));
    Query query = new TermQuery(new Term("body", "contents"));
    TopFieldDocs td = searcher.search(query, null, 3, sort, true, true);
    for (int i = 0; i < 3; i++) {
      FieldDoc d = (FieldDoc) td.scoreDocs[i];
      // _score + _score == 2 * score
      float expected = 2*d.score;
      float actual = ((Double)d.fields[0]).floatValue();
      assertEquals(expected, actual, CheckHits.explainToleranceDelta(expected, actual));
    }
  }
  /** tests expression referring to another expression */
  public void testExpressionRefersToExpression() throws Exception {
    Expression expr1 = JavascriptCompiler.compile("_score");
    Expression expr2 = JavascriptCompiler.compile("2*expr1");
    SimpleBindings bindings = new SimpleBindings();
    bindings.add(new SortField("_score", SortField.Type.SCORE));
    // bind the inner expression under the name the outer expression references
    bindings.add("expr1", expr1);
    Sort sort = new Sort(expr2.getSortField(bindings, true));
    Query query = new TermQuery(new Term("body", "contents"));
    TopFieldDocs td = searcher.search(query, null, 3, sort, true, true);
    for (int i = 0; i < 3; i++) {
      FieldDoc d = (FieldDoc) td.scoreDocs[i];
      float expected = 2*d.score;
      float actual = ((Double)d.fields[0]).floatValue();
      assertEquals(expected, actual, CheckHits.explainToleranceDelta(expected, actual));
    }
  }
  /** tests huge amounts of variables in the expression */
  public void testLotsOfBindings() throws Exception {
    // exercise the boundary around Byte.MAX_VALUE bindings
    doTestLotsOfBindings(Byte.MAX_VALUE-1);
    doTestLotsOfBindings(Byte.MAX_VALUE);
    doTestLotsOfBindings(Byte.MAX_VALUE+1);
    // TODO: ideally we'd test > Short.MAX_VALUE too, but compilation is currently recursive.
    // so if we want to test such huge expressions, we need to instead change parser to use an explicit Stack
  }
  /**
   * Builds the expression x0+x1+...+x(n-1), binding every variable to the
   * document score, so each hit's sort value must equal n*score.
   */
  private void doTestLotsOfBindings(int n) throws Exception {
    SimpleBindings bindings = new SimpleBindings();
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < n; i++) {
      if (i > 0) {
        sb.append("+");
      }
      sb.append("x" + i);
      bindings.add(new SortField("x" + i, SortField.Type.SCORE));
    }
    Expression expr = JavascriptCompiler.compile(sb.toString());
    Sort sort = new Sort(expr.getSortField(bindings, true));
    Query query = new TermQuery(new Term("body", "contents"));
    TopFieldDocs td = searcher.search(query, null, 3, sort, true, true);
    for (int i = 0; i < 3; i++) {
      FieldDoc d = (FieldDoc) td.scoreDocs[i];
      float expected = n*d.score;
      float actual = ((Double)d.fields[0]).floatValue();
      assertEquals(expected, actual, CheckHits.explainToleranceDelta(expected, actual));
    }
  }
  /**
   * Sorts all docs by haversin distance from a fixed point; expected values
   * are hard-coded for the three fixture coordinates set up in setUp().
   */
  public void testDistanceSort() throws Exception {
    Expression distance = JavascriptCompiler.compile("haversin(40.7143528,-74.0059731,latitude,longitude)");
    SimpleBindings bindings = new SimpleBindings();
    bindings.add(new SortField("latitude", SortField.Type.DOUBLE));
    bindings.add(new SortField("longitude", SortField.Type.DOUBLE));
    // ascending: nearest document first
    Sort sort = new Sort(distance.getSortField(bindings, false));
    TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), null, 3, sort);
    FieldDoc d = (FieldDoc) td.scoreDocs[0];
    assertEquals(0.4621D, (Double)d.fields[0], 1E-4);
    d = (FieldDoc) td.scoreDocs[1];
    assertEquals(1.0550D, (Double)d.fields[0], 1E-4);
    d = (FieldDoc) td.scoreDocs[2];
    assertEquals(5.2859D, (Double)d.fields[0], 1E-4);
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.rounding;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;
import org.junit.Test;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
/**
 * Tests for {@code TimeZoneRounding}: unit-based and interval-based rounding
 * under UTC, fixed offsets, and DST-observing time zones.
 */
public class TimeZoneRoundingTests extends ESTestCase {
    final static DateTimeZone JERUSALEM_TIMEZONE = DateTimeZone.forID("Asia/Jerusalem");
    @Test
    public void testUTCTimeUnitRounding() {
        Rounding tzRounding = TimeZoneRounding.builder(DateTimeUnit.MONTH_OF_YEAR).build();
        assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-01T00:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2009-02-01T00:00:00.000Z")), equalTo(utc("2009-03-01T00:00:00.000Z")));
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.WEEK_OF_WEEKYEAR).build();
        assertThat(tzRounding.round(utc("2012-01-10T01:01:01")), equalTo(utc("2012-01-09T00:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2012-01-09T00:00:00.000Z")), equalTo(utc("2012-01-16T00:00:00.000Z")));
        // a -24h offset shifts the week boundary back by one day
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.WEEK_OF_WEEKYEAR).offset(-TimeValue.timeValueHours(24).millis()).build();
        assertThat(tzRounding.round(utc("2012-01-10T01:01:01")), equalTo(utc("2012-01-08T00:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2012-01-08T00:00:00.000Z")), equalTo(utc("2012-01-15T00:00:00.000Z")));
    }
    @Test
    public void testUTCIntervalRounding() {
        Rounding tzRounding = TimeZoneRounding.builder(TimeValue.timeValueHours(12)).build();
        assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-03T00:00:00.000Z")));
        // roundKey/valueForKey must agree with round() for the same instant
        long roundKey = tzRounding.roundKey(utc("2009-02-03T01:01:01"));
        assertThat(roundKey, equalTo(tzRounding.roundKey(utc("2009-02-03T00:00:00.000Z"))));
        assertThat(tzRounding.valueForKey(roundKey), equalTo(utc("2009-02-03T00:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T00:00:00.000Z")), equalTo(utc("2009-02-03T12:00:00.000Z")));
        assertThat(tzRounding.round(utc("2009-02-03T13:01:01")), equalTo(utc("2009-02-03T12:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T12:00:00.000Z")), equalTo(utc("2009-02-04T00:00:00.000Z")));
        tzRounding = TimeZoneRounding.builder(TimeValue.timeValueHours(48)).build();
        assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-03T00:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T00:00:00.000Z")), equalTo(utc("2009-02-05T00:00:00.000Z")));
        assertThat(tzRounding.round(utc("2009-02-05T13:01:01")), equalTo(utc("2009-02-05T00:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2009-02-05T00:00:00.000Z")), equalTo(utc("2009-02-07T00:00:00.000Z")));
    }
    /**
     * test TimeIntervalTimeZoneRounding, (interval < 12h) with time zone shift
     */
    @Test
    public void testTimeIntervalTimeZoneRounding() {
        Rounding tzRounding = TimeZoneRounding.builder(TimeValue.timeValueHours(6)).timeZone(DateTimeZone.forOffsetHours(-1)).build();
        assertThat(tzRounding.round(utc("2009-02-03T00:01:01")), equalTo(utc("2009-02-02T19:00:00.000Z")));
        long roundKey = tzRounding.roundKey(utc("2009-02-03T00:01:01"));
        assertThat(roundKey, equalTo(tzRounding.roundKey(utc("2009-02-02T19:00:00.000Z"))));
        assertThat(tzRounding.valueForKey(roundKey), equalTo(utc("2009-02-02T19:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2009-02-02T19:00:00.000Z")), equalTo(utc("2009-02-03T01:00:00.000Z")));
        assertThat(tzRounding.round(utc("2009-02-03T13:01:01")), equalTo(utc("2009-02-03T13:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T13:00:00.000Z")), equalTo(utc("2009-02-03T19:00:00.000Z")));
    }
    /**
     * test DayIntervalTimeZoneRounding, (interval >= 12h) with time zone shift
     */
    @Test
    public void testDayIntervalTimeZoneRounding() {
        Rounding tzRounding = TimeZoneRounding.builder(TimeValue.timeValueHours(12)).timeZone(DateTimeZone.forOffsetHours(-8)).build();
        assertThat(tzRounding.round(utc("2009-02-03T00:01:01")), equalTo(utc("2009-02-02T20:00:00.000Z")));
        long roundKey = tzRounding.roundKey(utc("2009-02-03T00:01:01"));
        assertThat(roundKey, equalTo(tzRounding.roundKey(utc("2009-02-02T20:00:00.000Z"))));
        assertThat(tzRounding.valueForKey(roundKey), equalTo(utc("2009-02-02T20:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2009-02-02T20:00:00.000Z")), equalTo(utc("2009-02-03T08:00:00.000Z")));
        assertThat(tzRounding.round(utc("2009-02-03T13:01:01")), equalTo(utc("2009-02-03T08:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T08:00:00.000Z")), equalTo(utc("2009-02-03T20:00:00.000Z")));
    }
    @Test
    public void testDayTimeZoneRounding() {
        int timezoneOffset = -2;
        Rounding tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(DateTimeZone.forOffsetHours(timezoneOffset))
                .build();
        assertThat(tzRounding.round(0), equalTo(0l - TimeValue.timeValueHours(24 + timezoneOffset).millis()));
        assertThat(tzRounding.nextRoundingValue(0l - TimeValue.timeValueHours(24 + timezoneOffset).millis()), equalTo(0l - TimeValue
                .timeValueHours(timezoneOffset).millis()));
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(DateTimeZone.forID("-08:00")).build();
        assertThat(tzRounding.round(utc("2012-04-01T04:15:30Z")), equalTo(utc("2012-03-31T08:00:00Z")));
        // NOTE(review): "08:0:00" below looks like a typo for "08:00:00"; the lenient
        // dateOptionalTimeParser appears to accept it — confirm before normalizing.
        assertThat(toUTCDateString(tzRounding.nextRoundingValue(utc("2012-03-31T08:00:00Z"))),
                equalTo(toUTCDateString(utc("2012-04-01T08:0:00Z"))));
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.MONTH_OF_YEAR).timeZone(DateTimeZone.forID("-08:00")).build();
        assertThat(tzRounding.round(utc("2012-04-01T04:15:30Z")), equalTo(utc("2012-03-01T08:00:00Z")));
        assertThat(toUTCDateString(tzRounding.nextRoundingValue(utc("2012-03-01T08:00:00Z"))),
                equalTo(toUTCDateString(utc("2012-04-01T08:0:00Z"))));
        // date in Feb-3rd, but still in Feb-2nd in -02:00 timezone
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(DateTimeZone.forID("-02:00")).build();
        assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-02T02:00:00")));
        long roundKey = tzRounding.roundKey(utc("2009-02-03T01:01:01"));
        assertThat(roundKey, equalTo(tzRounding.roundKey(utc("2009-02-02T02:00:00.000Z"))));
        assertThat(tzRounding.valueForKey(roundKey), equalTo(utc("2009-02-02T02:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2009-02-02T02:00:00")), equalTo(utc("2009-02-03T02:00:00")));
        // date in Feb-3rd, also in -02:00 timezone
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(DateTimeZone.forID("-02:00")).build();
        assertThat(tzRounding.round(utc("2009-02-03T02:01:01")), equalTo(utc("2009-02-03T02:00:00")));
        roundKey = tzRounding.roundKey(utc("2009-02-03T02:01:01"));
        assertThat(roundKey, equalTo(tzRounding.roundKey(utc("2009-02-03T02:00:00.000Z"))));
        assertThat(tzRounding.valueForKey(roundKey), equalTo(utc("2009-02-03T02:00:00.000Z")));
        assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T02:00:00")), equalTo(utc("2009-02-04T02:00:00")));
    }
    @Test
    public void testTimeTimeZoneRounding() {
        // hour unit
        Rounding tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forOffsetHours(-2)).build();
        assertThat(tzRounding.round(0), equalTo(0l));
        assertThat(tzRounding.nextRoundingValue(0l), equalTo(TimeValue.timeValueHours(1l).getMillis()));
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forOffsetHours(-2)).build();
        assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-03T01:00:00")));
        assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T01:00:00")), equalTo(utc("2009-02-03T02:00:00")));
    }
    @Test
    public void testTimeUnitRoundingDST() {
        Rounding tzRounding;
        // testing savings to non savings switch
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
        assertThat(tzRounding.round(time("2014-10-26T01:01:01", DateTimeZone.forID("CET"))),
                equalTo(time("2014-10-26T01:00:00", DateTimeZone.forID("CET"))));
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("CET")).build();
        assertThat(tzRounding.round(time("2014-10-26T01:01:01", DateTimeZone.forID("CET"))),
                equalTo(time("2014-10-26T01:00:00", DateTimeZone.forID("CET"))));
        // testing non savings to savings switch
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
        assertThat(tzRounding.round(time("2014-03-30T01:01:01", DateTimeZone.forID("CET"))),
                equalTo(time("2014-03-30T01:00:00", DateTimeZone.forID("CET"))));
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("CET")).build();
        assertThat(tzRounding.round(time("2014-03-30T01:01:01", DateTimeZone.forID("CET"))),
                equalTo(time("2014-03-30T01:00:00", DateTimeZone.forID("CET"))));
        // testing non savings to savings switch (America/Chicago)
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
        assertThat(tzRounding.round(time("2014-03-09T03:01:01", DateTimeZone.forID("America/Chicago"))),
                equalTo(time("2014-03-09T03:00:00", DateTimeZone.forID("America/Chicago"))));
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("America/Chicago")).build();
        assertThat(tzRounding.round(time("2014-03-09T03:01:01", DateTimeZone.forID("America/Chicago"))),
                equalTo(time("2014-03-09T03:00:00", DateTimeZone.forID("America/Chicago"))));
        // testing savings to non savings switch 2013 (America/Chicago)
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
        assertThat(tzRounding.round(time("2013-11-03T06:01:01", DateTimeZone.forID("America/Chicago"))),
                equalTo(time("2013-11-03T06:00:00", DateTimeZone.forID("America/Chicago"))));
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("America/Chicago")).build();
        assertThat(tzRounding.round(time("2013-11-03T06:01:01", DateTimeZone.forID("America/Chicago"))),
                equalTo(time("2013-11-03T06:00:00", DateTimeZone.forID("America/Chicago"))));
        // testing savings to non savings switch 2014 (America/Chicago)
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
        assertThat(tzRounding.round(time("2014-11-02T06:01:01", DateTimeZone.forID("America/Chicago"))),
                equalTo(time("2014-11-02T06:00:00", DateTimeZone.forID("America/Chicago"))));
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("America/Chicago")).build();
        assertThat(tzRounding.round(time("2014-11-02T06:01:01", DateTimeZone.forID("America/Chicago"))),
                equalTo(time("2014-11-02T06:00:00", DateTimeZone.forID("America/Chicago"))));
    }
    /**
     * randomized test on TimeUnitRounding with random time units and time zone offsets
     */
    @Test
    public void testTimeZoneRoundingRandom() {
        for (int i = 0; i < 1000; ++i) {
            DateTimeUnit timeUnit = randomTimeUnit();
            TimeZoneRounding rounding;
            int timezoneOffset = randomIntBetween(-23, 23);
            rounding = new TimeZoneRounding.TimeUnitRounding(timeUnit, DateTimeZone.forOffsetHours(timezoneOffset));
            long date = Math.abs(randomLong() % ((long) 10e11));
            final long roundedDate = rounding.round(date);
            final long nextRoundingValue = rounding.nextRoundingValue(roundedDate);
            // invariants that must hold for any unit/offset combination
            assertThat("Rounding should be idempotent", roundedDate, equalTo(rounding.round(roundedDate)));
            assertThat("Rounded value smaller or equal than unrounded, regardless of timezone", roundedDate, lessThanOrEqualTo(date));
            assertThat("NextRounding value should be greater than date", nextRoundingValue, greaterThan(roundedDate));
            assertThat("NextRounding value should be a rounded date", nextRoundingValue, equalTo(rounding.round(nextRoundingValue)));
        }
    }
    /**
     * randomized test on TimeIntervalRounding with random interval and time zone offsets
     */
    @Test
    public void testIntervalRoundingRandom() {
        for (int i = 0; i < 1000; ++i) {
            // max random interval is a year, can be negative
            long interval = Math.abs(randomLong() % (TimeUnit.DAYS.toMillis(365)));
            TimeZoneRounding rounding;
            int timezoneOffset = randomIntBetween(-23, 23);
            rounding = new TimeZoneRounding.TimeIntervalRounding(interval, DateTimeZone.forOffsetHours(timezoneOffset));
            long date = Math.abs(randomLong() % ((long) 10e11));
            final long roundedDate = rounding.round(date);
            final long nextRoundingValue = rounding.nextRoundingValue(roundedDate);
            assertThat("Rounding should be idempotent", roundedDate, equalTo(rounding.round(roundedDate)));
            assertThat("Rounded value smaller or equal than unrounded, regardless of timezone", roundedDate, lessThanOrEqualTo(date));
            assertThat("NextRounding value should be greater than date", nextRoundingValue, greaterThan(roundedDate));
            assertThat("NextRounding value should be interval from rounded value", nextRoundingValue - roundedDate, equalTo(interval));
            assertThat("NextRounding value should be a rounded date", nextRoundingValue, equalTo(rounding.round(nextRoundingValue)));
        }
    }
    /**
     * special test for DST switch from #9491
     */
    @Test
    public void testAmbiguousHoursAfterDSTSwitch() {
        Rounding tzRounding;
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(JERUSALEM_TIMEZONE).build();
        // Both timestamps "2014-10-25T22:30:00Z" and "2014-10-25T23:30:00Z" are "2014-10-26T01:30:00" in local time because
        // of DST switch between them. This test checks that they are both returned to their correct UTC time after rounding.
        assertThat(tzRounding.round(time("2014-10-25T22:30:00", DateTimeZone.UTC)), equalTo(time("2014-10-25T22:00:00", DateTimeZone.UTC)));
        assertThat(tzRounding.round(time("2014-10-25T23:30:00", DateTimeZone.UTC)), equalTo(time("2014-10-25T23:00:00", DateTimeZone.UTC)));
        // Day interval
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(JERUSALEM_TIMEZONE).build();
        assertThat(tzRounding.round(time("2014-11-11T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2014-11-11T00:00:00", JERUSALEM_TIMEZONE)));
        // DST on
        assertThat(tzRounding.round(time("2014-08-11T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2014-08-11T00:00:00", JERUSALEM_TIMEZONE)));
        // Day of switching DST on -> off
        assertThat(tzRounding.round(time("2014-10-26T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2014-10-26T00:00:00", JERUSALEM_TIMEZONE)));
        // Day of switching DST off -> on
        assertThat(tzRounding.round(time("2015-03-27T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2015-03-27T00:00:00", JERUSALEM_TIMEZONE)));
        // Month interval
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.MONTH_OF_YEAR).timeZone(JERUSALEM_TIMEZONE).build();
        assertThat(tzRounding.round(time("2014-11-11T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2014-11-01T00:00:00", JERUSALEM_TIMEZONE)));
        // DST on
        assertThat(tzRounding.round(time("2014-10-10T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2014-10-01T00:00:00", JERUSALEM_TIMEZONE)));
        // Year interval
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.YEAR_OF_CENTURY).timeZone(JERUSALEM_TIMEZONE).build();
        assertThat(tzRounding.round(time("2014-11-11T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2014-01-01T00:00:00", JERUSALEM_TIMEZONE)));
        // Two timestamps in same year and different timezone offset ("Double buckets" issue - #9491)
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.YEAR_OF_CENTURY).timeZone(JERUSALEM_TIMEZONE).build();
        assertThat(tzRounding.round(time("2014-11-11T17:00:00", JERUSALEM_TIMEZONE)),
                equalTo(tzRounding.round(time("2014-08-11T17:00:00", JERUSALEM_TIMEZONE))));
    }
    /**
     * test for #10025, strict local to UTC conversion can cause joda exceptions
     * on DST start
     */
    @Test
    public void testLenientConversionDST() {
        DateTimeZone tz = DateTimeZone.forID("America/Sao_Paulo");
        long start = time("2014-10-18T20:50:00.000", tz);
        long end = time("2014-10-19T01:00:00.000", tz);
        Rounding tzRounding = new TimeZoneRounding.TimeUnitRounding(DateTimeUnit.MINUTES_OF_HOUR, tz);
        Rounding dayTzRounding = new TimeZoneRounding.TimeIntervalRounding(60000, tz);
        // walk minute by minute across the DST start; nextRoundingValue must never throw or stall
        for (long time = start; time < end; time = time + 60000) {
            assertThat(tzRounding.nextRoundingValue(time), greaterThan(time));
            assertThat(dayTzRounding.nextRoundingValue(time), greaterThan(time));
        }
    }
    // Picks a random DateTimeUnit; ids 1..8 — assumes these cover the defined
    // constants in DateTimeUnit.resolve, TODO confirm if constants change.
    private DateTimeUnit randomTimeUnit() {
        byte id = (byte) randomIntBetween(1, 8);
        return DateTimeUnit.resolve(id);
    }
    // Renders a millis timestamp as an ISO string in UTC (for readable comparisons).
    private String toUTCDateString(long time) {
        return new DateTime(time, DateTimeZone.UTC).toString();
    }
    // Parses an ISO date(-time) string as UTC millis.
    private long utc(String time) {
        return time(time, DateTimeZone.UTC);
    }
    // Parses an ISO date(-time) string in the given zone and returns epoch millis.
    private long time(String time, DateTimeZone zone) {
        return ISODateTimeFormat.dateOptionalTimeParser().withZone(zone).parseMillis(time);
    }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.cxx.toolchain.DebugPathSanitizer;
import com.facebook.buck.cxx.toolchain.LinkerMapMode;
import com.facebook.buck.cxx.toolchain.StripStyle;
import com.facebook.buck.io.BuildCellRelativePath;
import com.facebook.buck.io.file.MorePaths;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.rules.AbstractBuildRule;
import com.facebook.buck.rules.AddToRuleKey;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildableContext;
import com.facebook.buck.rules.CacheableBuildRule;
import com.facebook.buck.rules.CellPathResolver;
import com.facebook.buck.rules.ExplicitBuildTargetSourcePath;
import com.facebook.buck.rules.RuleKeyObjectSink;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SymlinkTree;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.keys.SupportsDependencyFileRuleKey;
import com.facebook.buck.rules.keys.SupportsInputBasedRuleKey;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.fs.MkdirStep;
import com.facebook.buck.util.HumanReadableException;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Optional;
import java.util.SortedSet;
import java.util.function.Predicate;
/** A build rule which preprocesses and/or compiles a C/C++ source in a single step. */
public class CxxPreprocessAndCompile extends AbstractBuildRule
implements SupportsInputBasedRuleKey, SupportsDependencyFileRuleKey, CacheableBuildRule {
private final ImmutableSortedSet<BuildRule> buildDeps;
/** The presence or absence of this field denotes whether the input needs to be preprocessed. */
@AddToRuleKey private final Optional<PreprocessorDelegate> preprocessDelegate;
@AddToRuleKey private final CompilerDelegate compilerDelegate;
@AddToRuleKey(stringify = true)
private final Path output;
@AddToRuleKey private final SourcePath input;
private final Optional<CxxPrecompiledHeader> precompiledHeaderRule;
private final CxxSource.Type inputType;
private final DebugPathSanitizer sanitizer;
private final Optional<SymlinkTree> sandboxTree;
  /**
   * Private constructor; use {@link #compile} or {@link #preprocessAndCompile}.
   * Validates that a precompiled header is only supplied together with a
   * preprocessor delegate, and that strip/linker-map flavors are not applied
   * directly to this rule.
   */
  private CxxPreprocessAndCompile(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      ImmutableSortedSet<BuildRule> buildDeps,
      Optional<PreprocessorDelegate> preprocessDelegate,
      CompilerDelegate compilerDelegate,
      Path output,
      SourcePath input,
      CxxSource.Type inputType,
      Optional<CxxPrecompiledHeader> precompiledHeaderRule,
      DebugPathSanitizer sanitizer,
      Optional<SymlinkTree> sandboxTree) {
    super(buildTarget, projectFilesystem);
    this.buildDeps = buildDeps;
    this.sandboxTree = sandboxTree;
    // A precompiled header only makes sense when there is a preprocessing stage.
    if (precompiledHeaderRule.isPresent()) {
      Preconditions.checkState(
          preprocessDelegate.isPresent(),
          "Precompiled headers are only used when compilation includes preprocessing.");
    }
    this.preprocessDelegate = preprocessDelegate;
    this.compilerDelegate = compilerDelegate;
    this.output = output;
    this.input = input;
    this.inputType = inputType;
    this.precompiledHeaderRule = precompiledHeaderRule;
    this.sanitizer = sanitizer;
    Preconditions.checkArgument(
        !buildTarget.getFlavors().contains(CxxStrip.RULE_FLAVOR)
            || !StripStyle.FLAVOR_DOMAIN.containsAnyOf(buildTarget.getFlavors()),
        "CxxPreprocessAndCompile should not be created with CxxStrip flavors");
    Preconditions.checkArgument(
        !LinkerMapMode.FLAVOR_DOMAIN.containsAnyOf(buildTarget.getFlavors()),
        "CxxPreprocessAndCompile %s should not be created with LinkerMapMode flavor (%s)",
        this,
        LinkerMapMode.FLAVOR_DOMAIN);
  }
  /**
   * @return a {@link CxxPreprocessAndCompile} step that compiles the given preprocessed source.
   *     No preprocessor delegate or precompiled header is attached (both are empty).
   */
  public static CxxPreprocessAndCompile compile(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      ImmutableSortedSet<BuildRule> buildDeps,
      CompilerDelegate compilerDelegate,
      Path output,
      SourcePath input,
      CxxSource.Type inputType,
      DebugPathSanitizer sanitizer,
      Optional<SymlinkTree> sandboxTree) {
    return new CxxPreprocessAndCompile(
        buildTarget,
        projectFilesystem,
        buildDeps,
        Optional.empty(),
        compilerDelegate,
        output,
        input,
        inputType,
        Optional.empty(),
        sanitizer,
        sandboxTree);
  }
  /**
   * @return a {@link CxxPreprocessAndCompile} step that preprocesses and compiles the given source.
   *     The preprocessor delegate is required; an optional precompiled header may be supplied.
   */
  public static CxxPreprocessAndCompile preprocessAndCompile(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      ImmutableSortedSet<BuildRule> buildDeps,
      PreprocessorDelegate preprocessorDelegate,
      CompilerDelegate compilerDelegate,
      Path output,
      SourcePath input,
      CxxSource.Type inputType,
      Optional<CxxPrecompiledHeader> precompiledHeaderRule,
      DebugPathSanitizer sanitizer,
      Optional<SymlinkTree> sandboxTree) {
    return new CxxPreprocessAndCompile(
        buildTarget,
        projectFilesystem,
        buildDeps,
        Optional.of(preprocessorDelegate),
        compilerDelegate,
        output,
        input,
        inputType,
        precompiledHeaderRule,
        sanitizer,
        sandboxTree);
  }
  @Override
  public void appendToRuleKey(RuleKeyObjectSink sink) {
    // If a sanitizer is being used for compilation, we need to record the working directory in
    // the rule key, as changing this changes the generated object file.
    // NOTE(review): the guard checks preprocessDelegate presence rather than the sanitizer
    // itself — confirm that is intentional.
    if (preprocessDelegate.isPresent()) {
      sink.setReflectively("compilationDirectory", sanitizer.getCompilationDirectory());
    }
    // Each sandbox symlink contributes to the rule key; iterate in sorted order so the
    // key is deterministic regardless of map iteration order.
    if (sandboxTree.isPresent()) {
      ImmutableMap<Path, SourcePath> links = sandboxTree.get().getLinks();
      for (Path path : ImmutableSortedSet.copyOf(links.keySet())) {
        SourcePath source = links.get(path);
        sink.setReflectively("sandbox(" + path + ")", source);
      }
    }
    // The PCH input also affects the produced object file.
    precompiledHeaderRule.ifPresent(
        cxxPrecompiledHeader ->
            sink.setReflectively("precompiledHeaderRuleInput", cxxPrecompiledHeader.getInput()));
  }
private Path getDepFilePath() {
return output.getFileSystem().getPath(output + ".dep");
}
  @VisibleForTesting
  CxxPreprocessAndCompileStep makeMainStep(
      SourcePathResolver resolver, Path scratchDir, boolean useArgfile) {
    // Builds the single step that invokes the compiler (and preprocessor, when present).
    // If we're compiling, this will just be empty.
    HeaderPathNormalizer headerPathNormalizer =
        preprocessDelegate
            .map(PreprocessorDelegate::getHeaderPathNormalizer)
            .orElseGet(() -> HeaderPathNormalizer.empty(resolver));
    // Compiler flags, prefixed with the preprocessor's search-path flags when preprocessing.
    ImmutableList<Arg> arguments =
        compilerDelegate.getArguments(
            preprocessDelegate
                .map(delegate -> delegate.getFlagsWithSearchPaths(precompiledHeaderRule))
                .orElseGet(CxxToolFlags::of),
            getBuildTarget().getCellPath());
    Path relativeInputPath = getRelativeInputPath(resolver);
    return new CxxPreprocessAndCompileStep(
        getProjectFilesystem(),
        preprocessDelegate.isPresent()
            ? CxxPreprocessAndCompileStep.Operation.PREPROCESS_AND_COMPILE
            : CxxPreprocessAndCompileStep.Operation.COMPILE,
        output,
        // Use a depfile if there's a preprocessing stage, this logic should be kept in sync with
        // getInputsAfterBuildingLocally.
        preprocessDelegate.isPresent() ? Optional.of(getDepFilePath()) : Optional.empty(),
        relativeInputPath,
        inputType,
        new CxxPreprocessAndCompileStep.ToolCommand(
            compilerDelegate.getCommandPrefix(resolver),
            Arg.stringify(arguments, resolver),
            compilerDelegate.getEnvironment(resolver)),
        headerPathNormalizer,
        sanitizer,
        scratchDir,
        useArgfile,
        compilerDelegate.getCompiler(),
        // Attach target/input/output info so compile diagnostics can be attributed.
        Optional.of(
            CxxLogInfo.builder()
                .setTarget(getBuildTarget())
                .setSourcePath(relativeInputPath)
                .setOutputPath(output)
                .build()));
  }
  /** @return the input source path relativized against the project root, for cacheable output. */
  public Path getRelativeInputPath(SourcePathResolver resolver) {
    // For caching purposes, the path passed to the compiler is relativized by the absolute path by
    // the current cell root, so that file references emitted by the compiler would not change if
    // the repo is checked out into different places on disk.
    return getProjectFilesystem().getRootPath().relativize(resolver.getAbsolutePath(input));
  }
@Override
public String getType() {
return "cxx_preprocess_compile";
}
  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context, BuildableContext buildableContext) {
    // Fail fast (as a human-readable error) if the -I search paths expose conflicting headers.
    preprocessDelegate.ifPresent(
        delegate -> {
          try {
            CxxHeaders.checkConflictingHeaders(delegate.getCxxIncludePaths().getIPaths());
          } catch (CxxHeaders.ConflictingHeadersException e) {
            throw e.getHumanReadableExceptionForBuildTarget(getBuildTarget());
          }
        });
    buildableContext.recordArtifact(output);
    // When coverage is enabled (-ftest-coverage) also record the .gcno sibling artifact.
    for (String flag :
        Arg.stringify(
            compilerDelegate.getCompilerFlags().getAllFlags(), context.getSourcePathResolver())) {
      if (flag.equals("-ftest-coverage") && hasGcno(output)) {
        buildableContext.recordArtifact(getGcnoPath(output));
        break;
      }
    }
    // Ensure the output directory exists and the scratch dir is fresh, then run the compile step.
    return new ImmutableList.Builder<Step>()
        .add(
            MkdirStep.of(
                BuildCellRelativePath.fromCellRelativePath(
                    context.getBuildCellRootPath(), getProjectFilesystem(), output.getParent())))
        .addAll(
            MakeCleanDirectoryStep.of(
                BuildCellRelativePath.fromCellRelativePath(
                    context.getBuildCellRootPath(), getProjectFilesystem(), getScratchPath())))
        .add(
            makeMainStep(
                context.getSourcePathResolver(),
                getScratchPath(),
                compilerDelegate.isArgFileSupported()))
        .build();
  }
private static boolean hasGcno(Path output) {
return !MorePaths.getNameWithoutExtension(output).endsWith(".S");
}
@VisibleForTesting
static Path getGcnoPath(Path output) {
String basename = MorePaths.getNameWithoutExtension(output);
return output.getParent().resolve(basename + ".gcno");
}
  /** @return the per-target temporary directory used as the compiler's scratch space. */
  private Path getScratchPath() {
    return BuildTargets.getScratchPath(getProjectFilesystem(), getBuildTarget(), "%s-tmp");
  }
  /** @return the preprocessor delegate, if this rule has a preprocessing stage (test hook). */
  @VisibleForTesting
  Optional<PreprocessorDelegate> getPreprocessorDelegate() {
    return preprocessDelegate;
  }
  // Used for compdb
  /** @return the full compiler command line (no argfile), for the compilation database. */
  public ImmutableList<String> getCommand(SourcePathResolver pathResolver) {
    return makeMainStep(pathResolver, getScratchPath(), false).getCommand();
  }
  /** @return a source path pointing at this rule's object-file output. */
  @Override
  public SourcePath getSourcePathToOutput() {
    return ExplicitBuildTargetSourcePath.of(getBuildTarget(), output);
  }
  /** @return the source file this rule preprocesses/compiles. */
  public SourcePath getInput() {
    return input;
  }
  /** Always opt in to dep-file based rule keys so unused headers don't invalidate the rule. */
  @Override
  public boolean useDependencyFileRuleKeys() {
    return true;
  }
@Override
public Predicate<SourcePath> getCoveredByDepFilePredicate(SourcePathResolver pathResolver) {
if (preprocessDelegate.isPresent()) {
return preprocessDelegate.get().getCoveredByDepFilePredicate();
}
return (SourcePath path) -> true;
}
@Override
public Predicate<SourcePath> getExistenceOfInterestPredicate(SourcePathResolver pathResolver) {
return (SourcePath path) -> false;
}
  // see com.facebook.buck.cxx.AbstractCxxSourceRuleFactory.getSandboxedCxxSource()
  /**
   * @return the original (pre-symlink) source path for the input when sandboxing is active;
   *     otherwise the input itself.
   */
  private SourcePath getOriginalInput(SourcePathResolver sourcePathResolver) {
    // The current logic of handling depfiles for cxx requires that all headers files and source
    // files are "deciphered' from links from symlink tree to original locations.
    // It already happens in Depfiles.parseAndOutputBuckCompatibleDepfile via header normalizer.
    // This special case is for applying the same logic for an input cxx file in the case
    // when cxx.sandbox_sources=true.
    if (preprocessDelegate.isPresent()) {
      Path absPath = sourcePathResolver.getAbsolutePath(input);
      HeaderPathNormalizer headerPathNormalizer =
          preprocessDelegate.get().getHeaderPathNormalizer();
      Optional<Path> original = headerPathNormalizer.getAbsolutePathForUnnormalizedPath(absPath);
      if (original.isPresent()) {
        return headerPathNormalizer.getSourcePathForAbsolutePath(original.get());
      }
    }
    return input;
  }
  /**
   * Computes the inputs actually used by this compile, for dep-file rule keys: depfile-derived
   * headers (when preprocessing), compiler inputs, PCH inputs, and the source file itself.
   */
  @Override
  public ImmutableList<SourcePath> getInputsAfterBuildingLocally(
      BuildContext context, CellPathResolver cellPathResolver) throws IOException {
    ImmutableList.Builder<SourcePath> inputs = ImmutableList.builder();
    // If present, include all inputs coming from the preprocessor tool.
    if (preprocessDelegate.isPresent()) {
      Iterable<Path> dependencies;
      try {
        // Parse the depfile written during the build; header-verification failures are
        // surfaced as human-readable errors rather than stack traces.
        dependencies =
            Depfiles.parseAndVerifyDependencies(
                context.getEventBus(),
                getProjectFilesystem(),
                preprocessDelegate.get().getHeaderPathNormalizer(),
                preprocessDelegate.get().getHeaderVerification(),
                getDepFilePath(),
                getRelativeInputPath(context.getSourcePathResolver()),
                output,
                compilerDelegate.getDependencyTrackingMode());
      } catch (Depfiles.HeaderVerificationException e) {
        throw new HumanReadableException(e);
      }
      inputs.addAll(preprocessDelegate.get().getInputsAfterBuildingLocally(dependencies));
    }
    // If present, include all inputs coming from the compiler tool.
    inputs.addAll(compilerDelegate.getInputsAfterBuildingLocally());
    if (precompiledHeaderRule.isPresent()) {
      CxxPrecompiledHeader pch = precompiledHeaderRule.get();
      inputs.addAll(pch.getInputsAfterBuildingLocally(context, cellPathResolver));
    }
    // Add the input.
    inputs.add(getOriginalInput(context.getSourcePathResolver()));
    return inputs.build();
  }
  /** @return the build-time dependencies captured at construction. */
  @Override
  public SortedSet<BuildRule> getBuildDeps() {
    return buildDeps;
  }
}
| |
package com.ferreusveritas.dynamictrees.blocks;
import java.util.List;
import com.ferreusveritas.dynamictrees.ModBlocks;
import com.ferreusveritas.dynamictrees.ModTabs;
import com.ferreusveritas.dynamictrees.api.TreeHelper;
import com.ferreusveritas.dynamictrees.entities.EntityFallingTree;
import com.ferreusveritas.dynamictrees.util.CoordUtils.Surround;
import net.minecraft.block.Block;
import net.minecraft.block.SoundType;
import net.minecraft.block.material.Material;
import net.minecraft.block.properties.IProperty;
import net.minecraft.block.properties.PropertyBool;
import net.minecraft.block.properties.PropertyEnum;
import net.minecraft.block.properties.PropertyInteger;
import net.minecraft.block.state.BlockFaceShape;
import net.minecraft.block.state.BlockStateContainer;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.init.Blocks;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.IStringSerializable;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.MathHelper;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.common.property.ExtendedBlockState;
import net.minecraftforge.common.property.IExtendedBlockState;
import net.minecraftforge.common.property.IUnlistedProperty;
import net.minecraftforge.common.property.Properties;
/**
 * A decorative surface-root block that visually extends a thick tree trunk along the ground.
 * The root stores only its radius in block metadata; connection data to neighboring roots and
 * the trunk is derived on the fly as unlisted (render-only) state.
 */
public class BlockSurfaceRoot extends Block {
	// Maximum root radius; matches the 3-bit metadata range (radius 1..8 -> meta 0..7).
	public static final int RADMAX_NORMAL = 8;
	// Listed (metadata-backed) radius of this root block.
	protected static final PropertyInteger RADIUS = PropertyInteger.create("radius", 1, RADMAX_NORMAL);
	// Render-only flag: true when the block below is solid enough to rest on.
	public static final IUnlistedProperty GROUNDED = new Properties.PropertyAdapter<Boolean>(PropertyBool.create("grounded"));
	// Per-side connection radii, indexed by EnumFacing.getHorizontalIndex() (order: S, W, N, E).
	public static final IUnlistedProperty CONNECTIONS[] = {
		new Properties.PropertyAdapter<Integer>(PropertyInteger.create("radiuss", 0, 8)),
		new Properties.PropertyAdapter<Integer>(PropertyInteger.create("radiusw", 0, 8)),
		new Properties.PropertyAdapter<Integer>(PropertyInteger.create("radiusn", 0, 8)),
		new Properties.PropertyAdapter<Integer>(PropertyInteger.create("radiuse", 0, 8))
	};
	// Per-side vertical connection level (LOW/MID/HIGH), same index order as CONNECTIONS.
	public static final IUnlistedProperty LEVELS[] = {
		new Properties.PropertyAdapter<ConnectionLevel>(PropertyEnum.create("levels", ConnectionLevel.class)),
		new Properties.PropertyAdapter<ConnectionLevel>(PropertyEnum.create("levelw", ConnectionLevel.class)),
		new Properties.PropertyAdapter<ConnectionLevel>(PropertyEnum.create("leveln", ConnectionLevel.class)),
		new Properties.PropertyAdapter<ConnectionLevel>(PropertyEnum.create("levele", ConnectionLevel.class))
	};
	public BlockSurfaceRoot(Material material, String name) {
		super(material);
		setUnlocalizedName(name);
		setRegistryName(name);
		setDefaultState(this.blockState.getBaseState().withProperty(RADIUS, 1));
		setHarvestLevel("axe", 0);
		setSoundType(SoundType.WOOD);
		setHardness(2.5f);
		setResistance(1.0f);
		setCreativeTab(ModTabs.dynamicTreesTab);
	}
	/** Vertical relationship of a side connection: the neighbor root sits one block down, level, or up. */
	public enum ConnectionLevel implements IStringSerializable {
		MID(0),
		LOW(-1),
		HIGH(1);
		// Y offset (in blocks) applied when probing for the connected neighbor.
		private final int yOffset;
		private ConnectionLevel(int y) {
			this.yOffset = y;
		}
		@Override
		public String getName() {
			return toString().toLowerCase();
		}
		public int getYOffset() {
			return yOffset;
		}
	}
	/** Simple value pair describing a connection to an adjacent root/branch: its level and radius. */
	public class RootConnection {
		public ConnectionLevel level;
		public int radius;
		public RootConnection(ConnectionLevel level, int radius) {
			this.level = level;
			this.radius = radius;
		}
	}
	///////////////////////////////////////////
	// BLOCKSTATES
	///////////////////////////////////////////
	@Override
	protected BlockStateContainer createBlockState() {
		// RADIUS is the only listed property; everything else is computed in getExtendedState().
		IProperty[] listedProperties = { RADIUS };
		IUnlistedProperty unlistedProperties[] = new IUnlistedProperty[] {
			CONNECTIONS[0], CONNECTIONS[1], CONNECTIONS[2], CONNECTIONS[3],
			LEVELS[0], LEVELS[1], LEVELS[2], LEVELS[3], GROUNDED
		};
		return new ExtendedBlockState(this, listedProperties, unlistedProperties);
	}
	/** Properties that model/state matching may ignore (only RADIUS here). */
	public IProperty<?>[] getIgnorableProperties() {
		return new IProperty<?>[]{ RADIUS };
	}
	@Override
	public IBlockState getStateFromMeta(int meta) {
		// meta 0..7 maps to radius 1..8.
		return this.getDefaultState().withProperty(RADIUS, (meta & 7) + 1);
	}
	@Override
	public int getMetaFromState(IBlockState state) {
		return state.getValue(RADIUS) - 1;
	}
	@Override
	public IBlockState getExtendedState(IBlockState state, IBlockAccess world, BlockPos pos) {
		if (state instanceof IExtendedBlockState) {
			IExtendedBlockState retval = (IExtendedBlockState) state;
			int thisRadius = getRadius(state);
			// GROUNDED: is there solid support directly below?
			retval = retval.withProperty(GROUNDED, world.isSideSolid(pos.down(), EnumFacing.UP, false));
			// Fill in connection radius/level for each horizontal side that actually connects.
			for(EnumFacing dir: EnumFacing.HORIZONTALS) {
				RootConnection conn = getSideConnectionRadius(world, pos, thisRadius, dir);
				if(conn != null) {
					int horIndex = dir.getHorizontalIndex();
					retval = retval.withProperty(CONNECTIONS[horIndex], conn.radius).withProperty(LEVELS[horIndex], conn.level);
				}
			}
			return retval;
		}
		return state;
	}
	/** @return this block's radius, or 0 when the state belongs to some other block. */
	public int getRadius(IBlockState blockState) {
		return blockState.getBlock() == this ? blockState.getValue(RADIUS) : 0;
	}
	// originDir is currently unused; kept for interface parity with branch blocks.
	public int setRadius(World world, BlockPos pos, int radius, EnumFacing originDir, int flags) {
		world.setBlockState(pos, getStateForRadius(radius), flags);
		return radius;
	}
	public IBlockState getStateForRadius(int radius) {
		return getDefaultState().withProperty(RADIUS, MathHelper.clamp(radius, 0, getMaxRadius()));
	}
	public int getMaxRadius() {
		return RADMAX_NORMAL;
	}
	/** Height of the root's box in 16ths of a block; a root is twice as wide as it is tall. */
	public int getRadialHeight(int radius) {
		return radius * 2;
	}
	///////////////////////////////////////////
	// RENDERING
	///////////////////////////////////////////
	@Override
	public boolean isOpaqueCube(IBlockState state) {
		return false;
	}
	@Override
	public boolean isFullCube(IBlockState state) {
		return false;
	}
	@Override
	public boolean shouldSideBeRendered(IBlockState blockState, IBlockAccess blockAccess, BlockPos pos, EnumFacing side) {
		return true;
	}
	@Override
	public BlockFaceShape getBlockFaceShape(IBlockAccess worldIn, IBlockState state, BlockPos pos, EnumFacing face) {
		return BlockFaceShape.UNDEFINED;//This prevents fences and walls from attempting to connect to branches.
	}
	///////////////////////////////////////////
	// PHYSICAL BOUNDS
	///////////////////////////////////////////
	// This is only so effective because the center of the player must be inside the block that contains the tree trunk.
	// The result is that only thin branches and trunks can be climbed
	@Override
	public boolean isLadder(IBlockState state, IBlockAccess world, BlockPos pos, EntityLivingBase entity) {
		return false;
	}
	// Neighbor scan order for locating an adjacent thick trunk: cardinals first, then diagonals.
	public static final Surround sidesFirst[] = new Surround[] { Surround.N, Surround.S, Surround.W, Surround.E, Surround.NW, Surround.NE, Surround.SW, Surround.SE };
	@Override
	public AxisAlignedBB getBoundingBox(IBlockState state, IBlockAccess blockAccess, BlockPos pos) {
		if (state.getBlock() != this) {
			return NULL_AABB;
		}
		// If a thick trunk is adjacent, merge (the in-block slice of) its box into ours.
		AxisAlignedBB trunkBB = null;
		for(Surround dir: sidesFirst ) {
			BlockPos dPos = pos.add(dir.getOffset());
			IBlockState testState = blockAccess.getBlockState(dPos);
			if(testState.getBlock() instanceof BlockBranchThick) {
				BlockBranchThick trunk = (BlockBranchThick) testState.getBlock();
				trunkBB = trunk.getBoundingBox(testState, blockAccess, dPos).offset(dir.getOffsetPos()).intersect(FULL_BLOCK_AABB);
				break;//There should only be one trunk in proximity
			}
		}
		// Start with a centered box sized by this root's radius, then stretch it toward
		// each connected side; HIGH connections additionally raise the top a full block.
		int thisRadius = getRadius(state);
		int radialHeight = getRadialHeight(thisRadius);
		double radius = thisRadius / 16.0;
		double gap = 0.5 - radius;
		AxisAlignedBB aabb = new AxisAlignedBB(-radius, 0, -radius, radius, radialHeight / 16d, radius);
		for (EnumFacing dir : EnumFacing.VALUES) {
			RootConnection conn = getSideConnectionRadius(blockAccess, pos, thisRadius, dir);
			if (conn != null) {
				aabb = aabb.expand(dir.getFrontOffsetX() * gap, dir.getFrontOffsetY() * gap, dir.getFrontOffsetZ() * gap);
				if(conn.level == ConnectionLevel.HIGH) {
					aabb = aabb.setMaxY(1.0 + (radialHeight / 16d));
				}
			}
		}
		aabb = aabb.offset(0.5, 0.0, 0.5);
		return trunkBB != null ? trunkBB.union(aabb) : aabb;
	}
	@Override
	public void addCollisionBoxToList(IBlockState state, World world, BlockPos pos, AxisAlignedBB entityBox, List<AxisAlignedBB> collidingBoxes, Entity entityIn, boolean p_185477_7_) {
		// Falling-tree entities pass through roots freely.
		if(entityIn instanceof EntityFallingTree) {
			return;
		}
		// One collision box per connected side; each sized by the smaller of the two radii.
		boolean connectionMade = false;
		int thisRadius = getRadius(state);
		for (EnumFacing dir : EnumFacing.HORIZONTALS) {
			RootConnection conn = getSideConnectionRadius(world, pos, thisRadius, dir);
			if (conn != null) {
				connectionMade = true;
				int r = MathHelper.clamp(conn.radius, 1, thisRadius);
				double radius = r / 16.0;
				double radialHeight = getRadialHeight(r) / 16.0;
				double gap = 0.5 - radius;
				AxisAlignedBB aabb = new AxisAlignedBB(-radius, 0, -radius, radius, radialHeight, radius);
				aabb = aabb.expand(dir.getFrontOffsetX() * gap, 0, dir.getFrontOffsetZ() * gap).offset(0.5, 0.0, 0.5);
				addCollisionBoxToList(pos, entityBox, collidingBoxes, aabb);
			}
		}
		// No connections: fall back to a single centered box.
		if(!connectionMade) {
			double radius = thisRadius / 16.0;
			double radialHeight = getRadialHeight(thisRadius) / 16.0;
			AxisAlignedBB aabb = new AxisAlignedBB(0.5 - radius, 0, 0.5 - radius, 0.5 + radius, radialHeight, 0.5 + radius);
			addCollisionBoxToList(pos, entityBox, collidingBoxes, aabb);
		}
	}
	/**
	 * Determines how (and whether) this root connects to its neighbor on the given horizontal side.
	 * A connection may step one block up (HIGH, when climbing over a solid neighbor) or one block
	 * down (LOW, over air); a thick branch (radius >= 8) at the same level also counts.
	 * @return the connection, or null when nothing connects on that side.
	 */
	protected RootConnection getSideConnectionRadius(IBlockAccess blockAccess, BlockPos pos, int radius, EnumFacing side) {
		if(side.getAxis().isHorizontal()) {
			BlockPos dPos = pos.offset(side);
			IBlockState blockState = blockAccess.getBlockState(dPos);
			IBlockState upState = blockAccess.getBlockState(pos.up());
			ConnectionLevel level = (upState.getBlock() == Blocks.AIR && blockState.isNormalCube()) ? ConnectionLevel.HIGH : (blockState.getBlock() == Blocks.AIR ? ConnectionLevel.LOW : ConnectionLevel.MID);
			if(level != ConnectionLevel.MID) {
				// up() with a negative yOffset steps downward for LOW connections.
				dPos = dPos.up(level.yOffset);
				blockState = blockAccess.getBlockState(dPos);
			}
			if(blockState.getBlock() instanceof BlockSurfaceRoot) {
				return new RootConnection(level, ((BlockSurfaceRoot)blockState.getBlock()).getRadius(blockState));
			} else
			if(level == ConnectionLevel.MID && TreeHelper.isBranch(blockState) && TreeHelper.getTreePart(blockState).getRadius(blockState) >= 8) {
				return new RootConnection(ConnectionLevel.MID, 8);
			}
		}
		return null;
	}
	@Override
	public void breakBlock(World worldIn, BlockPos pos, IBlockState state) {
		// If a trunk shell sits above, let it drop into this position when the root breaks.
		IBlockState upstate = worldIn.getBlockState(pos.up());
		if(upstate.getBlock() == ModBlocks.blockTrunkShell) {
			worldIn.setBlockState(pos, upstate);
		}
		// Notify diagonal-down neighbors so dependent roots can re-validate themselves.
		for(EnumFacing dir : EnumFacing.HORIZONTALS) {
			BlockPos dPos = pos.offset(dir).down();
			worldIn.getBlockState(dPos).neighborChanged(worldIn, dPos, this, pos);
		}
	}
	@Override
	public void neighborChanged(IBlockState state, World world, BlockPos pos, Block blockIn, BlockPos neighbor) {
		if(!canBlockStay(world, pos, state)) {
			world.setBlockToAir(pos);
		}
	}
	/**
	 * A root may stay only while it tapers away from a larger connection: on solid ground it needs
	 * a larger-radius neighbor; unsupported, it also must not have any same-level (MID) connection.
	 */
	protected boolean canBlockStay(World world, BlockPos pos, IBlockState state) {
		BlockPos below = pos.down();
		IBlockState belowState = world.getBlockState(below);
		int thisRadius = getRadius(state);
		if(belowState.isNormalCube()) {//If a branch is sitting on a solid block
			for(EnumFacing dir : EnumFacing.HORIZONTALS) {
				RootConnection conn = getSideConnectionRadius(world, pos, thisRadius, dir);
				if(conn != null && conn.radius > thisRadius) {
					return true;
				}
			}
		} else {//If the branch has no solid block under it
			boolean connections = false;
			for(EnumFacing dir : EnumFacing.HORIZONTALS) {
				RootConnection conn = getSideConnectionRadius(world, pos, thisRadius, dir);
				if(conn != null) {
					if(conn.level == ConnectionLevel.MID) {
						return false;
					}
					if(conn.radius > thisRadius) {
						connections = true;
					}
				}
			}
			return connections;
		}
		return false;
	}
}
| |
package com.afollestad.materialdialogs.prefs;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Build;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
import android.preference.ListPreference;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.util.AttributeSet;
import android.view.View;
import com.afollestad.materialdialogs.DialogAction;
import com.afollestad.materialdialogs.MaterialDialog;
import com.afollestad.materialdialogs.commons.R;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
/**
* @author Marc Holder Kluver (marchold), Aidan Follestad (afollestad)
*/
public class MaterialListPreference extends ListPreference {
    private Context context;
    // The material-styled dialog standing in for the framework ListPreference dialog.
    private MaterialDialog mDialog;
    public MaterialListPreference(Context context) {
        super(context);
        init(context);
    }
    public MaterialListPreference(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }
    private void init(Context context) {
        this.context = context;
        setLayoutResource(R.layout.md_preference_custom);
        // On very old platforms the default widget layout renders incorrectly; suppress it.
        if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.GINGERBREAD_MR1)
            setWidgetLayoutResource(0);
    }
    @Override
    public void setEntries(CharSequence[] entries) {
        super.setEntries(entries);
        // Keep an already-built dialog in sync with the new entries.
        if (mDialog != null)
            mDialog.setItems(entries);
    }
    @Override
    public Dialog getDialog() {
        return mDialog;
    }
    @Override
    protected void showDialog(Bundle state) {
        if (getEntries() == null || getEntryValues() == null) {
            throw new IllegalStateException(
                    "ListPreference requires an entries array and an entryValues array.");
        }
        // Preselect the row matching the currently persisted value.
        int preselect = findIndexOfValue(getValue());
        MaterialDialog.Builder builder = new MaterialDialog.Builder(context)
                .title(getDialogTitle())
                .content(getDialogMessage())
                .icon(getDialogIcon())
                .dismissListener(this)
                // Route material-dialog button presses back through the framework's
                // DialogInterface constants so ListPreference behaves as usual.
                .onAny(new MaterialDialog.SingleButtonCallback() {
                    @Override
                    public void onClick(@NonNull MaterialDialog dialog, @NonNull DialogAction which) {
                        switch (which) {
                            default:
                                MaterialListPreference.this.onClick(dialog, DialogInterface.BUTTON_POSITIVE);
                                break;
                            case NEUTRAL:
                                MaterialListPreference.this.onClick(dialog, DialogInterface.BUTTON_NEUTRAL);
                                break;
                            case NEGATIVE:
                                MaterialListPreference.this.onClick(dialog, DialogInterface.BUTTON_NEGATIVE);
                                break;
                        }
                    }
                })
                // Legacy callback kept alongside onAny, presumably for older dialog versions
                // that only invoke ButtonCallback — TODO confirm both paths are still needed.
                .callback(new MaterialDialog.ButtonCallback() {
                    @Override
                    public void onNeutral(MaterialDialog dialog) {
                        onClick(dialog, DialogInterface.BUTTON_NEUTRAL);
                    }
                    @Override
                    public void onNegative(MaterialDialog dialog) {
                        onClick(dialog, DialogInterface.BUTTON_NEGATIVE);
                    }
                    @Override
                    public void onPositive(MaterialDialog dialog) {
                        onClick(dialog, DialogInterface.BUTTON_POSITIVE);
                    }
                })
                .negativeText(getNegativeButtonText())
                .items(getEntries())
                .autoDismiss(true) // immediately close the dialog after selection
                .itemsCallbackSingleChoice(preselect, new MaterialDialog.ListCallbackSingleChoice() {
                    @Override
                    public boolean onSelection(MaterialDialog dialog, View itemView, int which, CharSequence text) {
                        onClick(null, DialogInterface.BUTTON_POSITIVE);
                        if (which >= 0 && getEntryValues() != null) {
                            try {
                                // ListPreference keeps the chosen row in a private field; write it
                                // reflectively so the superclass persists the right value on dismiss.
                                Field clickedIndex = ListPreference.class.getDeclaredField("mClickedDialogEntryIndex");
                                clickedIndex.setAccessible(true);
                                clickedIndex.set(MaterialListPreference.this, which);
                            } catch (Exception e) {
                                e.printStackTrace();
                            }
                        }
                        return true;
                    }
                });
        final View contentView = onCreateDialogView();
        if (contentView != null) {
            onBindDialogView(contentView);
            builder.customView(contentView, false);
        } else {
            // NOTE(review): content(getDialogMessage()) was already set in the builder chain
            // above; this second call re-applies the same value — confirm it is intentional.
            builder.content(getDialogMessage());
        }
        try {
            // registerOnActivityDestroyListener is hidden; reflect so the dialog is
            // dismissed when the hosting activity dies (matches framework behavior).
            PreferenceManager pm = getPreferenceManager();
            Method method = pm.getClass().getDeclaredMethod(
                    "registerOnActivityDestroyListener",
                    PreferenceManager.OnActivityDestroyListener.class);
            method.setAccessible(true);
            method.invoke(pm, this);
        } catch (Exception ignored) {
        }
        mDialog = builder.build();
        if (state != null)
            mDialog.onRestoreInstanceState(state);
        mDialog.show();
    }
    @Override
    public void onDismiss(DialogInterface dialog) {
        super.onDismiss(dialog);
        try {
            // Mirror of the hidden registration above; best-effort unregister.
            PreferenceManager pm = getPreferenceManager();
            Method method = pm.getClass().getDeclaredMethod(
                    "unregisterOnActivityDestroyListener",
                    PreferenceManager.OnActivityDestroyListener.class);
            method.setAccessible(true);
            method.invoke(pm, this);
        } catch (Exception ignored) {
        }
    }
    @Override
    public void onActivityDestroy() {
        super.onActivityDestroy();
        if (mDialog != null && mDialog.isShowing())
            mDialog.dismiss();
    }
    @Override
    protected Parcelable onSaveInstanceState() {
        final Parcelable superState = super.onSaveInstanceState();
        Dialog dialog = getDialog();
        if (dialog == null || !dialog.isShowing()) {
            return superState;
        }
        // Dialog is up: persist its state so it can be re-shown after rotation.
        final SavedState myState = new SavedState(superState);
        myState.isDialogShowing = true;
        myState.dialogBundle = dialog.onSaveInstanceState();
        return myState;
    }
    @Override
    protected void onRestoreInstanceState(Parcelable state) {
        if (state == null || !state.getClass().equals(SavedState.class)) {
            // Didn't save state for us in onSaveInstanceState
            super.onRestoreInstanceState(state);
            return;
        }
        SavedState myState = (SavedState) state;
        super.onRestoreInstanceState(myState.getSuperState());
        if (myState.isDialogShowing) {
            showDialog(myState.dialogBundle);
        }
    }
    // From DialogPreference
    private static class SavedState extends BaseSavedState {
        boolean isDialogShowing;
        Bundle dialogBundle;
        public SavedState(Parcel source) {
            super(source);
            isDialogShowing = source.readInt() == 1;
            dialogBundle = source.readBundle();
        }
        @Override
        public void writeToParcel(@NonNull Parcel dest, int flags) {
            super.writeToParcel(dest, flags);
            dest.writeInt(isDialogShowing ? 1 : 0);
            dest.writeBundle(dialogBundle);
        }
        public SavedState(Parcelable superState) {
            super(superState);
        }
        public static final Parcelable.Creator<SavedState> CREATOR =
                new Parcelable.Creator<SavedState>() {
                    public SavedState createFromParcel(Parcel in) {
                        return new SavedState(in);
                    }
                    public SavedState[] newArray(int size) {
                        return new SavedState[size];
                    }
                };
    }
}
| |
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Set;
/**
* Merge policy that tries to balance not doing large
* segment merges with not accumulating too many segments in
* the index, to provide for better performance in near
* real-time setting.
*
* <p>This is based on code from zoie, described in more detail
* at http://code.google.com/p/zoie/wiki/ZoieMergePolicy.</p>
*/
public class BalancedSegmentMergePolicy extends LogByteSizeMergePolicy
{
  // Default count of "large" segments kept out of routine small-segment merging.
  public static final int DEFAULT_NUM_LARGE_SEGMENTS = 10;
  // When true, extra single-segment merges are scheduled to expunge deletes.
  private boolean _partialExpunge = false;
  private int _numLargeSegments = DEFAULT_NUM_LARGE_SEGMENTS;
  // Cap on the number of small segments before they get folded together.
  private int _maxSmallSegments = 2 * LogMergePolicy.DEFAULT_MERGE_FACTOR;
  // Overall segment-count budget: large budget plus small budget.
  private int _maxSegments = _numLargeSegments + _maxSmallSegments;
  /** Creates the policy bound to the given writer, with all defaults in effect. */
  public BalancedSegmentMergePolicy(IndexWriter writer) {
    super(writer);
  }
public void setMergePolicyParams(MergePolicyParams params) {
if (params!=null) {
setPartialExpunge(params._doPartialExpunge);
setNumLargeSegments(params._numLargeSegments);
setMaxSmallSegments(params._maxSmallSegments);
setPartialExpunge(params._doPartialExpunge);
setMergeFactor(params._mergeFactor);
setUseCompoundFile(params._useCompoundFile);
setMaxMergeDocs(params._maxMergeDocs);
}
}
  /**
   * Effective segment size: byte size discounted by the fraction of deleted documents,
   * so mostly-deleted segments look small and become attractive merge candidates.
   */
  @Override
  protected long size(SegmentInfo info) throws IOException {
    long byteSize = info.sizeInBytes();
    float delRatio = (info.docCount <= 0 ? 0.0f : ((float)info.getDelCount() / (float)info.docCount));
    return (info.docCount <= 0 ? byteSize : (long)((1.0f - delRatio) * byteSize));
  }
  /** Enables/disables scheduling of delete-expunging single-segment merges. */
  public void setPartialExpunge(boolean doPartialExpunge) {
    _partialExpunge = doPartialExpunge;
  }
  /** @return whether delete-expunging merges are scheduled. */
  public boolean getPartialExpunge() {
    return _partialExpunge;
  }
  /**
   * Sets how many top segments are treated as "large" (exempt from routine merging).
   * @throws IllegalArgumentException if fewer than 2.
   */
  public void setNumLargeSegments(int numLargeSegments) {
    if (numLargeSegments < 2) {
      throw new IllegalArgumentException("numLargeSegments cannot be less than 2");
    }
    _numLargeSegments = numLargeSegments;
    // NOTE(review): this recomputes the budget with 2 * getMergeFactor() while
    // setMaxSmallSegments() uses _maxSmallSegments — confirm the asymmetry is intentional.
    _maxSegments = _numLargeSegments + 2 * getMergeFactor();
  }
  /** @return the configured number of large segments. */
  public int getNumLargeSegments() {
    return _numLargeSegments;
  }
  /**
   * Sets the cap on small segments and recomputes the total segment budget.
   * @throws IllegalArgumentException if smaller than the merge factor.
   */
  public void setMaxSmallSegments(int maxSmallSegments) {
    if (maxSmallSegments < getMergeFactor()) {
      throw new IllegalArgumentException("maxSmallSegments cannot be less than mergeFactor");
    }
    _maxSmallSegments = maxSmallSegments;
    _maxSegments = _numLargeSegments + _maxSmallSegments;
  }
  /** @return the configured cap on small segments. */
  public int getMaxSmallSegments() {
    return _maxSmallSegments;
  }
  /** Overridden to keep _maxSmallSegments >= mergeFactor (and the budget consistent). */
  @Override
  public void setMergeFactor(int mergeFactor) {
    super.setMergeFactor(mergeFactor);
    if (_maxSmallSegments < getMergeFactor()) {
      _maxSmallSegments = getMergeFactor();
      _maxSegments = _numLargeSegments + _maxSmallSegments;
    }
  }
  /**
   * @return true when the index already satisfies the optimize request: at most maxNumSegments
   *     of the requested segments remain and, if exactly one remains, it is itself optimized.
   */
  private boolean isOptimized(SegmentInfos infos, IndexWriter writer, int maxNumSegments, Set<SegmentInfo> segmentsToOptimize) throws IOException {
    final int numSegments = infos.size();
    int numToOptimize = 0;
    SegmentInfo optimizeInfo = null;
    // Stop counting once we exceed the budget; the exact count no longer matters.
    for(int i=0;i<numSegments && numToOptimize <= maxNumSegments;i++) {
      final SegmentInfo info = infos.info(i);
      if (segmentsToOptimize.contains(info)) {
        numToOptimize++;
        optimizeInfo = info;
      }
    }
    return numToOptimize <= maxNumSegments &&
      (numToOptimize != 1 || isOptimized(writer, optimizeInfo));
  }
  /**
   * @return true when a single segment needs no further work: no deletions, no separate
   *     norms, stored in the writer's directory, and matching the compound-file setting.
   */
  private boolean isOptimized(IndexWriter writer, SegmentInfo info)
    throws IOException {
    return !info.hasDeletions() &&
      !info.hasSeparateNorms() &&
      info.dir == writer.getDirectory() &&
      info.getUseCompoundFile() == getUseCompoundFile();
  }
  /**
   * Plans merges for an explicit optimize request. For maxNumSegments == 1 the whole range
   * is merged; otherwise a balanced partition is computed over the segments flagged for
   * optimization. Returns null when the index is already sufficiently optimized.
   */
  @Override
  public MergeSpecification findMergesForOptimize(SegmentInfos infos, int maxNumSegments, Set<SegmentInfo> segmentsToOptimize) throws IOException {
    assert maxNumSegments > 0;
    MergeSpecification spec = null;
    if (!isOptimized(infos, writer, maxNumSegments, segmentsToOptimize)) {
      // Find the newest (rightmost) segment that needs to
      // be optimized (other segments may have been flushed
      // since optimize started):
      int last = infos.size();
      while(last > 0) {
        final SegmentInfo info = infos.info(--last);
        if (segmentsToOptimize.contains(info)) {
          last++;
          break;
        }
      }
      if (last > 0) {
        if (maxNumSegments == 1) {
          // Since we must optimize down to 1 segment, the
          // choice is simple:
          boolean useCompoundFile = getUseCompoundFile();
          if (last > 1 || !isOptimized(writer, infos.info(0))) {
            spec = new MergeSpecification();
            spec.add(new OneMerge(infos.range(0, last), useCompoundFile));
          }
        } else if (last > maxNumSegments) {
          // find most balanced merges
          spec = findBalancedMerges(infos, last, maxNumSegments, _partialExpunge);
        }
      }
    }
    return spec;
  }
  /**
   * Partitions the first infoLen segments into maxNumSegments groups whose sizes are as even
   * as possible (dynamic programming over the variance table), emitting one merge per
   * multi-segment group. When partialExpunge is set, the untouched singleton with the most
   * deletions is additionally merged with itself to expunge deletes.
   */
  private MergeSpecification findBalancedMerges(SegmentInfos infos, int infoLen, int maxNumSegments, boolean partialExpunge)
    throws IOException {
    if (infoLen <= maxNumSegments) return null;
    MergeSpecification spec = new MergeSpecification();
    boolean useCompoundFile = getUseCompoundFile();
    // use Viterbi algorithm to find the best segmentation.
    // we will try to minimize the size variance of resulting segments.
    double[][] variance = createVarianceTable(infos, infoLen, maxNumSegments);
    final int maxMergeSegments = infoLen - maxNumSegments + 1;
    // sumVariance[j]: best total variance using j extra segments so far;
    // backLink[i][j]: the split choice that achieved it (for traceback).
    double[] sumVariance = new double[maxMergeSegments];
    int[][] backLink = new int[maxNumSegments][maxMergeSegments];
    for(int i = (maxMergeSegments - 1); i >= 0; i--) {
      sumVariance[i] = variance[0][i];
      backLink[0][i] = 0;
    }
    for(int i = 1; i < maxNumSegments; i++) {
      for(int j = (maxMergeSegments - 1); j >= 0; j--) {
        double minV = Double.MAX_VALUE;
        int minK = 0;
        for(int k = j; k >= 0; k--) {
          double v = sumVariance[k] + variance[i + k][j - k];
          if(v < minV) {
            minV = v;
            minK = k;
          }
        }
        sumVariance[j] = minV;
        backLink[i][j] = minK;
      }
    }
    // now, trace back the back links to find all merges,
    // also find a candidate for partial expunge if requested
    int mergeEnd = infoLen;
    int prev = maxMergeSegments - 1;
    int expungeCandidate = -1;
    int maxDelCount = 0;
    for(int i = maxNumSegments - 1; i >= 0; i--) {
      prev = backLink[i][prev];
      int mergeStart = i + prev;
      if((mergeEnd - mergeStart) > 1) {
        spec.add(new OneMerge(infos.range(mergeStart, mergeEnd), useCompoundFile));
      } else {
        if(partialExpunge) {
          SegmentInfo info = infos.info(mergeStart);
          int delCount = info.getDelCount();
          if(delCount > maxDelCount) {
            expungeCandidate = mergeStart;
            maxDelCount = delCount;
          }
        }
      }
      mergeEnd = mergeStart;
    }
    if(partialExpunge && maxDelCount > 0) {
      // expunge deletes
      spec.add(new OneMerge(infos.range(expungeCandidate, expungeCandidate + 1), useCompoundFile));
    }
    return spec;
  }
  /**
   * Builds the table used by the balancing DP: variance[i][j] is the squared relative
   * deviation from the ideal segment size of a merge covering segments i..i+j (inclusive);
   * cells that would run off the end are marked NaN.
   */
  private double[][] createVarianceTable(SegmentInfos infos, int last, int maxNumSegments) throws IOException {
    int maxMergeSegments = last - maxNumSegments + 1;
    double[][] variance = new double[last][maxMergeSegments];
    // compute the optimal segment size
    long optSize = 0;
    long[] sizeArr = new long[last];
    for(int i = 0; i < sizeArr.length; i++) {
      sizeArr[i] = size(infos.info(i));
      optSize += sizeArr[i];
    }
    // NOTE(review): optSize can be 0 if every segment reports size 0, which would make the
    // residual division below divide by zero — presumably never happens in practice; confirm.
    optSize = (optSize / maxNumSegments);
    for(int i = 0; i < last; i++) {
      long size = 0;
      for(int j = 0; j < maxMergeSegments; j++) {
        if((i + j) < last) {
          size += sizeArr[i + j];
          double residual = ((double)size/(double)optSize) - 1.0d;
          variance[i][j] = residual * residual;
        } else {
          variance[i][j] = Double.NaN;
        }
      }
    }
    return variance;
  }
  /**
   * Finds merges that expunge deleted documents.  The small-segment tier
   * (beyond the first {@code _numLargeSegments}) is delegated to the superclass
   * policy; each large segment that carries deletions is rewritten in place via
   * a singleton merge.
   *
   * @param infos all current segments
   * @return the merges to perform (possibly empty, never null)
   */
  @Override
  public MergeSpecification findMergesToExpungeDeletes(SegmentInfos infos)
    throws CorruptIndexException, IOException {
    final int numSegs = infos.size();
    final int numLargeSegs = (numSegs < _numLargeSegments ? numSegs : _numLargeSegments);
    MergeSpecification spec = null;
    if(numLargeSegs < numSegs) {
      // let the base (log) policy handle the small-segment tier
      SegmentInfos smallSegments = infos.range(numLargeSegs, numSegs);
      spec = super.findMergesToExpungeDeletes(smallSegments);
    }
    if(spec == null) spec = new MergeSpecification();
    for(int i = 0; i < numLargeSegs; i++) {
      SegmentInfo info = infos.info(i);
      if(info.hasDeletions()) {
        // singleton merge rewrites the segment without its deleted docs
        spec.add(new OneMerge(infos.range(i, i + 1), getUseCompoundFile()));
      }
    }
    return spec;
  }
  /**
   * Balanced merge selection.  The first {@code _numLargeSegments} segments form
   * the "large" tier and the remainder the "small" tier.  Depending on relative
   * sizes, either the small tier is promoted into the large tier via a balanced
   * merge, the small tier is partially collapsed to respect {@code _maxSegments},
   * or the log merge policy handles the small tier alone.
   *
   * @param infos all current segments, large tier first
   * @return the merges to perform, or null when nothing should be merged
   * @throws IOException if segment sizes cannot be read
   */
  @Override
  public MergeSpecification findMerges(SegmentInfos infos) throws IOException {
    final int numSegs = infos.size();
    final int numLargeSegs = _numLargeSegments;
    if (numSegs <= numLargeSegs) {
      return null;
    }
    long totalLargeSegSize = 0;
    long totalSmallSegSize = 0;
    SegmentInfo info;
    // compute the total size of large segments
    for(int i = 0; i < numLargeSegs; i++) {
      info = infos.info(i);
      totalLargeSegSize += size(info);
    }
    // compute the total size of small segments
    for(int i = numLargeSegs; i < numSegs; i++) {
      info = infos.info(i);
      totalSmallSegSize += size(info);
    }
    // NOTE(review): assumes numLargeSegs > 1; a configured value of 1 would
    // divide by zero here — confirm _numLargeSegments is validated elsewhere
    long targetSegSize = (totalLargeSegSize / (numLargeSegs - 1));
    if(targetSegSize <= totalSmallSegSize) {
      // the total size of small segments is big enough,
      // promote the small segments to a large segment and do balanced merge,
      if(totalSmallSegSize < targetSegSize * 2) {
        MergeSpecification spec = findBalancedMerges(infos, numLargeSegs, (numLargeSegs - 1), _partialExpunge);
        if(spec == null) spec = new MergeSpecification(); // should not happen
        spec.add(new OneMerge(infos.range(numLargeSegs, numSegs), getUseCompoundFile()));
        return spec;
      } else {
        return findBalancedMerges(infos, numSegs, numLargeSegs, _partialExpunge);
      }
    } else if (_maxSegments < numSegs) {
      // we have more than _maxSegments, merge small segments smaller than targetSegSize/4
      MergeSpecification spec = new MergeSpecification();
      int startSeg = numLargeSegs;
      long sizeThreshold = (targetSegSize / 4);
      // skip leading small-tier segments that already meet the size threshold
      while(startSeg < numSegs) {
        info = infos.info(startSeg);
        if(size(info) < sizeThreshold) break;
        startSeg++;
      }
      spec.add(new OneMerge(infos.range(startSeg, numSegs), getUseCompoundFile()));
      return spec;
    } else {
      // apply the log merge policy to small segments.
      SegmentInfos smallSegments = infos.range(numLargeSegs, numSegs);
      MergeSpecification spec = super.findMerges(smallSegments);
      if(_partialExpunge) {
        // opportunistically rewrite the large segment with the most deletions
        OneMerge expunge = findOneSegmentToExpunge(infos, numLargeSegs);
        if(expunge != null) {
          if(spec == null) spec = new MergeSpecification();
          spec.add(expunge);
        }
      }
      return spec;
    }
  }
private OneMerge findOneSegmentToExpunge(SegmentInfos infos, int maxNumSegments) throws IOException {
int expungeCandidate = -1;
int maxDelCount = 0;
for(int i = maxNumSegments - 1; i >= 0; i--) {
SegmentInfo info = infos.info(i);
int delCount = info.getDelCount();
if (delCount > maxDelCount) {
expungeCandidate = i;
maxDelCount = delCount;
}
}
if (maxDelCount > 0) {
return new OneMerge(infos.range(expungeCandidate, expungeCandidate + 1), getUseCompoundFile());
}
return null;
}
  /**
   * Tunable parameters for the balanced segment merge policy.
   */
  public static class MergePolicyParams {
    private int _numLargeSegments;     // size of the top ("large") segment tier
    private int _maxSmallSegments;     // total-segment cap before forced small-tier merging
    private boolean _doPartialExpunge; // opportunistically rewrite one deletion-heavy segment
    private int _mergeFactor;          // log-merge factor applied to the small tier
    private boolean _useCompoundFile;  // whether merged segments use the compound file format
    private int _maxMergeDocs;         // doc-count cap per merged segment
    public MergePolicyParams() {
      _useCompoundFile = true;
      _doPartialExpunge = false;
      _numLargeSegments = DEFAULT_NUM_LARGE_SEGMENTS;
      // default small-segment cap: large tier plus twice the merge factor
      _maxSmallSegments = 2 * LogMergePolicy.DEFAULT_MERGE_FACTOR;
      _maxSmallSegments = _numLargeSegments + _maxSmallSegments;
      _mergeFactor = LogMergePolicy.DEFAULT_MERGE_FACTOR;
      _maxMergeDocs = LogMergePolicy.DEFAULT_MAX_MERGE_DOCS;
    }
    public void setNumLargeSegments(int numLargeSegments) {
      _numLargeSegments = numLargeSegments;
    }
    public int getNumLargeSegments() {
      return _numLargeSegments;
    }
    public void setMaxSmallSegments(int maxSmallSegments) {
      _maxSmallSegments = maxSmallSegments;
    }
    public int getMaxSmallSegments() {
      return _maxSmallSegments;
    }
    public void setPartialExpunge(boolean doPartialExpunge) {
      _doPartialExpunge = doPartialExpunge;
    }
    public boolean getPartialExpunge() {
      return _doPartialExpunge;
    }
    public void setMergeFactor(int mergeFactor) {
      _mergeFactor = mergeFactor;
    }
    public int getMergeFactor() {
      return _mergeFactor;
    }
    public void setMaxMergeDocs(int maxMergeDocs) {
      _maxMergeDocs = maxMergeDocs;
    }
    public int getMaxMergeDocs() {
      return _maxMergeDocs;
    }
    public void setUseCompoundFile(boolean useCompoundFile) {
      _useCompoundFile = useCompoundFile;
    }
    public boolean isUseCompoundFile() {
      return _useCompoundFile;
    }
  }
}
| |
package com.suscipio_solutions.consecro_mud.Libraries;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CMMsg;
import com.suscipio_solutions.consecro_mud.Common.interfaces.Session;
import com.suscipio_solutions.consecro_mud.Common.interfaces.Social;
import com.suscipio_solutions.consecro_mud.Items.interfaces.Item;
import com.suscipio_solutions.consecro_mud.Items.interfaces.Wearable;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.SocialsList;
import com.suscipio_solutions.consecro_mud.Locales.interfaces.Room;
import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB;
import com.suscipio_solutions.consecro_mud.core.CMClass;
import com.suscipio_solutions.consecro_mud.core.CMFile;
import com.suscipio_solutions.consecro_mud.core.CMLib;
import com.suscipio_solutions.consecro_mud.core.CMParms;
import com.suscipio_solutions.consecro_mud.core.CMProps;
import com.suscipio_solutions.consecro_mud.core.CMStrings;
import com.suscipio_solutions.consecro_mud.core.CMath;
import com.suscipio_solutions.consecro_mud.core.Log;
import com.suscipio_solutions.consecro_mud.core.Resources;
import com.suscipio_solutions.consecro_mud.core.collections.SHashtable;
import com.suscipio_solutions.consecro_mud.core.interfaces.Environmental;
// requires nothing to load
public class Socials extends StdLibrary implements SocialsList
{
	// Library identifier used by CMLib registration and lookups.
	@Override public String ID(){return "Socials";}
	/**
	 * Parses raw lines from the socials definition file into the given hash.
	 * Each line is tab-delimited:
	 *   flags TAB name TAB you-see TAB third-party-sees TAB target-sees TAB no-target TAB msp-file
	 * Flag char 0 selects the source message code; flag char 1 selects the
	 * target/others message codes.  Lines lacking a tab after both the name and
	 * the you-see field are silently dropped (no put() occurs).
	 *
	 * @param soc the base-name to variation-list map to fill
	 * @param lines the raw file lines
	 */
	@Override
	public void putSocialsInHash(final Map<String,List<Social>> soc, final List<String> lines)
	{
		for(int v=0;v<lines.size();v++)
		{
			String getline=lines.get(v);
			int x=getline.indexOf("\t");
			if(x>=0)
			{
				final Social socobj=(Social)CMClass.getCommon("DefaultSocial");
				final String s=getline.substring(0,x).toUpperCase();
				// first flag char: the kind of message the actor emits
				if(s.length()>0)
				switch(s.charAt(0))
				{
				case 'W':
					socobj.setSourceCode(CMMsg.MSG_SPEAK);
					break;
				case 'M':
					socobj.setSourceCode(CMMsg.MSG_HANDS);
					break;
				case 'S':
					socobj.setSourceCode(CMMsg.MSG_NOISE);
					break;
				case 'O':
					socobj.setSourceCode(CMMsg.MSG_NOISYMOVEMENT);
					break;
				default:
					socobj.setSourceCode(CMMsg.MSG_HANDS);
					break;
				}
				// second flag char: the kind of message others/the target receive
				if(s.length()>1)
				switch(s.charAt(1))
				{
				case 'T':
					socobj.setOthersCode(CMMsg.MSG_HANDS);
					socobj.setTargetCode(CMMsg.MSG_HANDS);
					break;
				case 'S':
					socobj.setOthersCode(CMMsg.MSG_NOISE);
					socobj.setTargetCode(CMMsg.MSG_NOISE);
					break;
				case 'W':
					socobj.setOthersCode(CMMsg.MSG_SPEAK);
					socobj.setTargetCode(CMMsg.MSG_SPEAK);
					break;
				case 'V':
					socobj.setOthersCode(CMMsg.MSG_NOISYMOVEMENT);
					socobj.setTargetCode(CMMsg.MSG_NOISYMOVEMENT);
					break;
				case 'O':
					socobj.setOthersCode(CMMsg.MSG_OK_VISUAL);
					socobj.setTargetCode(CMMsg.MSG_OK_VISUAL);
					break;
				default:
					socobj.setOthersCode(CMMsg.MSG_NOISYMOVEMENT);
					socobj.setTargetCode(CMMsg.MSG_NOISYMOVEMENT);
					break;
				}
				// remaining fields are consumed tab-by-tab, nesting one level per field
				getline=getline.substring(x+1);
				x=getline.indexOf("\t");
				if(x>=0)
				{
					socobj.setName(getline.substring(0,x).toUpperCase());
					getline=getline.substring(x+1);
					x=getline.indexOf("\t");
					if(x>=0)
					{
						socobj.setYou_see(getline.substring(0,x));
						getline=getline.substring(x+1);
						x=getline.indexOf("\t");
						if(x>=0)
						{
							socobj.setThird_party_sees(getline.substring(0,x));
							getline=getline.substring(x+1);
							x=getline.indexOf("\t");
							if(x>=0)
							{
								socobj.setTarget_sees(getline.substring(0,x));
								getline=getline.substring(x+1);
								x=getline.indexOf("\t");
								if(x>=0)
								{
									socobj.setSee_when_no_target(getline.substring(0,x));
									getline=getline.substring(x+1);
									x=getline.indexOf("\t");
									if(x>=0)
										socobj.setMSPfile(getline.substring(0,x));
									else
										socobj.setMSPfile(getline);
								}
								else
									socobj.setSee_when_no_target(getline);
							}
						}
						// registered once at least name + you-see were parsed
						put(soc,socobj.Name(),socobj);
					}
				}
			}
		}
	}
	/**
	 * Returns the parsed socials map, lazily loading and caching it under the
	 * "PARSED_SOCIALS: " resource key on first access.  Derived caches (list,
	 * table) are invalidated after a fresh load.
	 * {@code filename} is the socials file path declared elsewhere in this library.
	 */
	@SuppressWarnings("unchecked")
	public Map<String, List<Social>> getSocialHash()
	{
		Map<String,List<Social>> soc=(Map<String,List<Social>>)Resources.getResource("PARSED_SOCIALS: "+filename);
		if(soc==null)
		{
			soc=new SHashtable<String,List<Social>>();
			// register the (still empty) map first so re-entrant calls see it
			Resources.submitResource("PARSED_SOCIALS: "+filename,soc);
			final List<String> V=Resources.getFileLineVector(new CMFile(filename,null,CMFile.FLAG_LOGERRORS).text());
			putSocialsInHash(soc,V);
			unloadDerivedResources();
		}
		return soc;
	}
	// True when the parsed-socials cache for this file is already resident.
	@Override public boolean isLoaded() { return Resources.getResource("PARSED_SOCIALS: "+filename)!=null; }
private String realName(String name)
{
String shortName=name.toUpperCase().trim();
final int spdex=shortName.indexOf(' ');
if(spdex>0) shortName=shortName.substring(0,spdex);
return shortName;
}
private void put(Map<String,List<Social>> H, String name, Social S)
{
name=realName(name);
List<Social> V2=H.get(name);
if(V2==null)
{
V2=new Vector<Social>(4);
H.put(name,V2);
}
for(int v=0;v<V2.size();v++)
if(V2.get(v).Name().equalsIgnoreCase(S.Name()))
{
V2.remove(v);
break;
}
V2.add(S);
}
	/**
	 * Public entry point: files a social variation into the live socials map.
	 */
	@Override
	public void put(String name, Social S)
	{
		put(getSocialHash(),name,S);
	}
	/**
	 * Removes the variation with the given full name from its base-name bucket;
	 * when the bucket empties, the bucket itself and all derived caches are
	 * dropped too.
	 */
	@Override
	public void remove(String name)
	{
		final String realName=realName(name);
		final List<Social> V2=getSocialHash().get(realName);
		if(V2==null) return;
		for(int v=0;v<V2.size();v++)
			if(V2.get(v).Name().equalsIgnoreCase(name))
			{
				V2.remove(v);
				if(V2.size()==0)
				{
					getSocialHash().remove(realName);
					unloadDerivedResources();
				}
				break;
			}
	}
	/**
	 * Adds (or replaces) a social variation and invalidates derived caches.
	 */
	@Override
	public void addSocial(Social S)
	{
		put(S.name(),S);
		unloadDerivedResources();
	}
	// Number of distinct base social names (each maps to a set of variations).
	@Override public int numSocialSets() {return getSocialHash().size();}
	/**
	 * Drops the parsed-socials cache and every cache derived from it.
	 */
	@Override
	public void unloadSocials()
	{
		Resources.removeResource("PARSED_SOCIALS: "+filename);
		unloadDerivedResources();
	}
private void unloadDerivedResources()
{
Resources.removeResource("SOCIALS LIST");
Resources.removeResource("SOCIALS TABLE");
Resources.removeResource("WEB SOCIALS TBL");
}
	/**
	 * Library shutdown hook: releases all socials caches.  Always succeeds.
	 */
	@Override
	public boolean shutdown()
	{
		unloadSocials();
		return true;
	}
	/**
	 * Interactive editor step: shows and optionally changes the message code
	 * bystanders receive.  Most choices also set the target code to the same
	 * value.  Only prompts when the genEd-style showFlag/showNumber pair says
	 * this field is being edited.
	 *
	 * @throws IOException on session I/O errors
	 */
	@Override
	public void modifySocialOthersCode(MOB mob, Social me, int showNumber, int showFlag)
		throws IOException
	{
		if((showFlag>0)&&(showFlag!=showNumber)) return;
		mob.session().rawPrintln(L("@x1. Others Effect type: @x2",""+showNumber,((me.othersCode()==CMMsg.MSG_HANDS)?"HANDS":((me.othersCode()==CMMsg.MSG_OK_VISUAL)?"VISUAL ONLY":((me.othersCode()==CMMsg.MSG_SPEAK)?"HEARING WORDS":((me.othersCode()==CMMsg.MSG_NOISYMOVEMENT)?"SEEING MOVEMENT":"HEARING NOISE"))))));
		if((showFlag!=showNumber)&&(showFlag>-999)) return;
		String newName=mob.session().choose(L("Change W)ords, M)ovement (w/noise), S)ound, V)isual, H)ands: "),L("WMSVH"),"");
		if((newName!=null)&&(newName.length()>0))
		{
			newName=newName.toUpperCase();
			switch(newName.charAt(0))
			{
			case 'H':
				me.setOthersCode(CMMsg.MSG_HANDS);
				me.setTargetCode(CMMsg.MSG_HANDS);
				break;
			case 'W':
				me.setOthersCode(CMMsg.MSG_SPEAK);
				me.setTargetCode(CMMsg.MSG_SPEAK);
				break;
			case 'M':
				me.setOthersCode(CMMsg.MSG_NOISYMOVEMENT);
				me.setTargetCode(CMMsg.MSG_NOISYMOVEMENT);
				break;
			case 'S':
				me.setOthersCode(CMMsg.MSG_NOISE);
				me.setTargetCode(CMMsg.MSG_NOISE);
				break;
			case 'V':
				me.setOthersCode(CMMsg.MSG_OK_VISUAL);
				me.setTargetCode(CMMsg.MSG_OK_VISUAL);
				break;
			}
		}
		else
			mob.session().println(L("(no change)"));
	}
	/**
	 * Interactive editor step: shows and optionally changes the message code the
	 * target of the social receives.  Unlike the others-code editor, only the
	 * target code is modified here.
	 *
	 * @throws IOException on session I/O errors
	 */
	@Override
	public void modifySocialTargetCode(MOB mob, Social me, int showNumber, int showFlag)
		throws IOException
	{
		if((showFlag>0)&&(showFlag!=showNumber)) return;
		mob.session().rawPrintln(L("@x1. Target Effect type: @x2",""+showNumber,((me.targetCode()==CMMsg.MSG_HANDS)?"HANDS":((me.targetCode()==CMMsg.MSG_OK_VISUAL)?"VISUAL ONLY":((me.targetCode()==CMMsg.MSG_SPEAK)?"HEARING WORDS":((me.targetCode()==CMMsg.MSG_NOISYMOVEMENT)?"BEING MOVED ON":"HEARING NOISE"))))));
		if((showFlag!=showNumber)&&(showFlag>-999)) return;
		String newName=mob.session().choose(L("Change W)ords, M)ovement (w/noise), S)ound, V)isual, H)ands: "),L("WMSVH"),"");
		if((newName!=null)&&(newName.length()>0))
		{
			newName=newName.toUpperCase();
			switch(newName.charAt(0))
			{
			case 'W':
				me.setTargetCode(CMMsg.MSG_SPEAK);
				break;
			case 'M':
				me.setTargetCode(CMMsg.MSG_NOISYMOVEMENT);
				break;
			case 'H':
				me.setTargetCode(CMMsg.MSG_HANDS);
				break;
			case 'S':
				me.setTargetCode(CMMsg.MSG_NOISE);
				break;
			case 'V':
				me.setTargetCode(CMMsg.MSG_OK_VISUAL);
				break;
			}
		}
		else
			mob.session().println(L("(no change)"));
	}
	/**
	 * Interactive editor step: shows and optionally changes the message code the
	 * acting mob emits when performing the social.
	 *
	 * @throws IOException on session I/O errors
	 */
	@Override
	public void modifySocialSourceCode(MOB mob, Social me, int showNumber, int showFlag)
		throws IOException
	{
		if((showFlag>0)&&(showFlag!=showNumber)) return;
		mob.session().rawPrintln(L("@x1. Your action type: @x2",""+showNumber,((me.sourceCode()==CMMsg.MSG_NOISYMOVEMENT)?"LARGE MOVEMENT":((me.sourceCode()==CMMsg.MSG_SPEAK)?"SPEAKING":((me.sourceCode()==CMMsg.MSG_HANDS)?"MOVEMENT":"MAKING NOISE")))));
		if((showFlag!=showNumber)&&(showFlag>-999)) return;
		String newName=mob.session().choose(L("Change W)ords, M)ovement (small), S)ound, L)arge Movement: "),L("WMSL"),"");
		if((newName!=null)&&(newName.length()>0))
		{
			newName=newName.toUpperCase();
			switch(newName.charAt(0))
			{
			case 'W':
				me.setSourceCode(CMMsg.MSG_SPEAK);
				break;
			case 'M':
				me.setSourceCode(CMMsg.MSG_HANDS);
				break;
			case 'S':
				me.setSourceCode(CMMsg.MSG_NOISE);
				break;
			case 'L':
				me.setSourceCode(CMMsg.MSG_NOISYMOVEMENT);
				break;
			}
		}
		else
			mob.session().println(L("(no change)"));
	}
	/**
	 * Full interactive editor for creating, modifying, and deleting a social and
	 * its per-target variations.  The first word of socialString is the social
	 * name; any remaining words pre-select a target variation.  Loops: outer loop
	 * re-saves after each edited variation; inner loop picks (or creates) one
	 * variation; the final loop walks the genEd-style field prompts.
	 *
	 * @param mob the editing mob (must have a session)
	 * @param socialString social name, optionally followed by a target
	 * @return true when the editor ran to completion, false if it could not start
	 * @throws IOException on session I/O errors
	 */
	@Override
	public boolean modifySocialInterface(MOB mob, String socialString)
		throws IOException
	{
		final Vector<String> socialsParse=CMParms.parse(socialString);
		if(socialsParse.size()==0)
		{
			mob.tell(L("Which social?"));
			return false;
		}
		final String name=socialsParse.firstElement().toUpperCase().trim();
		String rest=socialsParse.size()>1?CMParms.combine(socialsParse,1):"";
		List<Social> socials=getSocialsSet(socialsParse.firstElement());
		// unknown social: require explicit confirmation before creating it
		if(((socials==null)||(socials.size()==0))
		&&((mob.session()==null)
			||(!mob.session().confirm(L("The social '@x1' does not exist. Create it (y/N)? ",name),L("N")))))
			return false;
		if(socials==null)
			socials=new Vector<Social>();
		boolean resaveSocials=true;
		while((resaveSocials)&&(mob.session()!=null)&&(!mob.session().isStopped()))
		{
			resaveSocials=false;
			Social soc=null;
			boolean pickNewSocial=true;
			// variation picker: lists existing targets and offers to add a new one
			while((pickNewSocial)&&(mob.session()!=null)&&(!mob.session().isStopped()))
			{
				pickNewSocial=false;
				final StringBuffer str=new StringBuffer(L("\n\rSelect a target:\n\r"));
				int selection=-1;
				for(int v=0;v<socials.size();v++)
				{
					final Social S=socials.get(v);
					final int x=S.Name().indexOf(' ');
					if(x<0)
					{
						str.append((v+1)+") No Target (NONE)\n\r");
						continue;
					}
					// a pre-typed target selects its variation directly
					if((rest.length()>0)
					&&(S.Name().substring(x+1).toUpperCase().trim().equalsIgnoreCase(rest.toUpperCase().trim())))
						selection=(v+1);
					if(S.Name().substring(x+1).toUpperCase().trim().equalsIgnoreCase("<T-NAME>"))
					{
						str.append((v+1)+") MOB Targeted (MOBTARGET)\n\r");
						continue;
					}
					if(S.Name().substring(x+1).toUpperCase().trim().equalsIgnoreCase("<I-NAME>"))
					{
						str.append((v+1)+") Room Item Targeted (ITEMTARGET)\n\r");
						continue;
					}
					if(S.Name().substring(x+1).toUpperCase().trim().equalsIgnoreCase("<V-NAME>"))
					{
						str.append((v+1)+") Inventory Targeted (INVTARGET)\n\r");
						continue;
					}
					if(S.Name().substring(x+1).toUpperCase().trim().equalsIgnoreCase("<E-NAME>"))
					{
						str.append((v+1)+") Equipment Targeted (EQUIPTARGET)\n\r");
						continue;
					}
					str.append((v+1)+") "+S.Name().substring(x+1).toUpperCase().trim()+"\n\r");
				}
				str.append(L("@x1) Add a new target\n\r",""+(socials.size()+1)));
				String s=null;
				if((rest.length()>0)&&(selection<0))
					selection=(socials.size()+1);
				else
				if(selection<0)
				{
					mob.session().rawPrintln(str.toString());
					s=mob.session().prompt(L("\n\rSelect an option or RETURN: "),"");
					if(!CMath.isInteger(s))
					{
						soc=null;
						break;
					}
					selection=CMath.s_int(s);
				}
				if((selection>0)&&(selection<=socials.size()))
				{
					soc=socials.get(selection-1);
					break;
				}
				// past here: user chose to add a new target variation
				String newOne=rest;
				if(newOne.length()==0)
				{
					newOne="?";
					while((newOne.equals("?"))&&(!mob.session().isStopped()))
					{
						newOne=mob.session().prompt(L("\n\rNew target (?): "),"").toUpperCase().trim();
						if(newOne.equals("?"))
							mob.session().println(L("Choices: MOBTARGET, ITEMTARGET, INVTARGET, EQUIPTARGET, NONE, ALL, SELF"));
					}
					if(newOne.trim().length()==0)
					{
						pickNewSocial=true;
						continue;
					}
				}
				// normalize the many accepted spellings to the canonical suffixes
				if(newOne.startsWith("<T-")||(newOne.startsWith("T-")))
					newOne="TNAME";
				if(newOne.startsWith("<I-")||(newOne.startsWith("I-")))
					newOne="INAME";
				if(newOne.startsWith("<V-")||(newOne.startsWith("V-")))
					newOne="VNAME";
				if(newOne.startsWith("<E-")||(newOne.startsWith("E-")))
					newOne="ENAME";
				if(newOne.equalsIgnoreCase("TNAME")||newOne.equalsIgnoreCase("TARGET"))
					newOne=" <T-NAME>";
				else
				if(newOne.equalsIgnoreCase("INAME")||newOne.equalsIgnoreCase("ITEMTARGET"))
					newOne=" <I-NAME>";
				else
				if(newOne.equalsIgnoreCase("VNAME")||newOne.equalsIgnoreCase("INVTARGET"))
					newOne=" <V-NAME>";
				else
				if(newOne.equalsIgnoreCase("ENAME")||newOne.equalsIgnoreCase("EQUIPTARGET"))
					newOne=" <E-NAME>";
				else
				if(newOne.equalsIgnoreCase("NONE")) newOne="";
				else
				if(!newOne.equals("ALL")&&!newOne.equals("SELF")
				&&!mob.session().confirm(L("'@x1' is a non-standard target. Are you sure (y/N)? ",newOne),L("N")))
				{
					rest="";
					pickNewSocial=true;
				}
				else
					newOne=" "+newOne;
				if(!pickNewSocial)
					for(int i=0;i<socials.size();i++)
						if(socials.get(i).Name().equals(name+newOne))
						{
							mob.tell(L("This social already exists. Pick it off the list above."));
							pickNewSocial=true;
							break;
						}
				if(!pickNewSocial)
				{
					soc=makeDefaultSocial(name,newOne);
					addSocial(soc);
					if(!socials.contains(soc))
						socials.add(soc);
					resaveSocials=true;
				}
			}
			if(soc!=null)
			{
				// field-by-field prompt loop over the selected variation
				boolean ok=false;
				int showFlag=-1;
				if(CMProps.getIntVar(CMProps.Int.EDITORTYPE)>0)
					showFlag=-999;
				while(!ok)
				{
					int showNumber=0;
					soc.setYou_see(CMLib.genEd().prompt(mob,soc.You_see(),++showNumber,showFlag,L("You-see string"),false,true));
					if(soc.sourceCode()==CMMsg.MSG_OK_ACTION) soc.setSourceCode(CMMsg.MSG_HANDS);
					modifySocialSourceCode(mob,soc,++showNumber,showFlag);
					soc.setThird_party_sees(CMLib.genEd().prompt(mob,soc.Third_party_sees(),++showNumber,showFlag,L("Others-see string"),false,true));
					if(soc.othersCode()==CMMsg.MSG_OK_ACTION) soc.setOthersCode(CMMsg.MSG_HANDS);
					modifySocialOthersCode(mob,soc,++showNumber,showFlag);
					if(soc.Name().endsWith(" <T-NAME>"))
					{
						soc.setTarget_sees(CMLib.genEd().prompt(mob,soc.Target_sees(),++showNumber,showFlag,L("Target-sees string"),false,true));
						if(soc.targetCode()==CMMsg.MSG_OK_ACTION) soc.setTargetCode(CMMsg.MSG_HANDS);
						modifySocialTargetCode(mob,soc,++showNumber,showFlag);
					}
					if(soc.Name().endsWith(" <T-NAME>")||soc.Name().endsWith(" <I-NAME>")||soc.Name().endsWith(" <V-NAME>")||soc.Name().endsWith(" <E-NAME>")||(soc.Name().endsWith(" ALL")))
						soc.setSee_when_no_target(CMLib.genEd().prompt(mob,soc.See_when_no_target(),++showNumber,showFlag,L("You-see when no target"),false,true));
					soc.setMSPfile(CMLib.genEd().prompt(mob,soc.MSPfile(),++showNumber,showFlag,L("Sound file"),true,false));
					resaveSocials=true;
					if(showFlag<-900){ ok=true; break;}
					if(showFlag>0){ showFlag=-1; continue;}
					final String input = mob.session().prompt(L("Edit which (or DELETE)? "),"");
					showFlag=CMath.s_int(input);
					if((input!=null)&&(input.equalsIgnoreCase("DELETE")))
					{
						remove(soc.Name());
						socials.remove(soc);
						mob.session().rawOut(L("\n\rSocial variation '@x1' deleted.\n\r",soc.Name()));
						showFlag=-1;
						ok=true;
					}
					else
					if(showFlag<=0)
					{
						showFlag=-1;
						ok=true;
					}
				}
			}
			if((resaveSocials)&&(soc!=null))
			{
				save(mob);
				Log.sysOut("Socials",mob.Name()+" modified social "+soc.name()+".");
				soc=null;
				if(rest.length()>0)
					break;
			}
		}
		return true;
	}
@Override
public Social fetchSocial(List<Social> set, String name, boolean exactOnly)
{
for(int s=0;s<set.size();s++)
if(set.get(s).Name().equalsIgnoreCase(name))
return set.get(s);
if(exactOnly) return null;
name=name.toUpperCase();
for(int s=0;s<set.size();s++)
if(set.get(s).Name().toUpperCase().startsWith(name))
return set.get(s);
return null;
}
	/**
	 * Resolves a social variation for a concrete target against the live map.
	 */
	@Override
	public Social fetchSocial(String baseName, Environmental targetE, boolean exactOnly)
	{
		return fetchSocial(getSocialHash(),baseName,targetE,exactOnly);
	}
	/**
	 * Maps a target to its variation suffix and looks that variation up:
	 * null target = bare name, MOB = " &lt;T-NAME&gt;", item in a room =
	 * " &lt;I-NAME&gt;", item in inventory = " &lt;V-NAME&gt;", worn item =
	 * " &lt;E-NAME&gt;".  Non-item, non-MOB targets yield null.
	 */
	protected Social fetchSocial(final Map<String,List<Social>> soc, final String baseName, final Environmental targetE, final boolean exactOnly)
	{
		if(targetE==null) return fetchSocial(soc,baseName,exactOnly);
		if(targetE instanceof MOB) return fetchSocial(soc,baseName+" <T-NAME>",exactOnly);
		if(!(targetE instanceof Item)) return null;
		final Item I=(Item)targetE;
		if(I.owner() instanceof Room) return fetchSocial(soc,baseName+" <I-NAME>",exactOnly);
		if(!(I.owner() instanceof MOB)) return null;
		if(I.amWearingAt(Wearable.IN_INVENTORY)) return fetchSocial(soc,baseName+" <V-NAME>",exactOnly);
		return fetchSocial(soc,baseName+" <E-NAME>",exactOnly);
	}
	/**
	 * Looks up a social by (possibly suffixed) name in the live map.
	 */
	@Override
	public Social fetchSocial(String name, boolean exactOnly)
	{
		return fetchSocial(getSocialHash(),name,exactOnly);
	}
	/**
	 * Name lookup against an arbitrary socials map: exact match within the
	 * base-name bucket first, then — unless exactOnly — a prefix scan.
	 */
	protected Social fetchSocial(final Map<String,List<Social>> soc, final String name, final boolean exactOnly)
	{
		final String realName=realName(name);
		final List<Social> V=soc.get(realName);
		if((V==null)&&(exactOnly)) return null;
		Social S=null;
		if(V!=null)
			S=fetchSocial(V,name,exactOnly);
		if(S!=null) return S;
		if(V==null) return null;
		// NOTE(review): this prefix scan searches the same bucket V for every
		// matching key rather than soc.get(key); looks unintentional — confirm
		for(final String key : soc.keySet())
		{
			if(key.startsWith(name))
				return fetchSocial(V,name,false);
		}
		return null;
	}
	/**
	 * Resolves a social from parsed command words against the live map.
	 */
	@Override
	public Social fetchSocial(List<String> C, boolean exactOnly, boolean checkItemTargets)
	{
		return fetchSocialFromSet(getSocialHash(),C,exactOnly,checkItemTargets);
	}
	/**
	 * Resolves a social from parsed command words (C[0]=name, C[1]=target).
	 * Tries, in order: the literal "name target" variation; the targeted
	 * variations (&lt;T/I/E/V-NAME&gt;, when checkItemTargets); and finally —
	 * unless exactOnly — a prefix match on the base name, preferring untargeted
	 * base names.
	 */
	@Override
	public Social fetchSocialFromSet(final Map<String,List<Social>> soc, final List<String> C, final boolean exactOnly, final boolean checkItemTargets)
	{
		if(C==null) return null;
		if(C.size()==0) return null;
		String socialName=C.get(0);
		String theRest="";
		Social S=null;
		boolean tryTargets=false;
		if(C.size()>1)
		{
			final String Target=C.get(1).toUpperCase();
			S=fetchSocial(soc,socialName+" "+Target,true);
			if((S==null)
			&&((!Target.equals("SELF"))&&(!Target.equals("ALL"))))
			{
				if(checkItemTargets)
					tryTargets=true;
				else
					theRest=" <T-NAME>";
			}
			else
			// NOTE(review): reached when S is null and the target IS "SELF"/"ALL";
			// it still forces the " <T-NAME>" variation — confirm this is intended
			if(S==null)
				theRest=" <T-NAME>";
		}
		if(S==null)
		{
			if(!tryTargets)
				S=fetchSocial(soc,socialName+theRest,true);
			else
			if((S=fetchSocial(soc,socialName+" <T-NAME>",true))==null)
				if((S=fetchSocial(soc,socialName+" <I-NAME>",true))==null)
					if((S=fetchSocial(soc,socialName+" <E-NAME>",true))==null)
						if((S=fetchSocial(soc,socialName+" <V-NAME>",true))==null)
						{}
		}
		if((S==null)&&(!exactOnly))
		{
			// prefix fallback: prefer a space-free (untargeted) key; otherwise
			// remember the first targeted key as a backup
			String backupSocialName=null;
			final String socName=socialName.toUpperCase();
			socialName=null;
			for(final String key : soc.keySet())
			{
				if((key.startsWith(socName))&&(key.indexOf(' ')<0))
				{
					socialName=key;
					break;
				}
				else
				if(key.startsWith(socName))
				{
					backupSocialName=key;
					break;
				}
			}
			if(socialName==null)
				if(backupSocialName == null)
					socialName=C.get(0);
				else
					socialName=backupSocialName;
			if(socialName!=null)
				if(!tryTargets)
					S=fetchSocial(soc,socialName+theRest,true);
				else
				if((S=fetchSocial(soc,socialName+" <T-NAME>",true))==null)
					if((S=fetchSocial(soc,socialName+" <I-NAME>",true))==null)
						if((S=fetchSocial(soc,socialName+" <E-NAME>",true))==null)
							if((S=fetchSocial(soc,socialName+" <V-NAME>",true))==null)
							{}
		}
		return S;
	}
@Override
public List<Social> enumSocialSet(int index)
{
if((index<0)||(index>numSocialSets())) return null;
int i=0;
final Map<String,List<Social>> soc=getSocialHash();
for (final String key : soc.keySet())
{
final List<Social> V=soc.get(key);
if((i++)==index) return V;
}
return null;
}
	/**
	 * Builds a new social variation with sensible default message strings and
	 * codes, keyed by target type: empty type = untargeted, "ALL" = room-wide,
	 * "*-NAME&gt;" = targeted, "SELF" = reflexive, anything else = literal suffix.
	 *
	 * @param name the base social name
	 * @param type the target suffix ("" , " ALL", " &lt;T-NAME&gt;", " SELF", ...)
	 * @return the newly constructed social (not yet added to the map)
	 */
	@Override
	public Social makeDefaultSocial(String name, String type)
	{
		final Social soc=(Social)CMClass.getCommon("DefaultSocial");
		// ensure the suffix is space-separated from the base name
		if((type.length()>0)&&(!type.startsWith(" ")))
			type=" "+type;
		soc.setName(name+type);
		if(type.trim().length()==0)
		{
			soc.setYou_see("You "+name.toLowerCase()+".");
			soc.setThird_party_sees("<S-NAME> "+name.toLowerCase()+"s.");
			soc.setSourceCode(CMMsg.MSG_HANDS);
			soc.setOthersCode(CMMsg.MSG_HANDS);
		}
		else
		if(type.trim().equals("ALL"))
		{
			soc.setYou_see("You "+name.toLowerCase()+" everyone.");
			soc.setThird_party_sees("<S-NAME> "+name.toLowerCase()+"s everyone.");
			soc.setSee_when_no_target(CMStrings.capitalizeAndLower(name)+" who?");
			soc.setSourceCode(CMMsg.MSG_SPEAK);
			soc.setOthersCode(CMMsg.MSG_SPEAK);
		}
		else
		if(type.trim().endsWith("-NAME>"))
		{
			soc.setYou_see("You "+name.toLowerCase()+" <T-NAME>.");
			soc.setTarget_sees("<S-NAME> "+name.toLowerCase()+"s you.");
			soc.setThird_party_sees("<S-NAME> "+name.toLowerCase()+"s <T-NAMESELF>.");
			soc.setSee_when_no_target(CMStrings.capitalizeAndLower(name)+" who?");
			soc.setSourceCode(CMMsg.MSG_NOISYMOVEMENT);
			soc.setTargetCode(CMMsg.MSG_NOISYMOVEMENT);
			soc.setOthersCode(CMMsg.MSG_NOISYMOVEMENT);
		}
		else
		if(type.trim().equals("SELF"))
		{
			soc.setYou_see("You "+name.toLowerCase()+" yourself.");
			soc.setThird_party_sees("<S-NAME> "+name.toLowerCase()+"s <S-HIM-HERSELF>.");
			soc.setSourceCode(CMMsg.MSG_NOISE);
			soc.setOthersCode(CMMsg.MSG_NOISE);
		}
		else
		{
			soc.setYou_see("You "+name.toLowerCase()+type.toLowerCase()+".");
			soc.setThird_party_sees("<S-NAME> "+name.toLowerCase()+"s"+type.toLowerCase()+".");
			soc.setSourceCode(CMMsg.MSG_HANDS);
			soc.setOthersCode(CMMsg.MSG_HANDS);
		}
		return soc;
	}
	/**
	 * Serializes all loaded socials back to the socials file in the tab-delimited
	 * format read by putSocialsInHash: flag chars (source code, then target code),
	 * then name, you-see, third-party-sees, target-sees, no-target, and msp-file
	 * fields.  Entries are flattened from the map, sorted by name, and written
	 * one per line.  No-op when the socials are not currently loaded.
	 *
	 * @param whom the mob credited with the file write (security already checked)
	 */
	@Override
	public void save(MOB whom)
	{
		if(!isLoaded()) return;
		final Map<String,List<Social>> soc=getSocialHash();
		final StringBuffer buf=new StringBuffer("");
		Vector<Social> V2=new Vector<Social>();
		// flatten the per-name buckets into one list
		for (final String key : soc.keySet())
		{
			final List<Social> V1 = soc.get(key);
			for(int v1=0;v1<V1.size();v1++)
			{
				final Social S1=V1.get(v1);
				for(int v2=0;v2<V2.size();v2++)
				{
					final Social S2=V2.elementAt(v2);
					if(S1.equals(S2))
					{
						// NOTE(review): inserting S1 before an equal S2 leaves BOTH
						// in V2 (duplicate line on disk); setElementAt may have been
						// the intent — confirm against upstream behavior
						V2.insertElementAt(S1,v2);
						break;
					}
				}
				if(!V2.contains(S1))
					V2.addElement(S1);
			}
		}
		// selection sort by name, case-insensitive; "<=" keeps scan-order ties stable
		final Vector<Social> sorted=new Vector<Social>();
		while(V2.size()>0)
		{
			Social lowest=V2.firstElement();
			Social S=null;
			for(int i=1;i<V2.size();i++)
			{
				S=V2.elementAt(i);
				if(S.name().compareToIgnoreCase(lowest.Name())<=0)
					lowest=S;
			}
			V2.remove(lowest);
			sorted.add(lowest);
		}
		V2=sorted;
		for(int v=0;v<V2.size();v++)
		{
			final Social I=V2.elementAt(v);
			// flag char 0: source message code (inverse of the parse switch)
			switch(I.sourceCode())
			{
			case CMMsg.MSG_SPEAK:
				buf.append('w');
				break;
			case CMMsg.MSG_HANDS:
				buf.append('m');
				break;
			case CMMsg.MSG_NOISE:
				buf.append('s');
				break;
			case CMMsg.MSG_NOISYMOVEMENT:
				buf.append('o');
				break;
			default:
				buf.append(' ');
				break;
			}
			// flag char 1: target/others message code
			switch(I.targetCode())
			{
			case CMMsg.MSG_HANDS:
				buf.append('t');
				break;
			case CMMsg.MSG_NOISE:
				buf.append('s');
				break;
			case CMMsg.MSG_SPEAK:
				buf.append('w');
				break;
			case CMMsg.MSG_NOISYMOVEMENT:
				buf.append('v');
				break;
			case CMMsg.MSG_OK_VISUAL:
				buf.append('o');
				break;
			default:
				buf.append(' ');
				break;
			}
			final String[] stuff=new String[6];
			stuff[0]=I.name();
			stuff[1]=I.You_see();
			stuff[2]=I.Third_party_sees();
			stuff[3]=I.Target_sees();
			stuff[4]=I.See_when_no_target();
			stuff[5]=I.MSPfile();
			buf.append('\t');
			for (final String element : stuff)
			{
				if(element==null)
					buf.append("\t");
				else
					buf.append(element+"\t");
			}
			// replace the trailing tab with the \r\n line terminator
			buf.setCharAt(buf.length()-1,'\r');
			buf.append('\n');
		}
		// allowed is forced because this is already protected by SOCIALS security flag
		if(!new CMFile(filename,whom,CMFile.FLAG_FORCEALLOW).saveText(buf))
			Log.errOut("Socials","Unable to save socials.txt!");
		unloadDerivedResources();
	}
	/**
	 * Returns the variation list for a social's base name, or null when unknown.
	 */
	@Override
	public List<Social> getSocialsSet(String named)
	{
		named=realName(named);
		return getSocialHash().get(named);
	}
	/**
	 * Resolves a (possibly partial) name to its lowercase base name, or null.
	 */
	@Override
	public String findSocialName(String named, boolean exactOnly)
	{
		return findSocialName(getSocialHash(),named,exactOnly);
	}
protected String findSocialName(final Map<String,List<Social>> soc, final String named, final boolean exactOnly)
{
if(named==null) return null;
final Social S = fetchSocial(soc, named,exactOnly);
if(S!=null)
return realName(S.Name()).toLowerCase();
return null;
}
	/**
	 * Builds the colorized help text for a social: one section per variation,
	 * showing the target kind and each message string rendered through the
	 * output filter with placeholder mobs substituted in.
	 *
	 * @param mob the viewer (may color/format output), or null
	 * @param named the (possibly partial) social name
	 * @param exact whether the name must match exactly
	 * @return the help text, or null when the social is unknown
	 */
	@Override
	public String getSocialsHelp(MOB mob, String named, boolean exact)
	{
		final String realName=findSocialName(named,exact);
		if(realName==null) return null;
		final List<Social> list=getSocialsSet(realName.toUpperCase());
		if((list==null)||(list.size()==0)) return null;
		final StringBuffer help=new StringBuffer("");
		help.append("^H\n\r");
		help.append("Social : ^x"+realName+"^.^N\n\r");
		final Session session=(mob!=null)?mob.session():null;
		// throwaway mobs stand in for the target and bystanders when rendering
		final MOB tgtMOB=CMClass.getFactoryMOB();
		tgtMOB.setName(L("the target"));
		final MOB othMOB=CMClass.getFactoryMOB();
		othMOB.setName(L("someone"));
		for(int l=0;l<list.size();l++)
		{
			final Social S=list.get(l);
			final int x=S.Name().indexOf(' ');
			final String rest=(x>0)?S.Name().substring(x+1).trim().toUpperCase():"";
			if(rest.length()==0)
			{
				help.append("\n\r");
				help.append("^H");
				help.append("Target ^?: ^xnone^.^N\n\r");
				help.append("You see : "+CMLib.coffeeFilter().fullOutFilter(session, mob, mob, null, null, S.You_see(), false)+"\n\r");
				help.append("Others see : "+CMLib.coffeeFilter().fullOutFilter(session, othMOB, mob, null, null, S.Third_party_sees(), false)+"\n\r");
			}
			else
			if(rest.equals("<T-NAME>"))
			{
				help.append("\n\r");
				help.append("^H");
				help.append("Target ^?: ^xsomeone^.^N\n\r");
				help.append("No Target : "+CMLib.coffeeFilter().fullOutFilter(session, mob, mob, tgtMOB, null, S.See_when_no_target(), false)+"\n\r");
				help.append("You see : "+CMLib.coffeeFilter().fullOutFilter(session, mob, mob, tgtMOB, null, S.You_see(), false)+"\n\r");
				help.append("Target sees: "+CMLib.coffeeFilter().fullOutFilter(session, tgtMOB, mob, tgtMOB, null, S.Target_sees(), false)+"\n\r");
				help.append("Others see : "+CMLib.coffeeFilter().fullOutFilter(session, othMOB, mob, tgtMOB, null, S.Third_party_sees(), false)+"\n\r");
			}
			else
			if(rest.equals("<I-NAME>"))
			{
				help.append("\n\r");
				help.append("^H");
				help.append("Target ^?: ^xroom item^.^N\n\r");
				help.append("No Target : "+CMLib.coffeeFilter().fullOutFilter(session, mob, mob, tgtMOB, null, S.See_when_no_target(), false)+"\n\r");
				help.append("You see : "+CMLib.coffeeFilter().fullOutFilter(session, mob, mob, tgtMOB, null, S.You_see(), false)+"\n\r");
				help.append("Others see : "+CMLib.coffeeFilter().fullOutFilter(session, othMOB, mob, tgtMOB, null, S.Third_party_sees(), false)+"\n\r");
			}
			else
			if(rest.equals("<V-NAME>"))
			{
				help.append("\n\r");
				help.append("^H");
				help.append("Target ^?: ^xinventory item^.^N\n\r");
				help.append("No Target : "+CMLib.coffeeFilter().fullOutFilter(session, mob, mob, tgtMOB, null, S.See_when_no_target(), false)+"\n\r");
				help.append("You see : "+CMLib.coffeeFilter().fullOutFilter(session, mob, mob, tgtMOB, null, S.You_see(), false)+"\n\r");
				help.append("Others see : "+CMLib.coffeeFilter().fullOutFilter(session, othMOB, mob, tgtMOB, null, S.Third_party_sees(), false)+"\n\r");
			}
			else
			if(rest.equals("<E-NAME>"))
			{
				help.append("\n\r");
				help.append("^H");
				help.append("Target ^?: ^xequipped item^.^N\n\r");
				help.append("No Target : "+CMLib.coffeeFilter().fullOutFilter(session, mob, mob, tgtMOB, null, S.See_when_no_target(), false)+"\n\r");
				help.append("You see : "+CMLib.coffeeFilter().fullOutFilter(session, mob, mob, tgtMOB, null, S.You_see(), false)+"\n\r");
				help.append("Others see : "+CMLib.coffeeFilter().fullOutFilter(session, othMOB, mob, tgtMOB, null, S.Third_party_sees(), false)+"\n\r");
			}
			else
			{
				help.append("\n\r");
				help.append("^H");
				help.append("Target ^?: ^x"+rest.toLowerCase()+"^.^N\n\r");
				help.append("You see : "+CMLib.coffeeFilter().fullOutFilter(session, mob, mob, null, null, S.You_see(), false)+"\n\r");
				help.append("Others see : "+CMLib.coffeeFilter().fullOutFilter(session, othMOB, mob, null, null, S.Third_party_sees(), false)+"\n\r");
			}
		}
		tgtMOB.destroy();
		othMOB.destroy();
		return help.toString();
	}
	/**
	 * Returns the sorted list of base social names, cached under "SOCIALS LIST"
	 * (invalidated by unloadDerivedResources()).
	 */
	@Override
	@SuppressWarnings("unchecked")
	public List<String> getSocialsList()
	{
		final List<String> socialsList=(List<String>)Resources.getResource("SOCIALS LIST");
		if(socialsList!=null) return socialsList;
		final List<String> socialVec=new Vector<String>();
		for(int s=0;s<CMLib.socials().numSocialSets();s++)
		{
			final List<Social> V=CMLib.socials().enumSocialSet(s);
			if((V==null)||(V.size()==0)) continue;
			// one entry per set; any variation carries the same base name
			final Social S=V.get(0);
			socialVec.add(realName(S.Name()));
		}
		Collections.sort(socialVec);
		Resources.submitResource("SOCIALS LIST",socialVec);
		return socialVec;
	}
@Override
public String getSocialsTable()
{
	// Returns the socials list rendered as a 4-column table of 19-char
	// padded names, cached as a StringBuffer under "SOCIALS TABLE".
	// NOTE: the cache entry must stay a StringBuffer because it is cast
	// back to StringBuffer on subsequent calls.
	final StringBuffer cached=(StringBuffer)Resources.getResource("SOCIALS TABLE");
	if(cached!=null)
		return cached.toString();
	final List<String> names=getSocialsList();
	final StringBuffer table=new StringBuffer("");
	int column=0;
	for(final String name : names)
	{
		column++;
		if(column>4)
		{
			// start a new row after every 4th entry
			table.append("\n\r");
			column=1;
		}
		table.append(CMStrings.padRight(name,19));
	}
	Resources.submitResource("SOCIALS TABLE",table);
	return table.toString();
}
}
| |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package build.buildfarm.instance.shard;
import static build.buildfarm.instance.shard.RedisShardBackplane.parseOperationChange;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import build.bazel.remote.execution.v2.Platform;
import build.bazel.remote.execution.v2.RequestMetadata;
import build.buildfarm.v1test.DispatchedOperation;
import build.buildfarm.v1test.ExecuteEntry;
import build.buildfarm.v1test.OperationChange;
import build.buildfarm.v1test.QueueEntry;
import build.buildfarm.v1test.RedisShardBackplaneConfig;
import build.buildfarm.v1test.WorkerChange;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.Operation;
import com.google.protobuf.util.JsonFormat;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.function.Supplier;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import redis.clients.jedis.JedisCluster;
/**
 * Unit tests for {@link RedisShardBackplane}, exercising its Redis
 * interactions against a mocked {@link JedisCluster} supplied through a
 * mocked factory.
 */
@RunWith(JUnit4.class)
public class RedisShardBackplaneTest {
private RedisShardBackplane backplane;
// Factory injected into the backplane; each test stubs it to return its own mock cluster.
@Mock Supplier<JedisCluster> mockJedisClusterFactory;
@Before
public void setUp() {
// Initialize the @Mock fields before each test.
MockitoAnnotations.initMocks(this);
}
// A worker hash entry whose value is not parseable protobuf should be deleted
// from the workers hash and its removal published on the worker channel.
@Test
public void workersWithInvalidProtobufAreRemoved() throws IOException {
RedisShardBackplaneConfig config =
RedisShardBackplaneConfig.newBuilder()
.setWorkersHashName("Workers")
.setWorkerChannel("WorkerChannel")
.build();
JedisCluster jedisCluster = mock(JedisCluster.class);
when(mockJedisClusterFactory.get()).thenReturn(jedisCluster);
// "foo" is not valid worker protobuf, so the backplane should evict it.
when(jedisCluster.hgetAll(config.getWorkersHashName()))
.thenReturn(ImmutableMap.of("foo", "foo"));
when(jedisCluster.hdel(config.getWorkersHashName(), "foo")).thenReturn(1L);
backplane =
new RedisShardBackplane(
config,
"invalid-protobuf-worker-removed-test",
(o) -> o,
(o) -> o,
mockJedisClusterFactory);
backplane.start("startTime/test:0000");
assertThat(backplane.getWorkers()).isEmpty();
verify(jedisCluster, times(1)).hdel(config.getWorkersHashName(), "foo");
// The published message must be a WorkerChange REMOVE for "foo".
ArgumentCaptor<String> changeCaptor = ArgumentCaptor.forClass(String.class);
verify(jedisCluster, times(1)).publish(eq(config.getWorkerChannel()), changeCaptor.capture());
String json = changeCaptor.getValue();
WorkerChange.Builder builder = WorkerChange.newBuilder();
JsonFormat.parser().merge(json, builder);
WorkerChange workerChange = builder.build();
assertThat(workerChange.getName()).isEqualTo("foo");
assertThat(workerChange.getTypeCase()).isEqualTo(WorkerChange.TypeCase.REMOVE);
}
// Shared helper: asserts exactly one reset-operation change for "op" was
// published on that operation's channel.
void verifyChangePublished(JedisCluster jedis) throws IOException {
ArgumentCaptor<String> changeCaptor = ArgumentCaptor.forClass(String.class);
verify(jedis, times(1)).publish(eq(backplane.operationChannel("op")), changeCaptor.capture());
OperationChange opChange = parseOperationChange(changeCaptor.getValue());
assertThat(opChange.hasReset()).isTrue();
assertThat(opChange.getReset().getOperation().getName()).isEqualTo("op");
}
// Prequeuing must persist the operation (with expiry), push the execute entry
// onto the pre-queue list, and publish a change for the operation.
@Test
public void prequeueUpdatesOperationPrequeuesAndPublishes() throws IOException {
RedisShardBackplaneConfig config =
RedisShardBackplaneConfig.newBuilder()
.setOperationChannelPrefix("OperationChannel")
.setOperationExpire(10)
.setOperationPrefix("Operation")
.setPreQueuedOperationsListName("{hash}PreQueuedOperations")
.build();
JedisCluster jedisCluster = mock(JedisCluster.class);
when(mockJedisClusterFactory.get()).thenReturn(jedisCluster);
backplane =
new RedisShardBackplane(
config, "prequeue-operation-test", (o) -> o, (o) -> o, mockJedisClusterFactory);
backplane.start("startTime/test:0000");
final String opName = "op";
ExecuteEntry executeEntry = ExecuteEntry.newBuilder().setOperationName(opName).build();
Operation op = Operation.newBuilder().setName(opName).build();
backplane.prequeue(executeEntry, op);
verify(mockJedisClusterFactory, times(1)).get();
verify(jedisCluster, times(1))
.setex(
backplane.operationKey(opName),
config.getOperationExpire(),
RedisShardBackplane.operationPrinter.print(op));
verify(jedisCluster, times(1))
.lpush(config.getPreQueuedOperationsListName(), JsonFormat.printer().print(executeEntry));
verifyChangePublished(jedisCluster);
}
// Marking an operation as queuing should publish a change for it.
@Test
public void queuingPublishes() throws IOException {
RedisShardBackplaneConfig config =
RedisShardBackplaneConfig.newBuilder()
.setOperationChannelPrefix("OperationChannel")
.build();
JedisCluster jedisCluster = mock(JedisCluster.class);
when(mockJedisClusterFactory.get()).thenReturn(jedisCluster);
backplane =
new RedisShardBackplane(
config, "requeue-operation-test", (o) -> o, (o) -> o, mockJedisClusterFactory);
backplane.start("startTime/test:0000");
final String opName = "op";
backplane.queueing(opName);
verify(mockJedisClusterFactory, times(1)).get();
verifyChangePublished(jedisCluster);
}
// Requeuing a dispatched operation must remove it from the dispatched hash,
// push it back onto the queued list, and publish a change.
@Test
public void requeueDispatchedOperationQueuesAndPublishes() throws IOException {
RedisShardBackplaneConfig config =
RedisShardBackplaneConfig.newBuilder()
.setDispatchedOperationsHashName("DispatchedOperations")
.setOperationChannelPrefix("OperationChannel")
.setQueuedOperationsListName("{hash}QueuedOperations")
.build();
JedisCluster jedisCluster = mock(JedisCluster.class);
when(mockJedisClusterFactory.get()).thenReturn(jedisCluster);
backplane =
new RedisShardBackplane(
config, "requeue-operation-test", (o) -> o, (o) -> o, mockJedisClusterFactory);
backplane.start("startTime/test:0000");
final String opName = "op";
when(jedisCluster.hdel(config.getDispatchedOperationsHashName(), opName)).thenReturn(1L);
QueueEntry queueEntry =
QueueEntry.newBuilder()
.setExecuteEntry(ExecuteEntry.newBuilder().setOperationName("op").build())
.build();
backplane.requeueDispatchedOperation(queueEntry);
verify(mockJedisClusterFactory, times(1)).get();
verify(jedisCluster, times(1)).hdel(config.getDispatchedOperationsHashName(), opName);
verify(jedisCluster, times(1))
.lpush(config.getQueuedOperationsListName(), JsonFormat.printer().print(queueEntry));
verifyChangePublished(jedisCluster);
}
// Dispatching a never-requeued operation records requeue-attempts 0 in the
// dispatched hash and returns an entry ready for requeue with attempts 1.
@Test
public void dispatchedOperationsShowProperRequeueAmount0to1()
throws IOException, InterruptedException {
// ARRANGE
int STARTING_REQUEUE_AMOUNT = 0;
int REQUEUE_AMOUNT_WHEN_DISPATCHED = 0;
int REQUEUE_AMOUNT_WHEN_READY_TO_REQUEUE = 1;
// create a backplane
RedisShardBackplaneConfig config =
RedisShardBackplaneConfig.newBuilder()
.setDispatchedOperationsHashName("DispatchedOperations")
.setOperationChannelPrefix("OperationChannel")
.setQueuedOperationsListName("{hash}QueuedOperations")
.build();
JedisCluster jedisCluster = mock(JedisCluster.class);
when(mockJedisClusterFactory.get()).thenReturn(jedisCluster);
backplane =
new RedisShardBackplane(
config, "requeue-operation-test", (o) -> o, (o) -> o, mockJedisClusterFactory);
backplane.start("startTime/test:0000");
// ARRANGE
// Assume the operation queue is already populated with a first-time operation.
// this means the operation's requeue amount will be 0.
// The jedis cluster is also mocked to assume success on other operations.
QueueEntry queueEntry =
QueueEntry.newBuilder()
.setExecuteEntry(ExecuteEntry.newBuilder().setOperationName("op").build())
.setRequeueAttempts(STARTING_REQUEUE_AMOUNT)
.build();
String queueEntryJson = JsonFormat.printer().print(queueEntry);
when(jedisCluster.brpoplpush(any(String.class), any(String.class), any(Integer.class)))
.thenReturn(queueEntryJson);
// PRE-ASSERT
// The answer inspects the DispatchedOperation written via hsetnx before
// dispatchOperation returns.
when(jedisCluster.hsetnx(any(String.class), any(String.class), any(String.class)))
.thenAnswer(
args -> {
// Extract the operation that was dispatched
String dispatchedOperationJson = args.getArgument(2);
DispatchedOperation.Builder dispatchedOperationBuilder =
DispatchedOperation.newBuilder();
JsonFormat.parser().merge(dispatchedOperationJson, dispatchedOperationBuilder);
DispatchedOperation dispatchedOperation = dispatchedOperationBuilder.build();
assertThat(dispatchedOperation.getQueueEntry().getRequeueAttempts())
.isEqualTo(REQUEUE_AMOUNT_WHEN_DISPATCHED);
return 1L;
});
// ACT
// dispatch the operation and test properties of the QueueEntry and internal jedis calls.
List<Platform.Property> properties = new ArrayList<>();
QueueEntry readyForRequeue = backplane.dispatchOperation(properties);
// ASSERT
assertThat(readyForRequeue.getRequeueAttempts())
.isEqualTo(REQUEUE_AMOUNT_WHEN_READY_TO_REQUEUE);
}
// Same as above, but starting from one prior requeue: dispatched with
// attempts 1, returned ready-for-requeue with attempts 2.
@Test
public void dispatchedOperationsShowProperRequeueAmount1to2()
throws IOException, InterruptedException {
// ARRANGE
int STARTING_REQUEUE_AMOUNT = 1;
int REQUEUE_AMOUNT_WHEN_DISPATCHED = 1;
int REQUEUE_AMOUNT_WHEN_READY_TO_REQUEUE = 2;
// create a backplane
RedisShardBackplaneConfig config =
RedisShardBackplaneConfig.newBuilder()
.setDispatchedOperationsHashName("DispatchedOperations")
.setOperationChannelPrefix("OperationChannel")
.setQueuedOperationsListName("{hash}QueuedOperations")
.build();
JedisCluster jedisCluster = mock(JedisCluster.class);
when(mockJedisClusterFactory.get()).thenReturn(jedisCluster);
backplane =
new RedisShardBackplane(
config, "requeue-operation-test", (o) -> o, (o) -> o, mockJedisClusterFactory);
backplane.start("startTime/test:0000");
// Assume the operation queue is already populated from a first re-queue.
// this means the operation's requeue amount will be 1.
// The jedis cluster is also mocked to assume success on other operations.
QueueEntry queueEntry =
QueueEntry.newBuilder()
.setExecuteEntry(ExecuteEntry.newBuilder().setOperationName("op").build())
.setRequeueAttempts(STARTING_REQUEUE_AMOUNT)
.build();
String queueEntryJson = JsonFormat.printer().print(queueEntry);
when(jedisCluster.brpoplpush(any(String.class), any(String.class), any(Integer.class)))
.thenReturn(queueEntryJson);
// PRE-ASSERT
when(jedisCluster.hsetnx(any(String.class), any(String.class), any(String.class)))
.thenAnswer(
args -> {
// Extract the operation that was dispatched
String dispatchedOperationJson = args.getArgument(2);
DispatchedOperation.Builder dispatchedOperationBuilder =
DispatchedOperation.newBuilder();
JsonFormat.parser().merge(dispatchedOperationJson, dispatchedOperationBuilder);
DispatchedOperation dispatchedOperation = dispatchedOperationBuilder.build();
assertThat(dispatchedOperation.getQueueEntry().getRequeueAttempts())
.isEqualTo(REQUEUE_AMOUNT_WHEN_DISPATCHED);
return 1L;
});
// ACT
// dispatch the operation and test properties of the QueueEntry and internal jedis calls.
List<Platform.Property> properties = new ArrayList<>();
QueueEntry readyForRequeue = backplane.dispatchOperation(properties);
// ASSERT
assertThat(readyForRequeue.getRequeueAttempts())
.isEqualTo(REQUEUE_AMOUNT_WHEN_READY_TO_REQUEUE);
}
// Completing an operation must remove it from the dispatched hash.
@Test
public void completeOperationUndispatches() throws IOException {
RedisShardBackplaneConfig config =
RedisShardBackplaneConfig.newBuilder()
.setDispatchedOperationsHashName("DispatchedOperations")
.build();
JedisCluster jedisCluster = mock(JedisCluster.class);
when(mockJedisClusterFactory.get()).thenReturn(jedisCluster);
backplane =
new RedisShardBackplane(
config, "complete-operation-test", (o) -> o, (o) -> o, mockJedisClusterFactory);
backplane.start("startTime/test:0000");
final String opName = "op";
backplane.completeOperation(opName);
verify(mockJedisClusterFactory, times(1)).get();
verify(jedisCluster, times(1)).hdel(config.getDispatchedOperationsHashName(), opName);
}
// NOTE(review): currently @Ignore'd; the reason is not recorded here — confirm
// before re-enabling.
@org.junit.Ignore
@Test
public void deleteOperationDeletesAndPublishes() throws IOException {
RedisShardBackplaneConfig config =
RedisShardBackplaneConfig.newBuilder()
.setDispatchedOperationsHashName("DispatchedOperations")
.setOperationPrefix("Operation")
.setOperationChannelPrefix("OperationChannel")
.build();
JedisCluster jedisCluster = mock(JedisCluster.class);
when(mockJedisClusterFactory.get()).thenReturn(jedisCluster);
backplane =
new RedisShardBackplane(
config, "delete-operation-test", (o) -> o, (o) -> o, mockJedisClusterFactory);
backplane.start("startTime/test:0000");
final String opName = "op";
backplane.deleteOperation(opName);
verify(mockJedisClusterFactory, times(1)).get();
verify(jedisCluster, times(1)).hdel(config.getDispatchedOperationsHashName(), opName);
verify(jedisCluster, times(1)).del(backplane.operationKey(opName));
verifyChangePublished(jedisCluster);
}
// A tool invocation id present under the blacklist prefix makes matching
// request metadata report as blacklisted.
@Test
public void invocationsCanBeBlacklisted() throws IOException {
RedisShardBackplaneConfig config =
RedisShardBackplaneConfig.newBuilder()
.setInvocationBlacklistPrefix("InvocationBlacklist")
.build();
UUID toolInvocationId = UUID.randomUUID();
JedisCluster jedisCluster = mock(JedisCluster.class);
String invocationBlacklistKey = config.getInvocationBlacklistPrefix() + ":" + toolInvocationId;
when(jedisCluster.exists(invocationBlacklistKey)).thenReturn(true);
when(mockJedisClusterFactory.get()).thenReturn(jedisCluster);
backplane =
new RedisShardBackplane(
config, "invocation-blacklist-test", o -> o, o -> o, mockJedisClusterFactory);
backplane.start("startTime/test:0000");
assertThat(
backplane.isBlacklisted(
RequestMetadata.newBuilder()
.setToolInvocationId(toolInvocationId.toString())
.build()))
.isTrue();
verify(mockJedisClusterFactory, times(1)).get();
verify(jedisCluster, times(1)).exists(invocationBlacklistKey);
}
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.1-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2016.09.14 at 11:44:49 AM PET
//
package org.openfact.models.jpa.entities.ubl.common;
import javax.persistence.Access;
import javax.persistence.AccessType;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import org.hibernate.annotations.GenericGenerator;
/**
 * JPA entity for a UBL extension, mapped to table UBLEXTENSION.
 * Generated from the UBL schema (JAXB) — see the header above: manual edits
 * may be lost on schema regeneration.
 */
@Entity
@Table(name = "UBLEXTENSION")
public class UBLExtensionEntity {
// Surrogate database primary key (UUID), distinct from the UBL "ID" field below.
@Id
@Column(name = "ID")
@GeneratedValue(generator = "uuid2")
@GenericGenerator(name = "uuid2", strategy = "uuid2")
@Access(AccessType.PROPERTY)
protected String id;
// The UBL document's own identifier, stored in column ID_UBL.
@Column(name = "ID_UBL")
protected String ID;
@Column(name = "NAME")
protected String name;
@Column(name = "EXTENSION_AGENCY_ID")
protected String extensionAgencyID;
@Column(name = "EXTENSION_AGENCY_NAME")
protected String extensionAgencyName;
@Column(name = "EXTENSION_VERSION_ID")
protected String extensionVersionID;
@Column(name = "EXTENSION_AGENCY_URI")
protected String extensionAgencyURI;
@Column(name = "EXTENSION_URI")
protected String extensionURI;
@Column(name = "EXTENSION_REASON_CODE")
protected String extensionReasonCode;
@Column(name = "EXTENSION_REASON")
protected String extensionReason;
// Eagerly initialized so the relation is never null; CascadeType.ALL means an
// (empty) ExtensionContentEntity row is persisted along with this entity.
@ManyToOne(targetEntity = ExtensionContentEntity.class, cascade = { CascadeType.ALL })
@JoinColumn(name = "EXTENSION_CONTENT_ID")
protected ExtensionContentEntity extensionContent = new ExtensionContentEntity();
/**
 * @return the id
 */
public String getId() {
return id;
}
/**
 * @param id
 *            the id to set
 */
public void setId(String id) {
this.id = id;
}
/**
 * @return the iD
 */
public String getID() {
return ID;
}
/**
 * @param iD
 *            the iD to set
 */
public void setID(String iD) {
ID = iD;
}
/**
 * @return the name
 */
public String getName() {
return name;
}
/**
 * @param name
 *            the name to set
 */
public void setName(String name) {
this.name = name;
}
/**
 * @return the extensionAgencyID
 */
public String getExtensionAgencyID() {
return extensionAgencyID;
}
/**
 * @param extensionAgencyID
 *            the extensionAgencyID to set
 */
public void setExtensionAgencyID(String extensionAgencyID) {
this.extensionAgencyID = extensionAgencyID;
}
/**
 * @return the extensionAgencyName
 */
public String getExtensionAgencyName() {
return extensionAgencyName;
}
/**
 * @param extensionAgencyName
 *            the extensionAgencyName to set
 */
public void setExtensionAgencyName(String extensionAgencyName) {
this.extensionAgencyName = extensionAgencyName;
}
/**
 * @return the extensionVersionID
 */
public String getExtensionVersionID() {
return extensionVersionID;
}
/**
 * @param extensionVersionID
 *            the extensionVersionID to set
 */
public void setExtensionVersionID(String extensionVersionID) {
this.extensionVersionID = extensionVersionID;
}
/**
 * @return the extensionAgencyURI
 */
public String getExtensionAgencyURI() {
return extensionAgencyURI;
}
/**
 * @param extensionAgencyURI
 *            the extensionAgencyURI to set
 */
public void setExtensionAgencyURI(String extensionAgencyURI) {
this.extensionAgencyURI = extensionAgencyURI;
}
/**
 * @return the extensionURI
 */
public String getExtensionURI() {
return extensionURI;
}
/**
 * @param extensionURI
 *            the extensionURI to set
 */
public void setExtensionURI(String extensionURI) {
this.extensionURI = extensionURI;
}
/**
 * @return the extensionReasonCode
 */
public String getExtensionReasonCode() {
return extensionReasonCode;
}
/**
 * @param extensionReasonCode
 *            the extensionReasonCode to set
 */
public void setExtensionReasonCode(String extensionReasonCode) {
this.extensionReasonCode = extensionReasonCode;
}
/**
 * @return the extensionReason
 */
public String getExtensionReason() {
return extensionReason;
}
/**
 * @param extensionReason
 *            the extensionReason to set
 */
public void setExtensionReason(String extensionReason) {
this.extensionReason = extensionReason;
}
/**
 * @return the extensionContent
 */
public ExtensionContentEntity getExtensionContent() {
return extensionContent;
}
/**
 * @param extensionContent
 *            the extensionContent to set
 */
public void setExtensionContent(ExtensionContentEntity extensionContent) {
this.extensionContent = extensionContent;
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vcs.changes.actions;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.changes.ui.ChangesListView;
import com.intellij.openapi.vcs.changes.ui.RollbackChangesDialog;
import com.intellij.openapi.vcs.changes.ui.RollbackProgressModifier;
import com.intellij.openapi.vcs.rollback.RollbackEnvironment;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.util.ThreeState;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import com.intellij.vcsUtil.RollbackUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import static com.intellij.openapi.ui.Messages.getQuestionIcon;
import static com.intellij.openapi.ui.Messages.showYesNoDialog;
import static com.intellij.util.containers.UtilKt.notNullize;
/**
 * IDE action that rolls back (reverts) selected changes: regular changes,
 * locally deleted files, and files modified without checkout ("hijacked").
 * Falls back to rolling back the default changelist when nothing specific is
 * selected.
 */
public class RollbackAction extends AnAction implements DumbAware, UpdateInBackground {
@Override
public void update(@NotNull AnActionEvent e) {
// Visible only when the project has at least one active VCS.
Project project = e.getData(CommonDataKeys.PROJECT);
final boolean visible = project != null && ProjectLevelVcsManager.getInstance(project).hasActiveVcss();
e.getPresentation().setEnabledAndVisible(visible);
if (! visible) return;
// Enabled when anything revertible is in the selection or the default changelist.
final Change[] leadSelection = e.getData(VcsDataKeys.CHANGE_LEAD_SELECTION);
boolean isEnabled = (leadSelection != null && leadSelection.length > 0) ||
Boolean.TRUE.equals(e.getData(VcsDataKeys.HAVE_LOCALLY_DELETED)) ||
Boolean.TRUE.equals(e.getData(VcsDataKeys.HAVE_MODIFIED_WITHOUT_EDITING)) ||
Boolean.TRUE.equals(e.getData(VcsDataKeys.HAVE_SELECTED_CHANGES)) ||
hasReversibleFiles(e) ||
currentChangelistNotEmpty(project);
e.getPresentation().setEnabled(isEnabled);
// Operation name is VCS-specific (e.g. "Rollback" vs "Revert").
String operationName = RollbackUtil.getRollbackOperationName(project);
e.getPresentation().setText(operationName + "...");
if (isEnabled) {
e.getPresentation().setDescription(UIUtil.removeMnemonic(operationName) + " selected changes");
}
}
// True if any selected virtual file has changes under it, is itself affected,
// or is in the modified-without-editing set.
private static boolean hasReversibleFiles(@NotNull AnActionEvent e) {
ChangeListManager manager = ChangeListManager.getInstance(e.getRequiredData(CommonDataKeys.PROJECT));
Set<VirtualFile> modifiedWithoutEditing = new HashSet<>(manager.getModifiedWithoutEditing());
return notNullize(e.getData(VcsDataKeys.VIRTUAL_FILE_STREAM)).anyMatch(
file -> manager.haveChangesUnder(file) != ThreeState.NO || manager.isFileAffected(file) || modifiedWithoutEditing.contains(file));
}
// True if the project's default changelist contains any changes.
private static boolean currentChangelistNotEmpty(Project project) {
ChangeListManager clManager = ChangeListManager.getInstance(project);
ChangeList list = clManager.getDefaultChangeList();
return !list.getChanges().isEmpty();
}
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
Project project = e.getData(CommonDataKeys.PROJECT);
if (project == null) {
return;
}
final String title = ActionPlaces.CHANGES_VIEW_TOOLBAR.equals(e.getPlace())
? null
: "Can not " + UIUtil.removeMnemonic(RollbackUtil.getRollbackOperationName(project)) + " now";
// Bail out (with a notification) if the changelist manager is frozen.
if (ChangeListManager.getInstance(project).isFreezedWithNotification(title)) return;
List<FilePath> missingFiles = e.getData(ChangesListView.MISSING_FILES_DATA_KEY);
List<Change> changes = getChanges(project, e);
LinkedHashSet<VirtualFile> modifiedWithoutEditing = getModifiedWithoutEditing(e, project);
if (modifiedWithoutEditing != null) {
// Hijacked files are handled separately below; exclude them from the change rollback.
changes = ContainerUtil.filter(changes, change -> !modifiedWithoutEditing.contains(change.getVirtualFile()));
}
FileDocumentManager.getInstance().saveAllDocuments();
boolean hasChanges = false;
if (missingFiles != null && !missingFiles.isEmpty()) {
hasChanges = true;
new RollbackDeletionAction().actionPerformed(e);
}
if (modifiedWithoutEditing != null && !modifiedWithoutEditing.isEmpty()) {
hasChanges = true;
rollbackModifiedWithoutEditing(project, modifiedWithoutEditing);
}
if (!changes.isEmpty()) {
RollbackChangesDialog.rollbackChanges(project, changes);
}
else if (!hasChanges) {
// Nothing explicitly selected: offer to roll back the default changelist.
LocalChangeList currentChangeList = ChangeListManager.getInstance(project).getDefaultChangeList();
RollbackChangesDialog.rollbackChanges(project, currentChangeList);
}
}
// Collects changes from the event context, falling back to changes under the
// selected virtual files. Returns an empty list when nothing is found.
@NotNull
private static List<Change> getChanges(final Project project, final AnActionEvent e) {
Change[] changes = e.getData(VcsDataKeys.CHANGES);
if (changes == null) {
final VirtualFile[] files = e.getData(CommonDataKeys.VIRTUAL_FILE_ARRAY);
if (files != null) {
final ChangeListManager clManager = ChangeListManager.getInstance(project);
final List<Change> changesList = new ArrayList<>();
for (VirtualFile vf : files) {
changesList.addAll(clManager.getChangesIn(vf));
}
if (!changesList.isEmpty()) {
changes = changesList.toArray(new Change[0]);
}
}
}
if (changes != null && changes.length > 0) {
return ContainerUtil.newArrayList(changes);
}
return Collections.emptyList();
}
// Returns the hijacked files from the event context, or the selected files
// intersected with the modified-without-editing set; null when none apply.
@Nullable
private static LinkedHashSet<VirtualFile> getModifiedWithoutEditing(final AnActionEvent e, Project project) {
final List<VirtualFile> modifiedWithoutEditing = e.getData(VcsDataKeys.MODIFIED_WITHOUT_EDITING_DATA_KEY);
if (modifiedWithoutEditing != null && modifiedWithoutEditing.size() > 0) {
return new LinkedHashSet<>(modifiedWithoutEditing);
}
final VirtualFile[] virtualFiles = e.getData(CommonDataKeys.VIRTUAL_FILE_ARRAY);
if (virtualFiles != null && virtualFiles.length > 0) {
LinkedHashSet<VirtualFile> result = new LinkedHashSet<>(Arrays.asList(virtualFiles));
result.retainAll(ChangeListManager.getInstance(project).getModifiedWithoutEditing());
return result;
}
return null;
}
// Confirms with the user, then reverts hijacked files per-VCS under a modal
// progress, collecting errors and refreshing/dirtying the files afterwards.
private static void rollbackModifiedWithoutEditing(final Project project, final LinkedHashSet<VirtualFile> modifiedWithoutEditing) {
final String operationName = StringUtil.decapitalize(UIUtil.removeMnemonic(RollbackUtil.getRollbackOperationName(project)));
String message = (modifiedWithoutEditing.size() == 1)
? VcsBundle.message("rollback.modified.without.editing.confirm.single",
operationName, modifiedWithoutEditing.iterator().next().getPresentableUrl())
: VcsBundle.message("rollback.modified.without.editing.confirm.multiple",
operationName, modifiedWithoutEditing.size());
int rc = showYesNoDialog(project, message, VcsBundle.message("changes.action.rollback.title", operationName), getQuestionIcon());
if (rc != Messages.YES) {
return;
}
final List<VcsException> exceptions = new ArrayList<>();
final ProgressManager progressManager = ProgressManager.getInstance();
final Runnable action = () -> {
final ProgressIndicator indicator = progressManager.getProgressIndicator();
try {
ChangesUtil.processVirtualFilesByVcs(project, modifiedWithoutEditing, (vcs, items) -> {
final RollbackEnvironment rollbackEnvironment = vcs.getRollbackEnvironment();
if (rollbackEnvironment != null) {
if (indicator != null) {
indicator.setText(vcs.getDisplayName() +
": performing " + StringUtil.toLowerCase(UIUtil.removeMnemonic(rollbackEnvironment.getRollbackOperationName())) + "...");
indicator.setIndeterminate(false);
}
rollbackEnvironment
.rollbackModifiedWithoutCheckout(items, exceptions, new RollbackProgressModifier(items.size(), indicator));
if (indicator != null) {
indicator.setText2("");
}
}
});
}
catch (ProcessCanceledException e) {
// for files refresh
}
if (!exceptions.isEmpty()) {
AbstractVcsHelper.getInstance(project).showErrors(exceptions, VcsBundle.message("rollback.modified.without.checkout.error.tab",
operationName));
}
VfsUtil.markDirty(true, false, VfsUtilCore.toVirtualFileArray(modifiedWithoutEditing));
VirtualFileManager.getInstance().asyncRefresh(() -> {
for (VirtualFile virtualFile : modifiedWithoutEditing) {
VcsDirtyScopeManager.getInstance(project).fileDirty(virtualFile);
}
});
};
progressManager.runProcessWithProgressSynchronously(action, operationName, true, project);
}
}
| |
/*
* Copyright (c) 2015, Absolute Performance, Inc. http://www.absolute-performance.com
* Copyright (c) 2017, Jack J. Woehr jwoehr@softwoehr.com
* SoftWoehr LLC PO Box 51, Golden CO 80402-0051 http://www.softwoehr.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package ublu;
import java.io.PrintStream;
import java.util.logging.Logger;
import org.sblim.cimclient.internal.cim.CIMVersion;
import ublu.util.Generics.StringArrayList;
import ublu.util.GetArgs;
import ublu.util.Interpreter;
import ublu.util.InterpreterLogger;
/**
* Main class of the interpretive application. Application controller.
*
* @author jwoehr
*/
public class Ublu {
// Parsed command-line arguments for this invocation.
private GetArgs myGetArgs;
// Original argument list — presumably as passed to main; set outside this view (TODO confirm).
private StringArrayList originalArgs;
// True when a command-line switch indicated we are running under Goublu.
private boolean goubluing = false;
// True when running as the UbluWin GUI; static, shared across instances.
private static boolean windowing = false;
/**
 * Return Ublu's GetArgs object holding the parsed command-line arguments.
 *
 * @return Ublu's GetArgs object
 */
public GetArgs getMyGetArgs() {
return myGetArgs;
}
/**
 * True if a command-line switch told us we're running under Goublu
 * (the Go-based console front end).
 *
 * @return True if command-line switch told us we're running under Goublu
 */
public boolean isGoubluing() {
return goubluing;
}
/**
 * True if we're running as UbluWin (the windowed GUI).
 * Note: reads a static flag, so the answer is process-wide, not per-instance.
 *
 * @return True if we're running as UbluWin.
 */
public boolean isWindowing() {
return windowing;
}
/**
 * Our special logging instance. Deliberately does NOT conform to Java
 * logger design recommendations (non-static, upper-case field name).
 */
protected Logger LOG;
/**
 * Singleton main interpreter; accessed via the static getter/setter below.
 */
private static Interpreter mainInterpreter;
/**
 * Get the singleton main interpreter instance.
 *
 * @return main interpreter instance
 */
public final static Interpreter getMainInterpreter() {
return mainInterpreter;
}
/**
 * Set the singleton main interpreter instance.
 *
 * @param interpreter main interpreter instance
 */
protected final static void setMainInterpreter(Interpreter interpreter) {
mainInterpreter = interpreter;
}
/**
 * Get the current OS user name.
 *
 * @return the user.name from System.properties
 */
public final static String getUser() {
return System.getProperty("user.name");
}
/**
 * Get the logger used by the Interpreter instance.
 *
 * @return the logger for the Interpreter instance.
 */
public Logger getLogger() {
return LOG;
}
/**
 * Get the Ublu version string, as recorded at build time in {@code Version}.
 *
 * @return String describing the Ublu version
 */
public final static String ubluVersion() {
return Version.ubluVersion;
}
/**
 * Get the compile date and time, as recorded at build time in {@code Version}.
 *
 * @return String describing the build
 */
public final static String compileDateTime() {
return Version.compileDateTime;
}
// One-line hint shown to the interactive user at startup.
static String HELPLINE
= "Type help for help. Type license for license. Type bye to exit.";
/**
 * Build a human-readable listing of the open source projects Ublu embeds,
 * with their version and copyright information, sections separated by
 * "---" dividers.
 *
 * @return a string enumerating the open source projects used by
 * Interpreter.
 */
public final static String openSourceList() {
    final String divider = "\n---\n";
    final StringBuilder sb = new StringBuilder();
    // Postgresql JDBC driver
    sb.append(divider);
    sb.append("Postgresql ").append(org.postgresql.Driver.getVersion()).append('\n');
    sb.append("Copyright (c) 1997-2011, PostgreSQL Global Development Group\n");
    sb.append("All rights reserved http://www.postgresql.org");
    // tn5250j terminal emulator
    sb.append(divider);
    sb.append("tn5250j http://tn5250j.sourceforge.net/\n");
    sb.append("NO WARRANTY (GPL) see the file tn5250_LICENSE");
    // SBLIM CIM client
    sb.append(divider);
    sb.append(CIMVersion.getProductName()).append(' ');
    sb.append(CIMVersion.getVersion()).append(' ');
    sb.append(CIMVersion.getBuildDate()).append(' ');
    sb.append(CIMVersion.getBuildTime()).append('\n');
    sb.append("http://sblim.cvs.sourceforge.net/viewvc/sblim/jsr48-client/").append('\n');
    sb.append(CIMVersion.getCopyright()).append('\n');
    sb.append("Eclipse Public License https://opensource.org/licenses/eclipse-1.0.php");
    // PigIron z/VM library
    sb.append(divider);
    sb.append(com.softwoehr.pigiron.Version.getVersion());
    sb.append(" http://pigiron.sourceforge.net\n");
    sb.append("Copyright (c) 2008-2016 Jack J. Woehr, PO Box 51, Golden CO 80402 USA\n");
    sb.append("All Rights Reserved");
    // org.json
    sb.append(divider);
    sb.append("org.json\n");
    sb.append("Copyright (c) 2002 JSON.org");
    return sb.toString();
}
/**
 * Build the banner text shown when Ublu starts interactively: version, build
 * stamp, copyrights, license disclaimer, and the open source credits list.
 *
 * @return the complete startup banner string
 */
public final static String startupMessage() {
    StringBuilder banner = new StringBuilder("Ublu ");
    banner.append(ubluVersion());
    banner.append(" build of ");
    banner.append(compileDateTime());
    banner.append("\n");
    banner.append("Author: Jack J. Woehr.\n");
    banner.append("Copyright 2015, Absolute Performance, Inc., http://www.absolute-performance.com\n");
    banner.append("Copyright 2017, Jack J. Woehr, http://www.softwoehr.com\n");
    banner.append("All Rights Reserved\n");
    banner.append("Ublu is Open Source Software under the BSD 2-clause license.\n");
    banner.append("THERE IS NO WARRANTY and NO GUARANTEE OF CORRECTNESS NOR APPLICABILITY.\n");
    banner.append("***\n");
    banner.append("Ublu utilizes the following open source projects:");
    banner.append(openSourceList());
    banner.append("\n***");
    return banner.toString();
}
/**
 * Describe command-line invocation usage of Ublu.
 *
 * @return multi-line usage text listing the recognized ublu options
 */
public final static String invocationHelp() {
    return "Ublu invocation: java [ java options .. ] -jar ublu.jar [ ublu options .. ] [ ublu commands .. ]\n"
            + "Ublu options:\n"
            + " -i filename [-i filename ..]\tinclude all indicated source files\n"
            + " -s\t\t\t\tif including, include silently, otherwise startup interpreter silently\n"
            + " -t [filename, --]\t\topen history file filename or default if --\n"
            + " -h\t\t\t\tdisplay this help and then exit\n";
}
/**
 * Reinit logger e.g., after creating windowing, so subsequent log output is
 * written to the newly active stream.
 *
 * @param out logging stream to attach the replacement logger to
 */
public void reinitLogger(PrintStream out) {
// Replaces LOG wholesale; callers holding the old logger keep the old stream.
LOG = new InterpreterLogger("UbluInterpreter." + Thread.currentThread().toString(), Logger.getLogger(Ublu.class.getName()), out);
}
/**
 * Instance with args ready for {@link ublu.util.Interpreter} to start its
 * first {@link ublu.util.Interpreter#loop()}.
 *
 * @param args arguments at invocation, effectively just another command
 * line
 */
public Ublu(String[] args) {
this();
// Strip Ublu's own switches; the remainder becomes the interpreter's command line.
args = processArgs(args);
// /* Debug */ System.err.println("Args processed are " + new StringArrayList(args));
// /* Debug */ System.err.println("GetArgs processed are " + myGetArgs);
setMainInterpreter(new Interpreter(args, this));
// Logger is attached to the interpreter's error stream so log output follows redirection.
LOG = new InterpreterLogger("UbluInterpreter." + Thread.currentThread().toString(), Logger.getLogger(Ublu.class.getName()), getMainInterpreter().getErroutStream());
}
/**
 * Singleton instance
 */
// NOTE(review): public mutable static; visible code assigns it only in this
// constructor — confirm no external writers before narrowing access.
public static Ublu ubluSingleton;
/**
 * Saves singleton instance
 */
protected Ublu() {
ubluSingleton = this;
}
/**
 * Run the singleton main interpreter
 *
 * @return the global return value
 */
public int runMainInterpreter() {
Interpreter interpreter = getMainInterpreter();
if (myGetArgs.containsOpt("-h")) {
// -h: show banner and usage on the error stream, skip interpreting entirely.
interpreter.outputerrln(startupMessage());
interpreter.outputerrln(invocationHelp());
} else {
if (myGetArgs.containsOpt("-g")) {
// -g: we were launched under the Goublu console front end.
goubluing = true;
}
if (myGetArgs.containsOpt("-t")) { // Instance history (with filename, if provided)
StringArrayList sal = myGetArgs.getAllIdenticalOptionArguments("-t");
if (!sal.isEmpty()) {
String hfilename = sal.get(0);
if (hfilename != null) {
interpreter.setHistoryFileName(hfilename);
}
}
interpreter.instanceHistory();
}
if (myGetArgs.containsOpt("-i")) {
// Prepend an "include" command (optionally silent) per -i file, in order.
for (String i : myGetArgs.getAllIdenticalOptionArguments("-i")) {
interpreter.getArgArray().add(0, "include");
interpreter.getArgArray().add(1, i);
if (myGetArgs.containsOpt("-s")) {
interpreter.getArgArray().add(1, "-s");
}
}
// NOTE(review): loop() followed by interpret() — presumably loop() consumes
// the queued includes and interpret() then reads further input; confirm.
interpreter.loop();
interpreter.interpret();
} else if (interpreter.getArgArray().isEmpty()) {
// Interactive session: greet unless silenced by -s.
if (!myGetArgs.containsOpt("-s")) {
if (interpreter.isConsole()) {
interpreter.outputerrln(startupMessage());
interpreter.outputerrln(HELPLINE);
} else if (isGoubluing()) {
interpreter.outputln(startupMessage());
interpreter.outputln(HELPLINE);
}
}
interpreter.interpret();
} else {
// Non-empty argument array: execute it as a one-shot command line.
interpreter.loop();
}
interpreter.closeHistory();
interpreter.getErroutStream().flush();
interpreter.getOutputStream().flush();
}
return interpreter.getGlobal_ret_val();
}
/**
 * Run a command or run the interpreter. This is the main() of the Main
 * Class of the system.
 * <p>
 * <p>
 * Exits the Java virtual machine returning to the system caller the retval
 * of the last command executed, either
 * {@link ublu.command.CommandInterface.COMMANDRESULT#SUCCESS} (0) if the
 * last executed or
 * {@link ublu.command.CommandInterface.COMMANDRESULT#FAILURE} (1).</p>
 *
 * @param args commands to program in the program's syntax. Any strings
 * starting with a dash <code>-</code> are taken to be switches to the ublu
 * invocation itself and are processed and removed from the args passed to
 * the interpreter.
 */
public static void main(String[] args) {
// System.exit(niam(args));
int result = niam(args);
// When windowing (UbluWin), the GUI owns shutdown, so don't kill the JVM here.
if (!windowing) {
System.exit(result);
}
}
/**
 * Run a command or run the interpreter. This is a factor of the Main Class
 * main() of the system. This method does not System.exit, merely returning
 * the retval of the last command. Useful for calling Ublu from another Java
 * program.
 *
 * @param args commands to program in the program's syntax. Any strings
 * starting with a dash <code>-</code> are taken to be switches to the ublu
 * invocation itself and are processed and removed from the args passed to
 * the interpreter.
 * @return retval of the last command executed, either
 * {@link ublu.command.CommandInterface.COMMANDRESULT#SUCCESS} (0) if the
 * last executed normally or otherwise
 * {@link ublu.command.CommandInterface.COMMANDRESULT#FAILURE} (1).
 */
public static int niam(String[] args) {
    return new Ublu(args).runMainInterpreter();
}
/**
 * Get the value of originalArgs
 *
 * @return the value of originalArgs
 */
public StringArrayList getOriginalArgs() {
return originalArgs;
}
// Leading dash-switches peeled off the command line (see the retired
// oldProcessArgs below); retained for compatibility with existing callers.
private StringArrayList switches = new StringArrayList();
/**
 * Get the value of switches
 *
 * @return the value of switches
 */
public StringArrayList getSwitches() {
return switches;
}
/**
 * Set the value of switches
 *
 * @param switches new value of switches
 */
public void setSwitches(StringArrayList switches) {
this.switches = switches;
}
/**
 * Set the value of originalArgs
 *
 * @param originalArgs new value of originalArgs
 */
public void setOriginalArgs(StringArrayList originalArgs) {
this.originalArgs = originalArgs;
}
// private String[] oldProcessArgs(String[] args) {
// StringArrayList remainderArgs = new StringArrayList(args);
// setOriginalArgs(new StringArrayList(args));
// for (String s : getOriginalArgs()) {
// if (s.startsWith("-")) {
// getSwitches().add(s);
// } else {
// break;
// }
// }
// for (String switche : getSwitches()) {
// remainderArgs.remove(0);
// }
// return remainderArgs.toStringArray();
// }
// Parse the invocation arguments: record the option set in myGetArgs and
// return only the non-option arguments, which become the interpreter's input.
private String[] processArgs(String[] args) {
// /* Debug */ System.err.println("args are " + Arrays.toString(args));
myGetArgs = new GetArgs(args);
// /* Debug */ System.err.println("myGetArgs is " + myGetArgs);
return myGetArgs.getArgumentsAsStringArray();
}
}
| |
/*
* Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.scim.common.group;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.core.util.IdentityDatabaseUtil;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.identity.scim.common.utils.IdentitySCIMException;
import org.wso2.carbon.identity.scim.common.utils.SCIMCommonUtils;
import org.wso2.carbon.identity.scim.common.utils.SQLQueries;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import org.wso2.charon.core.schema.SCIMConstants;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * JDBC based Data Access layer for managing SCIM specific attributes that are not stored in
 * user store.
 *
 * <p>All methods obtain a pooled connection from {@link IdentityDatabaseUtil} and release it
 * in a finally block. Fix applied in review: the add/update/rename methods previously opened
 * their connection <em>before</em> the group-existence precondition check and, on the failure
 * branch, threw without ever closing it — leaking a pooled connection per failed call. The
 * precondition is now checked before the connection is acquired; SQL, parameter binding and
 * commit behavior are otherwise unchanged.</p>
 */
public class GroupDAO {

    private static Log log = LogFactory.getLog(GroupDAO.class);

    /**
     * Lists the groups that are created from SCIM
     *
     * @return The set of groups that were created from SCIM
     * @throws IdentitySCIMException on persistence errors
     */
    public Set<String> listSCIMGroups() throws IdentitySCIMException {
        Connection connection = IdentityDatabaseUtil.getDBConnection();
        PreparedStatement prepStmt = null;
        ResultSet resultSet = null;
        Set<String> groups = new HashSet<>();
        try {
            //retrieve groups from the DB
            prepStmt = connection.prepareStatement(SQLQueries.LIST_SCIM_GROUPS_SQL);
            prepStmt.setString(1, SCIMConstants.ID_URI);
            resultSet = prepStmt.executeQuery();
            while (resultSet.next()) {
                String group = resultSet.getString(1);
                if (StringUtils.isNotEmpty(group)) {
                    groups.add(group);
                }
            }
        } catch (SQLException e) {
            throw new IdentitySCIMException("Error when reading the SCIM Group information from persistence store.", e);
        } finally {
            IdentityDatabaseUtil.closeAllConnections(connection, resultSet, prepStmt);
        }
        return groups;
    }

    /**
     * Checks whether a SCIM attribute row exists for the given group in the given tenant.
     *
     * @param groupName group name (domain qualification is applied internally)
     * @param tenantId  tenant id
     * @return true if at least one SCIM attribute row exists for the group
     * @throws IdentitySCIMException on persistence errors
     */
    public boolean isExistingGroup(String groupName, int tenantId) throws IdentitySCIMException {
        Connection connection = IdentityDatabaseUtil.getDBConnection();
        PreparedStatement prepStmt = null;
        ResultSet rSet = null;
        boolean isExistingGroup = false;
        try {
            prepStmt = connection.prepareStatement(SQLQueries.CHECK_EXISTING_GROUP_SQL);
            prepStmt.setInt(1, tenantId);
            prepStmt.setString(2, SCIMCommonUtils.getGroupNameWithDomain(groupName));
            rSet = prepStmt.executeQuery();
            if (rSet.next()) {
                isExistingGroup = true;
            }
            connection.commit();
        } catch (SQLException e) {
            throw new IdentitySCIMException("Error when reading the group information from the persistence store.", e);
        } finally {
            IdentityDatabaseUtil.closeAllConnections(connection, rSet, prepStmt);
        }
        return isExistingGroup;
    }

    /**
     * Checks whether the named SCIM attribute already exists for the given group.
     *
     * @param attributeName attribute URI to look for
     * @param groupName     group name (domain qualification is applied internally)
     * @param tenantId      tenant id
     * @return true if the attribute row exists
     * @throws IdentitySCIMException on persistence errors
     */
    private boolean isExistingAttribute(String attributeName, String groupName, int tenantId)
            throws IdentitySCIMException {
        Connection connection = IdentityDatabaseUtil.getDBConnection();
        PreparedStatement prepStmt = null;
        ResultSet rSet = null;
        boolean isExistingAttribute = false;
        try {
            prepStmt = connection.prepareStatement(SQLQueries.CHECK_EXISTING_ATTRIBUTE_SQL);
            prepStmt.setInt(1, tenantId);
            prepStmt.setString(2, SCIMCommonUtils.getGroupNameWithDomain(groupName));
            prepStmt.setString(3, attributeName);
            rSet = prepStmt.executeQuery();
            if (rSet.next()) {
                isExistingAttribute = true;
            }
            connection.commit();
        } catch (SQLException e) {
            throw new IdentitySCIMException("Error when reading the group attribute information from " +
                    "the persistence store.", e);
        } finally {
            IdentityDatabaseUtil.closeAllConnections(connection, rSet, prepStmt);
        }
        return isExistingAttribute;
    }

    /**
     * Stores the given SCIM attributes for a group that must not yet exist.
     *
     * @param tenantId   tenant id
     * @param roleName   role/group name the attributes belong to
     * @param attributes attribute-URI to value map
     * @throws IdentitySCIMException if the group already exists, an attribute already
     *                               exists, or a persistence error occurs
     */
    public void addSCIMGroupAttributes(int tenantId, String roleName, Map<String, String> attributes)
            throws IdentitySCIMException {
        // Precondition first, BEFORE acquiring a connection: the original opened the
        // connection and then threw on this branch without closing it (connection leak).
        if (isExistingGroup(SCIMCommonUtils.getGroupNameWithDomain(roleName), tenantId)) {
            throw new IdentitySCIMException("Error when adding SCIM Attributes for the group: "
                    + roleName + " A Group with the same name already exists.");
        }
        Connection connection = IdentityDatabaseUtil.getDBConnection();
        PreparedStatement prepStmt = null;
        try {
            prepStmt = connection.prepareStatement(SQLQueries.ADD_ATTRIBUTES_SQL);
            prepStmt.setInt(1, tenantId);
            prepStmt.setString(2, roleName);
            for (Map.Entry<String, String> entry : attributes.entrySet()) {
                if (!isExistingAttribute(entry.getKey(),
                        SCIMCommonUtils.getGroupNameWithDomain(roleName), tenantId)) {
                    // Statement is reused per entry; only the attribute name/value rebind.
                    prepStmt.setString(3, entry.getKey());
                    prepStmt.setString(4, entry.getValue());
                    prepStmt.execute();
                    connection.commit();
                } else {
                    throw new IdentitySCIMException("Error when adding SCIM Attribute: "
                            + entry.getKey()
                            + " An attribute with the same name already exists.");
                }
            }
            connection.commit();
        } catch (SQLException e) {
            throw new IdentitySCIMException("Error when adding SCIM attributes for the group: "
                    + roleName, e);
        } finally {
            IdentityDatabaseUtil.closeAllConnections(connection, null, prepStmt);
        }
    }

    /**
     * Updates existing SCIM attributes of an existing group.
     *
     * @param tenantId   tenant id
     * @param roleName   role/group name the attributes belong to
     * @param attributes attribute-URI to new-value map; every key must already exist
     * @throws IdentitySCIMException if the group or an attribute does not exist, or a
     *                               persistence error occurs
     */
    public void updateSCIMGroupAttributes(int tenantId, String roleName,
                                          Map<String, String> attributes) throws IdentitySCIMException {
        // Precondition first, BEFORE acquiring a connection (leak fix, see class javadoc).
        if (!isExistingGroup(SCIMCommonUtils.getGroupNameWithDomain(roleName), tenantId)) {
            throw new IdentitySCIMException("Error when updating SCIM Attributes for the group: "
                    + roleName + " A Group with the same name doesn't exists.");
        }
        Connection connection = IdentityDatabaseUtil.getDBConnection();
        PreparedStatement prepStmt = null;
        try {
            prepStmt = connection.prepareStatement(SQLQueries.UPDATE_ATTRIBUTES_SQL);
            // Parameter order follows UPDATE_ATTRIBUTES_SQL: 1=value, 2=tenant, 3=role, 4=attr name.
            prepStmt.setInt(2, tenantId);
            prepStmt.setString(3, roleName);
            for (Map.Entry<String, String> entry : attributes.entrySet()) {
                if (isExistingAttribute(entry.getKey(),
                        SCIMCommonUtils.getGroupNameWithDomain(roleName), tenantId)) {
                    prepStmt.setString(4, entry.getKey());
                    prepStmt.setString(1, entry.getValue());
                    int count = prepStmt.executeUpdate();
                    if (log.isDebugEnabled()) {
                        log.debug("No. of records updated for updating SCIM Group : " + count);
                    }
                    connection.commit();
                } else {
                    throw new IdentitySCIMException("Error when adding SCIM Attribute: "
                            + entry.getKey()
                            + " An attribute with the same name doesn't exists.");
                }
            }
        } catch (SQLException e) {
            throw new IdentitySCIMException("Error updating the SCIM Group Attributes.", e);
        } finally {
            IdentityDatabaseUtil.closeAllConnections(connection, null, prepStmt);
        }
    }

    /**
     * Deletes all SCIM attribute rows of the given group.
     *
     * @param tenantId tenant id
     * @param roleName role/group name whose rows are removed
     * @throws IdentitySCIMException on persistence errors
     */
    public void removeSCIMGroup(int tenantId, String roleName) throws IdentitySCIMException {
        Connection connection = IdentityDatabaseUtil.getDBConnection();
        PreparedStatement prepStmt = null;
        try {
            prepStmt = connection.prepareStatement(SQLQueries.DELETE_GROUP_SQL);
            prepStmt.setInt(1, tenantId);
            prepStmt.setString(2, SCIMCommonUtils.getGroupNameWithDomain(roleName));
            prepStmt.execute();
            connection.commit();
        } catch (SQLException e) {
            log.error("Error when executing the SQL : " + SQLQueries.DELETE_GROUP_SQL);
            throw new IdentitySCIMException("Error deleting the SCIM Group.", e);
        } finally {
            IdentityDatabaseUtil.closeAllConnections(connection, null, prepStmt);
        }
    }

    /**
     * Reads all SCIM attributes of the given group.
     *
     * @param tenantId tenant id
     * @param roleName role/group name to read
     * @return attribute-URI to value map; empty if the group has no rows
     * @throws IdentitySCIMException on persistence errors
     */
    public Map<String, String> getSCIMGroupAttributes(int tenantId, String roleName)
            throws IdentitySCIMException {
        Connection connection = IdentityDatabaseUtil.getDBConnection();
        PreparedStatement prepStmt = null;
        ResultSet rSet = null;
        Map<String, String> attributes = new HashMap<>();
        try {
            prepStmt = connection.prepareStatement(SQLQueries.GET_ATTRIBUTES_SQL);
            prepStmt.setInt(1, tenantId);
            prepStmt.setString(2, SCIMCommonUtils.getGroupNameWithDomain(roleName));
            rSet = prepStmt.executeQuery();
            while (rSet.next()) {
                if (StringUtils.isNotEmpty(rSet.getString(1))) {
                    attributes.put(rSet.getString(1), rSet.getString(2));
                }
            }
            connection.commit();
        } catch (SQLException e) {
            log.error("Error when executing the SQL : " + SQLQueries.GET_ATTRIBUTES_SQL);
            throw new IdentitySCIMException("Error when reading the SCIM Group information from the " +
                    "persistence store.", e);
        } finally {
            IdentityDatabaseUtil.closeAllConnections(connection, rSet, prepStmt);
        }
        return attributes;
    }

    /**
     * Resolves a group name from its SCIM id.
     *
     * @param tenantId tenant id
     * @param id       SCIM id of the group
     * @return the primary-domain-free group name, or null if no group matches
     * @throws IdentitySCIMException on persistence errors
     */
    public String getGroupNameById(int tenantId, String id) throws IdentitySCIMException {
        Connection connection = IdentityDatabaseUtil.getDBConnection();
        PreparedStatement prepStmt = null;
        ResultSet rSet = null;
        String roleName = null;
        try {
            prepStmt = connection.prepareStatement(SQLQueries.GET_GROUP_NAME_BY_ID_SQL);
            prepStmt.setInt(1, tenantId);
            prepStmt.setString(2, id);
            prepStmt.setString(3, SCIMConstants.ID_URI);
            rSet = prepStmt.executeQuery();
            while (rSet.next()) {
                //we assume only one result since group id and tenant id is unique.
                roleName = rSet.getString(1);
            }
            connection.commit();
        } catch (SQLException e) {
            throw new IdentitySCIMException("Error when reading the SCIM Group information from the persistence store.", e);
        } finally {
            IdentityDatabaseUtil.closeAllConnections(connection, rSet, prepStmt);
        }
        if (StringUtils.isNotEmpty(roleName)) {
            return SCIMCommonUtils.getPrimaryFreeGroupName(roleName);
        }
        return null;
    }

    /**
     * Renames an existing group in the SCIM attribute store.
     *
     * @param tenantId    tenant id
     * @param oldRoleName current role/group name
     * @param newRoleName replacement role/group name
     * @throws IdentitySCIMException if the old group does not exist or a persistence
     *                               error occurs
     */
    public void updateRoleName(int tenantId, String oldRoleName, String newRoleName)
            throws IdentitySCIMException {
        // Precondition first, BEFORE acquiring a connection (leak fix, see class javadoc).
        if (!isExistingGroup(SCIMCommonUtils.getGroupNameWithDomain(oldRoleName), tenantId)) {
            throw new IdentitySCIMException("Error when updating role name of the role: " + oldRoleName);
        }
        Connection connection = IdentityDatabaseUtil.getDBConnection();
        PreparedStatement prepStmt = null;
        try {
            prepStmt = connection.prepareStatement(SQLQueries.UPDATE_GROUP_NAME_SQL);
            prepStmt.setString(1, SCIMCommonUtils.getGroupNameWithDomain(newRoleName));
            prepStmt.setInt(2, tenantId);
            prepStmt.setString(3, SCIMCommonUtils.getGroupNameWithDomain(oldRoleName));
            int count = prepStmt.executeUpdate();
            if (log.isDebugEnabled()) {
                log.debug("No. of records updated for updating SCIM Group : " + count);
            }
            connection.commit();
        } catch (SQLException e) {
            throw new IdentitySCIMException("Error updating the SCIM Group Attributes", e);
        } finally {
            IdentityDatabaseUtil.closeAllConnections(connection, null, prepStmt);
        }
    }
}
| |
/*******************************************************************************
* Copyright (c) 2006, 2008 QNX Software Systems and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* QNX - Initial API and implementation
* Markus Schorn (Wind River Systems)
*******************************************************************************/
package org.eclipse.cdt.internal.pdom.tests;
import java.io.File;
import java.util.regex.Pattern;
import junit.framework.AssertionFailedError;
import junit.framework.Test;
import org.eclipse.cdt.core.dom.ILinkage;
import org.eclipse.cdt.core.dom.IName;
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
import org.eclipse.cdt.core.dom.ast.IBinding;
import org.eclipse.cdt.core.dom.ast.ICompositeType;
import org.eclipse.cdt.core.dom.ast.ITypedef;
import org.eclipse.cdt.core.index.IIndex;
import org.eclipse.cdt.core.index.IIndexBinding;
import org.eclipse.cdt.core.index.IIndexFile;
import org.eclipse.cdt.core.index.IIndexFileLocation;
import org.eclipse.cdt.core.index.IIndexName;
import org.eclipse.cdt.core.index.IndexFilter;
import org.eclipse.cdt.core.index.IndexLocationFactory;
import org.eclipse.cdt.core.model.ICProject;
import org.eclipse.cdt.core.model.ITranslationUnit;
import org.eclipse.cdt.core.testplugin.util.TestSourceReader;
import org.eclipse.cdt.internal.core.CCoreInternals;
import org.eclipse.cdt.internal.core.index.IIndexFragmentName;
import org.eclipse.cdt.internal.core.pdom.PDOM;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.jface.text.BadLocationException;
/**
 * Test that PDOM correctly track declarations, definitions and references of
 * objects.
 *
 * <p>Tests locate expected positions via marker strings ("defNN", "declNN",
 * "refNN") embedded in the fixture sources, then compare the marker's line
 * against where the index actually recorded the name.</p>
 *
 * @author ELaskavaia@qnx.com
 *
 */
public class DefDeclTests extends PDOMTestBase {
// Name of the created test project; used to build workspace-relative paths.
private String projectName = null;
protected PDOM pdom;
protected ICProject cproject;
public static Test suite() {
return suite(DefDeclTests.class);
}
@Override
protected void setUp() throws Exception {
String requiredName = "defDeclTests";
cproject = createProject(requiredName);
this.projectName = cproject.getElementName();
pdom = (PDOM) CCoreInternals.getPDOMManager().getPDOM(cproject);
// Hold a read lock for the whole test; released in tearDown().
pdom.acquireReadLock();
}
@Override
protected void tearDown() throws Exception {
pdom.releaseReadLock();
if (cproject != null) {
cproject.getProject().delete(IResource.FORCE | IResource.ALWAYS_DELETE_PROJECT_CONTENT, new NullProgressMonitor());
}
}
// Looks up exactly one binding by name pattern and asserts the match is unique.
private IBinding findSingleBinding(String elName) throws CoreException {
IBinding[] binds = pdom.findBindings(Pattern.compile(elName), true,
IndexFilter.ALL, new NullProgressMonitor());
assertEquals(1, binds.length);
assertEquals(elName, binds[0].getName());
IBinding element = binds[0];
return element;
}
// Convenience wrappers over checkUsage for the three usage kinds.
private void checkReference(IBinding element, String mark, int checkCount)
throws Exception {
checkUsage(element, mark, checkCount, IIndex.FIND_REFERENCES);
}
private void checkDeclaration(IBinding element, String mark, int num)
throws Exception {
checkUsage(element, mark, num, IIndex.FIND_DECLARATIONS);
}
private void checkDefinition(IBinding element, String mark, int countNum)
throws Exception {
checkUsage(element, mark, countNum, IIndex.FIND_DEFINITIONS);
}
// Verifies that the index records countNum usages (of the given kind) of
// element, at least one of which falls on the same line as the mark string.
// mark == null or countNum == 0 instead asserts there are zero usages.
private void checkUsage(IBinding element, String mark, int countNum,
int flags) throws Exception {
if (mark == null || countNum == 0) {
getFirstUsage(element, 0, flags);
} else {
IName[] usage = pdom.findNames(element, flags);
if (countNum >= 0)
assertEquals(countNum, usage.length);
String fail = null;
boolean found = false;
for (int i = 0; i < usage.length; i++) {
IName name = usage[i];
IASTFileLocation loc = name.getFileLocation();
String fileName = new File(loc.getFileName()).getName();
int markLine;
try {
markLine = getMarkLine(mark, fileName);
} catch (AssertionFailedError e) {
// Mark not present in this file; remember why and try the next usage.
fail = e.getMessage();
continue;
}
int nodeLine = getLineNumber(loc.getNodeOffset(), fileName);
if (markLine != nodeLine) {
fail = "Marker at line " + markLine + ", actual at line "
+ nodeLine;
} else {
found = true;
}
}
if (found == false)
fail(fail);
}
}
/**
 * Get references defined by flags. If k>0 check that there are k of them.
 *
 * @param binding
 * @param k -
 *            number of references, if k==-1 no check
 * @return first references or null of non
 * @throws CoreException
 */
private IName getFirstUsage(IBinding binding, int k, int flags)
throws CoreException {
IName[] decls = pdom.findNames(binding, flags);
if (k >= 0)
assertEquals(k, decls.length);
if (decls.length > 0) {
IName ref = decls[0];
return ref;
} else {
return null;
}
}
// Asserts that loc falls on the same line as the mark string in its file.
protected void assertAtMark(IASTFileLocation loc, String mark)
throws Exception {
String fileName = new File(loc.getFileName()).getName();
int markLine = getMarkLine(mark, fileName);
int nodeLine = getLineNumber(loc.getNodeOffset(), fileName);
assertEquals(markLine, nodeLine);
}
// Line number of the first occurrence of mark within fileName.
private int getMarkLine(String mark, String fileName) throws Exception,
BadLocationException {
int markLine = getLineNumber(offset(fileName, mark), fileName);
return markLine;
}
// Translates a character offset in a project-relative file into a line number.
protected int getLineNumber(int position, String projectRelativePath)
throws Exception {
Path fullPath = new Path(projectName + "/" + projectRelativePath);
return TestSourceReader.getLineNumber(position, fullPath);
}
// Common pattern: element "nameNN" should have def/decl/ref counts at marks
// "defNN"/"declNN"/"refNN" respectively.
public void assertDefDeclRef(String name, String testNum, int def,
int decl, int ref) throws Exception {
String elName = name + testNum;
IBinding binding = findSingleBinding(elName);
checkDefinition(binding, "def" + testNum, def);
checkDeclaration(binding, "decl" + testNum, decl);
checkReference(binding, "ref" + testNum, ref);
}
// Resolves the single index file for a location (C linkage only).
private IIndexFile getSingleFile(IIndexFileLocation ifl) throws CoreException {
IIndexFile[] files= pdom.getFiles(ILinkage.C_LINKAGE_ID, ifl);
assertEquals(1, files.length);
return files[0];
}
/* ------------------ Tests Started Here ------------------------ */
public void testInit() {
// will fail if setUp fails, maybe timelimit is too small for warm-up
}
public void testSimpleDeclUsage_f01() throws Exception {
assertDefDeclRef("foo", "01", 0, 1, 1);
}
public void testKRDeclUsage_f02() throws Exception {
assertDefDeclRef("foo", "02", 0, 1, 1);
}
public void testImplicitDeclPostDecl_f03() throws Exception {
assertDefDeclRef("foo", "03", 0, 1, 1);
}
public void testImplicitDeclPostDef_f04() throws Exception {
assertDefDeclRef("foo", "04", 1, 0, 1);
}
public void testImplicitDeclNone_f05() throws Exception {
assertDefDeclRef("foo", "05", 0, 0, 1);
}
public void testNonLocalDefintion_f06() throws Exception {
assertDefDeclRef("foo", "06", 1, 1, 1);
}
// Two bindings share the name: one file-local (static), one global. Order the
// pair deterministically, then check each one's usages independently.
public void testWrongMatchedStaticDefinition() throws Exception {
String elName = "foo" + "07";
IIndexBinding[] binds = pdom.findBindings(Pattern.compile(elName), true, IndexFilter.ALL, new NullProgressMonitor());
assertEquals(2, binds.length);
assertTrue(binds[0].isFileLocal() != binds[1].isFileLocal());
if (binds[0].isFileLocal()) {
IIndexBinding b= binds[0]; binds[0]= binds[1]; binds[1]= b;
}
assertEquals(elName, binds[0].getName());
checkDefinition(binds[0], "def" + "07", 0);
checkDeclaration(binds[0], "decl" + "07", 1);
checkReference(binds[0], "ref" + "07", 1);
assertEquals(elName, binds[1].getName());
assertTrue(binds[1].getLocalToFile().getLocation().getFullPath().endsWith("second.c"));
checkDefinition(binds[1], "def" + "07", 1);
checkDeclaration(binds[1], "decl" + "07", 0);
checkReference(binds[1], "ref" + "07", 0);
}
// Resolves the binding at a concrete call site in each file, so the static
// definitions in func.c and second.c are verified separately.
public void testStaticBindings_f08() throws Exception {
String elName = "foo" + "08";
IIndexFileLocation ifl= IndexLocationFactory.getIFL((ITranslationUnit) cproject.findElement(new Path("func.c")));
IIndexFile file= getSingleFile(ifl);
int offset= TestSourceReader.indexOfInFile("foo08();", new Path(ifl.getFullPath()));
IIndexName[] names= file.findNames(offset, 5);
assertEquals(1, names.length);
IBinding element = pdom.findBinding((IIndexFragmentName)names[0]);
assertEquals(elName, element.getName());
checkDefinition(element, "def" + "08", 1);
checkReference(element, "ref" + "08", 1);
// check the other file
ifl= IndexLocationFactory.getIFL((ITranslationUnit) cproject.findElement(new Path("second.c")));
file= getSingleFile(ifl);
offset= TestSourceReader.indexOfInFile("foo08();", new Path(ifl.getFullPath()));
names= file.findNames(offset, 5);
assertEquals(1, names.length);
element = pdom.findBinding((IIndexFragmentName)names[0]);
assertEquals(elName, element.getName());
checkDefinition(element, "defS" + "08", 1);
checkReference(element, "refS" + "08", 1);
}
public void testSimpleGlobalWrite_v09() throws Exception {
assertDefDeclRef("var", "_v09", 1, 0, 1);
}
public void testGlobalInitRead_v10() throws Exception {
assertDefDeclRef("var", "_v10", 1, 0, 1);
}
public void testGlobalInitRead2_v11() throws Exception {
assertDefDeclRef("var", "_v11", 1, 0, 1);
}
public void testDeclUseDef_v12() throws Exception {
assertDefDeclRef("var", "_v12", 1, 1, 1);
}
public void testDeclDefUse_v13() throws Exception {
assertDefDeclRef("var", "_v13", 1, 1, 1);
}
public void testDefDeclUse_v14() throws Exception {
// Hmm. This test seems to work, but Find Declaration in the UI does not
// work
assertDefDeclRef("var", "_v14", 1, 1, 1);
}
public void testNamedStruct_t01() throws Exception {
assertDefDeclRef("type", "_t01", 1, 0, 1);
}
public void testStructPreDefintion_t02() throws Exception {
assertDefDeclRef("type", "_t02", 0, 1, 1);
}
public void testStructRecursive_t03() throws Exception {
assertDefDeclRef("type", "_t03", 1, 1, 1);
}
// The name resolves to two bindings (a struct and a typedef of it); check
// each against its own mark set.
public void testStructAndTypedef_t04() throws Exception {
String num = "_t04";
String elName = "type" + num;
IBinding[] bindings = pdom.findBindings(Pattern.compile(elName), false, IndexFilter.ALL, new NullProgressMonitor());
assertEquals(2,bindings.length);
IBinding typedef = bindings[0] instanceof ITypedef ? bindings[0] : bindings[1];
IBinding struct = bindings[0] instanceof ICompositeType ? bindings[0] : bindings[1];
checkReference(typedef, "ref" + num, 1);
checkDefinition(typedef, "def" + num, 1);
checkReference(struct, "refS" + num, 1);
checkDefinition(struct, "defS" + num, 1);
}
public void testTypedefAndAnonymousStruct_t05() throws Exception {
assertDefDeclRef("type", "_t05", 1, 0, 1);
}
}
| |
/*
* Copyright 2015-2017 Austin Keener & Michael Ritter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.dv8tion.jda.core;
import net.dv8tion.jda.bot.JDABot;
import net.dv8tion.jda.client.JDAClient;
import net.dv8tion.jda.core.entities.*;
import net.dv8tion.jda.core.hooks.IEventManager;
import net.dv8tion.jda.core.managers.Presence;
import net.dv8tion.jda.core.requests.RestAction;
import org.apache.http.HttpHost;
import java.util.Collection;
import java.util.List;
/**
* The core of JDA. Acts as a registry system of JDA. All parts of the the API can be accessed starting from this class.
*/
public interface JDA
{
/**
 * Represents the connection status of JDA and its Main WebSocket.
 * Constants are listed roughly in lifecycle order, from startup to shutdown.
 */
enum Status
{
/**JDA is currently setting up supporting systems like the AudioSystem.*/
INITIALIZING,
/**JDA has finished setting up supporting systems and is ready to log in.*/
INITIALIZED,
/**JDA is currently attempting to log in.*/
LOGGING_IN,
/**JDA is currently attempting to connect it's websocket to Discord.*/
CONNECTING_TO_WEBSOCKET,
/**JDA has successfully connected it's websocket to Discord and is populating internal objects.
 * This process often takes the longest of all Statuses (besides CONNECTED)*/
LOADING_SUBSYSTEMS,
/**JDA has finished loading everything, is receiving information from Discord and is firing events.*/
CONNECTED,
/**JDA's main websocket has been disconnected. This <b>DOES NOT</b> mean JDA has shutdown permanently.
 * This is an in-between status. Most likely ATTEMPTING_TO_RECONNECT or SHUTTING_DOWN/SHUTDOWN will soon follow.*/
DISCONNECTED,
/**When trying to reconnect to Discord JDA encountered an issue, most likely related to a lack of internet connection,
 * and is waiting to try reconnecting again.*/
WAITING_TO_RECONNECT,
/**JDA has been disconnected from Discord and is currently trying to reestablish the connection.*/
ATTEMPTING_TO_RECONNECT,
/**JDA has received a shutdown request or has been disconnected from Discord and reconnect is disabled, thus,
 * JDA is in the process of shutting down*/
SHUTTING_DOWN,
/**JDA has finished shutting down and this instance can no longer be used to communicate with the Discord servers.*/
SHUTDOWN,
/**While attempting to authenticate, Discord reported that the provided authentication information was invalid.*/
FAILED_TO_LOGIN,
}
/**
* Represents the information used to create this shard.
*/
/**
* Gets the current {@link net.dv8tion.jda.core.JDA.Status Status} of the JDA instance.
*
* @return Current JDA status.
*/
Status getStatus();
/**
* The time in milliseconds that discord took to respond to our last heartbeat
* <br>This roughly represents the WebSocket ping of this session
*
* <p><b>{@link net.dv8tion.jda.core.requests.RestAction RestAction} request times do not
* correlate to this value!</b>
*
* @return time in milliseconds between heartbeat and the heartbeat ack response
*/
long getPing();
/**
* Contains all {@code cf-ray} headers that JDA received in this session.
* <br>These receive a new value whenever the WebSockedClient reconnects to the gateway.
*
* <p>This is useful to monitor cloudflare activity from the Discord Developer perspective.
* <br>Use this list to report connection issues.
*
* @return Immutable list of all cf-ray values for this session
*/
List<String> getCloudflareRays();
/**
* Changes the internal EventManager.
*
* <p>The default EventManager is {@link net.dv8tion.jda.core.hooks.InterfacedEventManager InterfacedEventListener}.
* <br>There is also an {@link net.dv8tion.jda.core.hooks.AnnotatedEventManager AnnotatedEventManager} available.
*
* @param manager
* The new EventManager to use
*/
void setEventManager(IEventManager manager);
/**
* Adds all provided listeners to the event-listeners that will be used to handle events.
* This uses the {@link net.dv8tion.jda.core.hooks.InterfacedEventManager InterfacedEventListener} by default.
* To switch to the {@link net.dv8tion.jda.core.hooks.AnnotatedEventManager AnnotatedEventManager}, use {@link #setEventManager(IEventManager)}.
*
* Note: when using the {@link net.dv8tion.jda.core.hooks.InterfacedEventManager InterfacedEventListener} (default),
* given listener <b>must</b> be instance of {@link net.dv8tion.jda.core.hooks.EventListener EventListener}!
*
* @param listeners
* The listener(s) which will react to events.
*/
void addEventListener(Object... listeners);
/**
* Removes all provided listeners from the event-listeners and no longer uses them to handle events.
*
* @param listeners
* The listener(s) to be removed.
*/
void removeEventListener(Object... listeners);
/**
* Returns an unmodifiable List of Objects that have been registered as EventListeners.
*
* @return List of currently registered Objects acting as EventListeners.
*/
List<Object> getRegisteredListeners();
/**
* An unmodifiable list of all {@link net.dv8tion.jda.core.entities.User Users} that share a
* {@link net.dv8tion.jda.core.entities.Guild Guild} with the currently logged in account.
* <br>This list will never contain duplicates and represents all {@link net.dv8tion.jda.core.entities.User Users}
* that JDA can currently see.
*
* <p>If the developer is sharding, then only users from guilds connected to the specifically logged in
* shard will be returned in the List.
*
* @return List of all {@link net.dv8tion.jda.core.entities.User Users} that are visible to JDA.
*/
List<User> getUsers();
/**
* This returns the {@link net.dv8tion.jda.core.entities.User User} which has the same id as the one provided.
* <br>If there is no visible user with an id that matches the provided one, this returns {@code null}.
*
* @param id
* The id of the requested {@link net.dv8tion.jda.core.entities.User User}.
*
* @throws java.lang.NumberFormatException
* If the provided {@code id} cannot be parsed by {@link Long#parseLong(String)}
*
* @return Possibly-null {@link net.dv8tion.jda.core.entities.User User} with matching id.
*/
User getUserById(String id);
/**
* This returns the {@link net.dv8tion.jda.core.entities.User User} which has the same id as the one provided.
* <br>If there is no visible user with an id that matches the provided one, this returns {@code null}.
*
* @param id
* The id of the requested {@link net.dv8tion.jda.core.entities.User User}.
*
* @return Possibly-null {@link net.dv8tion.jda.core.entities.User User} with matching id.
*/
User getUserById(long id);
/**
* Gets all {@link net.dv8tion.jda.core.entities.Guild Guilds} that contain all given users as their members.
*
* @param users
* The users which all the returned {@link net.dv8tion.jda.core.entities.Guild Guilds} must contain.
*
* @return Unmodifiable list of all {@link net.dv8tion.jda.core.entities.Guild Guild} instances which have all {@link net.dv8tion.jda.core.entities.User Users} in them.
*/
List<Guild> getMutualGuilds(User... users);
/**
* Gets all {@link net.dv8tion.jda.core.entities.Guild Guilds} that contain all given users as their members.
*
* @param users
* The users which all the returned {@link net.dv8tion.jda.core.entities.Guild Guilds} must contain.
*
* @return Unmodifiable list of all {@link net.dv8tion.jda.core.entities.Guild Guild} instances which have all {@link net.dv8tion.jda.core.entities.User Users} in them.
*/
List<Guild> getMutualGuilds(Collection<User> users);
/**
* This unmodifiable returns all {@link net.dv8tion.jda.core.entities.User Users} that have the same username as the one provided.
* <br>If there are no {@link net.dv8tion.jda.core.entities.User Users} with the provided name, then this returns an empty list.
*
* <p><b>Note: </b> This does **not** consider nicknames, it only considers {@link net.dv8tion.jda.core.entities.User#getName()}
*
* @param name
* The name of the requested {@link net.dv8tion.jda.core.entities.User Users}.
* @param ignoreCase
* Whether to ignore case or not when comparing the provided name to each {@link net.dv8tion.jda.core.entities.User#getName()}.
*
* @return Possibly-empty list of {@link net.dv8tion.jda.core.entities.User Users} that all have the same name as the provided name.
*/
List<User> getUsersByName(String name, boolean ignoreCase);
/**
* Attempts to retrieve a {@link net.dv8tion.jda.core.entities.User User} object based on the provided id.
* <br>This first calls {@link #getUserById(long)}, and if the return is {@code null} then a request
* is made to the Discord servers.
*
* <p>The returned {@link net.dv8tion.jda.core.requests.RestAction RestAction} can encounter the following Discord errors:
* <ul>
* <li>{@link net.dv8tion.jda.core.requests.ErrorResponse#UNKNOWN_USER ErrorResponse.UNKNOWN_USER}
* <br>Occurs when the provided id does not refer to a {@link net.dv8tion.jda.core.entities.User User}
* known by Discord. Typically occurs when developers provide an incomplete id (cut short).</li>
* </ul>
*
* @param id
* The id of the requested {@link net.dv8tion.jda.core.entities.User User}.
*
* @throws net.dv8tion.jda.core.exceptions.AccountTypeException
* This endpoint is {@link AccountType#BOT} only.
*
* @throws java.lang.NumberFormatException
* If the provided {@code id} cannot be parsed by {@link Long#parseLong(String)}
* @throws java.lang.IllegalArgumentException
* <ul>
* <li>If the provided id String is null.</li>
* <li>If the provided id String is empty.</li>
* </ul>
*
* @return {@link net.dv8tion.jda.core.requests.RestAction RestAction} - Type: {@link net.dv8tion.jda.core.entities.User User}
* <br>On request, gets the User with id matching provided id from Discord.
*/
RestAction<User> retrieveUserById(String id);
/**
* Attempts to retrieve a {@link net.dv8tion.jda.core.entities.User User} object based on the provided id.
* <br>This first calls {@link #getUserById(long)}, and if the return is {@code null} then a request
* is made to the Discord servers.
*
* <p>The returned {@link net.dv8tion.jda.core.requests.RestAction RestAction} can encounter the following Discord errors:
* <ul>
* <li>{@link net.dv8tion.jda.core.requests.ErrorResponse#UNKNOWN_USER ErrorResponse.UNKNOWN_USER}
* <br>Occurs when the provided id does not refer to a {@link net.dv8tion.jda.core.entities.User User}
* known by Discord. Typically occurs when developers provide an incomplete id (cut short).</li>
* </ul>
*
* @param id
* The id of the requested {@link net.dv8tion.jda.core.entities.User User}.
*
* @throws net.dv8tion.jda.core.exceptions.AccountTypeException
* This endpoint is {@link AccountType#BOT} only.
*
* @return {@link net.dv8tion.jda.core.requests.RestAction RestAction} - Type: {@link net.dv8tion.jda.core.entities.User User}
* <br>On request, gets the User with id matching provided id from Discord.
*/
RestAction<User> retrieveUserById(long id);
/**
* An unmodifiable List of all {@link net.dv8tion.jda.core.entities.Guild Guilds} that the logged account is connected to.
* <br>If this account is not connected to any {@link net.dv8tion.jda.core.entities.Guild Guilds}, this will return
* an empty list.
*
* <p>If the developer is sharding ({@link net.dv8tion.jda.core.JDABuilder#useSharding(int, int)},
* then this list will only contain the {@link net.dv8tion.jda.core.entities.Guild Guilds} that the shard is
* actually connected to. Discord determines which guilds a shard is connect to using the following format:
* <br>Guild connected if shardId == (guildId {@literal >>} 22) % totalShards;
* <br>Source for formula: <a href="https://discordapp.com/developers/docs/topics/gateway#sharding">Discord Documentation</a>
*
* @return Possibly-empty list of all the {@link net.dv8tion.jda.core.entities.Guild Guilds} that this account is connected to.
*/
List<Guild> getGuilds();
/**
* This returns the {@link net.dv8tion.jda.core.entities.Guild Guild} which has the same id as the one provided.
* <br>If there is no connected guild with an id that matches the provided one, then this returns {@code null}.
*
* @param id
* The id of the {@link net.dv8tion.jda.core.entities.Guild Guild}.
*
* @throws java.lang.NumberFormatException
* If the provided {@code id} cannot be parsed by {@link Long#parseLong(String)}
*
* @return Possibly-null {@link net.dv8tion.jda.core.entities.Guild Guild} with matching id.
*/
Guild getGuildById(String id);
/**
* This returns the {@link net.dv8tion.jda.core.entities.Guild Guild} which has the same id as the one provided.
* <br>If there is no connected guild with an id that matches the provided one, then this returns {@code null}.
*
* @param id
* The id of the {@link net.dv8tion.jda.core.entities.Guild Guild}.
*
* @return Possibly-null {@link net.dv8tion.jda.core.entities.Guild Guild} with matching id.
*/
Guild getGuildById(long id);
/**
* An unmodifiable list of all {@link net.dv8tion.jda.core.entities.Guild Guilds} that have the same name as the one provided.
* <br>If there are no {@link net.dv8tion.jda.core.entities.Guild Guilds} with the provided name, then this returns an empty list.
*
* @param name
* The name of the requested {@link net.dv8tion.jda.core.entities.Guild Guilds}.
* @param ignoreCase
* Whether to ignore case or not when comparing the provided name to each {@link net.dv8tion.jda.core.entities.Guild#getName()}.
* @return
* Possibly-empty list of all the {@link net.dv8tion.jda.core.entities.Guild Guilds} that all have the same name as
* the provided name.
*/
List<Guild> getGuildsByName(String name, boolean ignoreCase);
/**
* All {@link net.dv8tion.jda.core.entities.Role Roles} this JDA instance can see.
* <br>This will iterate over each {@link net.dv8tion.jda.core.entities.Guild Guild} retrieved from
* {@link #getGuilds()} and collect its {@link net.dv8tion.jda.core.entities.Guild#getRoles() Guild.getRoles()}.
*
* @return Immutable List of all visible Roles
*/
List<Role> getRoles();
/**
* Retrieves the {@link net.dv8tion.jda.core.entities.Role Role} associated to the provided id.
* <br>This iterates over all {@link net.dv8tion.jda.core.entities.Guild Guilds} and check whether
* a Role from that Guild is assigned to the specified ID and will return the first that can be found.
*
* @param id
* The id of the searched Role
*
* @throws java.lang.NumberFormatException
* If the provided {@code id} cannot be parsed by {@link Long#parseLong(String)}
*
* @return Possibly-null {@link net.dv8tion.jda.core.entities.Role Role} for the specified ID
*/
Role getRoleById(String id);
/**
* Retrieves the {@link net.dv8tion.jda.core.entities.Role Role} associated to the provided id.
* <br>This iterates over all {@link net.dv8tion.jda.core.entities.Guild Guilds} and check whether
* a Role from that Guild is assigned to the specified ID and will return the first that can be found.
* @param id
* The id of the searched Role
*
* @return Possibly-null {@link net.dv8tion.jda.core.entities.Role Role} for the specified ID
*/
Role getRoleById(long id);
/**
* Retrieves all {@link net.dv8tion.jda.core.entities.Role Roles} visible to this JDA instance.
* <br>This simply filters the Roles returned by {@link #getRoles()} with the provided name, either using
* {@link String#equals(Object)} or {@link String#equalsIgnoreCase(String)} on {@link net.dv8tion.jda.core.entities.Role#getName()}.
*
* @param name
* The name for the Roles
* @param ignoreCase
* Whether to use {@link String#equalsIgnoreCase(String)}
*
* @return Immutable List of all Roles matching the parameters provided.
*/
List<Role> getRolesByName(String name, boolean ignoreCase);
/**
* An unmodifiable List of all {@link net.dv8tion.jda.core.entities.TextChannel TextChannels} of all connected
* {@link net.dv8tion.jda.core.entities.Guild Guilds}.
*
* <p><b>Note:</b> just because a {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} is present in this list does
* not mean that you will be able to send messages to it. Furthermore, if you log into this account on the discord
* client, it is possible that you will see fewer channels than this returns. This is because the discord client
* hides any {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} that you don't have the
* {@link net.dv8tion.jda.core.Permission#MESSAGE_READ Permission.MESSAGE_READ} permission in.
*
* @return Possibly-empty list of all known {@link net.dv8tion.jda.core.entities.TextChannel TextChannels}.
*/
List<TextChannel> getTextChannels();
/**
* This returns the {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} which has the same id as the one provided.
* <br>If there is no known {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} with an id that matches the provided
* one, then this returns {@code null}.
*
* <p><b>Note:</b> just because a {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} is present does
* not mean that you will be able to send messages to it. Furthermore, if you log into this account on the discord
* client, it is you will not see the channel that this returns. This is because the discord client
* hides any {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} that you don't have the
* {@link net.dv8tion.jda.core.Permission#MESSAGE_READ Permission.MESSAGE_READ} permission in.
*
* @param id
* The id of the {@link net.dv8tion.jda.core.entities.TextChannel TextChannel}.
*
* @throws java.lang.NumberFormatException
* If the provided {@code id} cannot be parsed by {@link Long#parseLong(String)}
*
* @return Possibly-null {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} with matching id.
*/
TextChannel getTextChannelById(String id);
/**
* This returns the {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} which has the same id as the one provided.
* <br>If there is no known {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} with an id that matches the provided
* one, then this returns {@code null}.
*
* <p><b>Note:</b> just because a {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} is present does
* not mean that you will be able to send messages to it. Furthermore, if you log into this account on the discord
* client, it is you will not see the channel that this returns. This is because the discord client
* hides any {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} that you don't have the
* {@link net.dv8tion.jda.core.Permission#MESSAGE_READ Permission.MESSAGE_READ} permission in.
*
* @param id
* The id of the {@link net.dv8tion.jda.core.entities.TextChannel TextChannel}.
*
* @return Possibly-null {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} with matching id.
*/
TextChannel getTextChannelById(long id);
/**
* An unmodifiable list of all {@link net.dv8tion.jda.core.entities.TextChannel TextChannels} that have the same name as the one provided.
* <br>If there are no {@link net.dv8tion.jda.core.entities.TextChannel TextChannels} with the provided name, then this returns an empty list.
*
* <p><b>Note:</b> just because a {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} is present in this list does
* not mean that you will be able to send messages to it. Furthermore, if you log into this account on the discord
* client, it is possible that you will see fewer channels than this returns. This is because the discord client
* hides any {@link net.dv8tion.jda.core.entities.TextChannel TextChannel} that you don't have the
* {@link net.dv8tion.jda.core.Permission#MESSAGE_READ Permission.MESSAGE_READ} permission in.
*
* @param name
* The name of the requested {@link net.dv8tion.jda.core.entities.TextChannel TextChannels}.
* @param ignoreCase
* Whether to ignore case or not when comparing the provided name to each {@link net.dv8tion.jda.core.entities.TextChannel#getName()}.
*
* @return Possibly-empty list of all the {@link net.dv8tion.jda.core.entities.TextChannel TextChannels} that all have the
* same name as the provided name.
*/
List<TextChannel> getTextChannelsByName(String name, boolean ignoreCase);
/**
* An unmodifiable list of all {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannels} of all connected
* {@link net.dv8tion.jda.core.entities.Guild Guilds}.
*
* @return Possible-empty list of all known {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannels}.
*/
List<VoiceChannel> getVoiceChannels();
/**
* This returns the {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannel} which has the same id as the one provided.
* <br>If there is no known {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannel} with an id that matches the provided
* one, then this returns {@code null}.
*
* @param id The id of the {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannel}.
*
* @throws java.lang.NumberFormatException
* If the provided {@code id} cannot be parsed by {@link Long#parseLong(String)}
*
* @return Possibly-null {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannel} with matching id.
*/
VoiceChannel getVoiceChannelById(String id);
/**
* This returns the {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannel} which has the same id as the one provided.
* <br>If there is no known {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannel} with an id that matches the provided
* one, then this returns {@code null}.
*
* @param id The id of the {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannel}.
*
* @return Possibly-null {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannel} with matching id.
*/
VoiceChannel getVoiceChannelById(long id);
/**
* An unmodifiable list of all {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannels} that have the same name as the one provided.
* <br>If there are no {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannels} with the provided name, then this returns an empty list.
*
* @param name
* The name of the requested {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannels}.
* @param ignoreCase
* Whether to ignore case or not when comparing the provided name to each {@link net.dv8tion.jda.core.entities.VoiceChannel#getName()}.
*
* @return Possibly-empty list of all the {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannels} that all have the
* same name as the provided name.
*/
List<VoiceChannel> getVoiceChannelByName(String name, boolean ignoreCase);
/**
* An unmodifiable list of all known {@link net.dv8tion.jda.core.entities.PrivateChannel PrivateChannels}.
*
* @return Possibly-empty list of all {@link net.dv8tion.jda.core.entities.PrivateChannel PrivateChannels}.
*/
List<PrivateChannel> getPrivateChannels();
/**
* This returns the {@link net.dv8tion.jda.core.entities.PrivateChannel PrivateChannel} which has the same id as the one provided.
* <br>If there is no known {@link net.dv8tion.jda.core.entities.PrivateChannel PrivateChannel} with an id that matches the
* provided one, then this returns {@code null}.
*
* @param id
* The id of the {@link net.dv8tion.jda.core.entities.PrivateChannel PrivateChannel}.
*
* @throws java.lang.NumberFormatException
* If the provided {@code id} cannot be parsed by {@link Long#parseLong(String)}
*
* @return Possibly-null {@link net.dv8tion.jda.core.entities.PrivateChannel PrivateChannel} with matching id.
*/
PrivateChannel getPrivateChannelById(String id);
/**
* This returns the {@link net.dv8tion.jda.core.entities.PrivateChannel PrivateChannel} which has the same id as the one provided.
* <br>If there is no known {@link net.dv8tion.jda.core.entities.PrivateChannel PrivateChannel} with an id that matches the
* provided one, then this returns {@code null}.
*
* @param id
* The id of the {@link net.dv8tion.jda.core.entities.PrivateChannel PrivateChannel}.
*
* @return Possibly-null {@link net.dv8tion.jda.core.entities.PrivateChannel PrivateChannel} with matching id.
*/
PrivateChannel getPrivateChannelById(long id);
/**
* A collection of all to us known emotes (managed/restricted included).
*
* <p><b>Hint</b>: To check whether you can use an {@link net.dv8tion.jda.core.entities.Emote Emote} in a specific
* context you can use {@link Emote#canInteract(net.dv8tion.jda.core.entities.Member)} or
* {@link Emote#canInteract(net.dv8tion.jda.core.entities.User, net.dv8tion.jda.core.entities.MessageChannel)}
*
* <p><b>Unicode emojis are not included as {@link net.dv8tion.jda.core.entities.Emote Emote}!</b>
*
* @return An immutable list of Emotes (which may or may not be available to usage).
*/
List<Emote> getEmotes();
/**
* Retrieves an emote matching the specified {@code id} if one is available in our cache.
*
* <p><b>Unicode emojis are not included as {@link net.dv8tion.jda.core.entities.Emote Emote}!</b>
*
* @param id
* The id of the requested {@link net.dv8tion.jda.core.entities.Emote}.
*
* @throws java.lang.NumberFormatException
* If the provided {@code id} cannot be parsed by {@link Long#parseLong(String)}
*
* @return An {@link net.dv8tion.jda.core.entities.Emote Emote} represented by this id or null if none is found in our cache.
*/
Emote getEmoteById(String id);
/**
* Retrieves an emote matching the specified {@code id} if one is available in our cache.
*
* <p><b>Unicode emojis are not included as {@link net.dv8tion.jda.core.entities.Emote Emote}!</b>
*
* @param id
* The id of the requested {@link net.dv8tion.jda.core.entities.Emote}.
*
* @return An {@link net.dv8tion.jda.core.entities.Emote Emote} represented by this id or null if none is found in our cache.
*/
Emote getEmoteById(long id);
/**
* An unmodifiable list of all {@link net.dv8tion.jda.core.entities.Emote Emotes} that have the same name as the one provided.
* <br>If there are no {@link net.dv8tion.jda.core.entities.Emote Emotes} with the provided name, then this returns an empty list.
*
* <p><b>Unicode emojis are not included as {@link net.dv8tion.jda.core.entities.Emote Emote}!</b>
*
* @param name
* The name of the requested {@link net.dv8tion.jda.core.entities.Emote Emotes}.
* @param ignoreCase
* Whether to ignore case or not when comparing the provided name to each {@link net.dv8tion.jda.core.entities.Emote#getName()}.
*
* @return Possibly-empty list of all the {@link net.dv8tion.jda.core.entities.Emote Emotes} that all have the
* same name as the provided name.
*/
List<Emote> getEmotesByName(String name, boolean ignoreCase);
/**
* Returns the currently logged in account represented by {@link net.dv8tion.jda.core.entities.SelfUser SelfUser}.
* <br>Account settings <b>cannot</b> be modified using this object. If you wish to modify account settings please
* use the AccountManager which is accessible by {@link net.dv8tion.jda.core.entities.SelfUser#getManager()} or
* {@link net.dv8tion.jda.core.entities.SelfUser#getManagerUpdatable()}.
*
* @return The currently logged in account.
*/
SelfUser getSelfUser();
/**
* The {@link net.dv8tion.jda.core.managers.Presence Presence} controller for the current session.
* <br>Used to set {@link net.dv8tion.jda.core.entities.Game} and {@link net.dv8tion.jda.core.OnlineStatus} information.
*
* @return The never-null {@link net.dv8tion.jda.core.managers.Presence Presence} for this session.
*/
Presence getPresence();
/**
* The shard information used when creating this instance of JDA.
* <br>Represents the information provided to {@link net.dv8tion.jda.core.JDABuilder#useSharding(int, int)}.
*
* @return The shard information for this shard or {@code null} if this JDA instance isn't sharding.
*/
ShardInfo getShardInfo();
/**
* The login token that is currently being used for Discord authentication.
*
* @return Never-null, 18 character length string containing the auth token.
*/
String getToken();
/**
* This value is the total amount of JSON responses that discord has sent.
* <br>This value resets every time the websocket has to perform a full reconnect (not resume).
*
* @return Never-negative long containing total response amount.
*/
long getResponseTotal();
/**
* This value is the maximum amount of time, in seconds, that JDA will wait between reconnect attempts.
* <br>Can be set using {@link net.dv8tion.jda.core.JDABuilder#setMaxReconnectDelay(int) JDABuilder.setMaxReconnectDelay(int)}.
*
* @return The maximum amount of time JDA will wait between reconnect attempts in seconds.
*/
int getMaxReconnectDelay();
/**
* The proxy settings used by all JDA instances.
*
* @return The proxy settings used by all JDA instances. If JDA currently isn't using a proxy,
* {@link java.net.Proxy#NO_PROXY Proxy.NO_PROXY} is returned.
*/
HttpHost getGlobalProxy();
/**
* Sets whether or not JDA should try to automatically reconnect if a connection-error is encountered.
* <br>This will use an incremental reconnect (timeouts are increased each time an attempt fails).
*
* <p>Default is <b>true</b>.
*
* @param reconnect If true - enables autoReconnect
*/
void setAutoReconnect(boolean reconnect);
/**
* USed to determine whether or not autoReconnect is enabled for JDA.
*
* @return True if JDA will attempt to automatically reconnect when a connection-error is encountered.
*/
boolean isAutoReconnect();
/**
* Used to determine whether the instance of JDA supports audio and has it enabled.
*
* @return True if JDA can currently utilize the audio system.
*/
boolean isAudioEnabled();
/**
* Used to determine if JDA will process MESSAGE_DELETE_BULK messages received from Discord as a single
* {@link net.dv8tion.jda.core.events.message.MessageBulkDeleteEvent MessageBulkDeleteEvent} or split
* the deleted messages up and fire multiple {@link net.dv8tion.jda.core.events.message.MessageDeleteEvent MessageDeleteEvents},
* one for each deleted message.
*
* <p>By default, JDA will separate the bulk delete event into individual delete events, but this isn't as efficient as
* handling a single event would be. It is recommended that BulkDelete Splitting be disabled and that the developer
* should instead handle the {@link net.dv8tion.jda.core.events.message.MessageBulkDeleteEvent MessageBulkDeleteEvent}
*
* @return Whether or not JDA currently handles the BULK_MESSAGE_DELETE event by splitting it into individual MessageDeleteEvents or not.
*/
boolean isBulkDeleteSplittingEnabled();
/**
* Shuts down JDA, closing all its connections.
*
* <p>This is the same as calling {@link #shutdown(boolean) shutdown(true)}.
*/
void shutdown();
/**
* Shuts down JDA, closing all its connections.
* After this command is issued the JDA Instance can not be used anymore.
*
* <p>Depending on the value of {@code free}, this will also close the background-thread used for requests by Unirest.
* <br>If the background-thread is closed, the system can exit properly, but no further JDA requests are possible (includes other JDA instances).
* If you want to create any new instances or if you have any other instances running in parallel, then {@code free}
* should be set to false.
*
* @param free If true, shuts down JDA's rest system permanently for all current and future instances.
*/
void shutdown(boolean free);
/**
* Installs an auxiliary cable into your system.
*
* @param port
* the port to install to.
*
* @throws UnsupportedOperationException
* when you don't read the docs
*/
void installAuxiliaryCable(int port) throws UnsupportedOperationException;
/**
* The {@link net.dv8tion.jda.core.AccountType} of the currently logged in account.
* <br>Used when determining functions that are restricted based on the type of account.
*
* @return The current AccountType.
*/
AccountType getAccountType();
/**
* Used to access Client specific functions like Groups, Calls, and Friends.
*
* @throws net.dv8tion.jda.core.exceptions.AccountTypeException
* Thrown if the currently logged in account is {@link net.dv8tion.jda.core.AccountType#BOT}
*
* @return The {@link net.dv8tion.jda.client.JDAClient} registry for this instance of JDA.
*/
JDAClient asClient();
/**
* Used to access Bot specific functions like OAuth information.
*
* @throws net.dv8tion.jda.core.exceptions.AccountTypeException
* Thrown if the currently logged in account is {@link net.dv8tion.jda.core.AccountType#CLIENT}
*
* @return The {@link net.dv8tion.jda.bot.JDABot} registry for this instance of JDA.
*/
JDABot asBot();
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.settings;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.impl.DebuggerUtilsEx;
import com.intellij.debugger.ui.JavaDebuggerSupport;
import com.intellij.debugger.ui.tree.render.ClassRenderer;
import com.intellij.debugger.ui.tree.render.PrimitiveRenderer;
import com.intellij.debugger.ui.tree.render.ToStringRenderer;
import com.intellij.openapi.options.OptionsBundle;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.openapi.project.Project;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.StateRestoringCheckBox;
import com.intellij.ui.classFilter.ClassFilterEditor;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import static java.awt.GridBagConstraints.*;
/**
 * The "Data Views" (Java) debugger settings page. Edits global rendering options
 * for the debugger variables view: autoscroll to new locals, which fields to show
 * (synthetic/static/static-final/val-as-local), declared type / FQ names / object id
 * display, strings type, hex values for primitives, alternate collection views, and
 * {@code toString()} rendering (for all overriders or only a class-filter list).
 * Backing state lives in {@link ViewsGeneralSettings} and {@link NodeRendererSettings}.
 *
 * @author Eugene Belyaev
 */
public class DebuggerDataViewsConfigurable implements SearchableConfigurable {
  private JCheckBox myCbAutoscroll;
  private JCheckBox myCbShowSyntheticFields;
  // StateRestoringCheckBox remembers its selection while temporarily made
  // unselectable by a controlling parent checkbox (see the ChangeListeners below).
  private StateRestoringCheckBox myCbShowValFieldsAsLocalVariables;
  private JCheckBox myCbHideNullArrayElements;
  private JCheckBox myCbShowStatic;
  private JCheckBox myCbShowDeclaredType;
  private JCheckBox myCbShowFQNames;
  private JCheckBox myCbShowObjectId;
  private JCheckBox myCbShowStringsType;
  private JCheckBox myCbHexValue;
  private StateRestoringCheckBox myCbShowStaticFinalFields;
  //private final ArrayRendererConfigurable myArrayRendererConfigurable;
  private JCheckBox myCbEnableAlternateViews;
  private JCheckBox myCbEnableToString;
  private JRadioButton myRbAllThatOverride;
  private JRadioButton myRbFromList;
  private ClassFilterEditor myToStringFilterEditor;
  // May be null until createComponent(), which falls back to the shared
  // "context project for debugger configurables".
  private Project myProject;

  public DebuggerDataViewsConfigurable(@Nullable Project project) {
    myProject = project;
    //myArrayRendererConfigurable = new ArrayRendererConfigurable(NodeRendererSettings.getInstance().getArrayRenderer());
  }

  @Override
  public void disposeUIResources() {
    //myArrayRendererConfigurable.disposeUIResources();
    // Drop references so the UI (and the project) can be garbage-collected.
    myToStringFilterEditor = null;
    myProject = null;
  }

  @Override
  public String getDisplayName() {
    return OptionsBundle.message("options.java.display.name");
  }

  /**
   * Builds the settings panel. Creates all checkboxes/radio buttons, wires the
   * parent/child enabling rules, and lays everything out with GridBagLayout.
   */
  @Override
  public JComponent createComponent() {
    if (myProject == null) {
      myProject = JavaDebuggerSupport.getContextProjectForEditorFieldsInDebuggerConfigurables();
    }
    final JPanel panel = new JPanel(new GridBagLayout());
    myCbAutoscroll = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.autoscroll"));
    myCbShowSyntheticFields = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.synthetic.fields"));
    myCbShowValFieldsAsLocalVariables = new StateRestoringCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.val.fields.as.locals"));
    myCbHideNullArrayElements = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.hide.null.array.elements"));
    myCbShowStatic = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.static.fields"));
    myCbShowStaticFinalFields = new StateRestoringCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.static.final.fields"));
    myCbEnableAlternateViews = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.alternate.view"));
    // "Show static final" is only meaningful while "show static" is on.
    myCbShowStatic.addChangeListener(new ChangeListener(){
      @Override
      public void stateChanged(ChangeEvent e) {
        if(myCbShowStatic.isSelected()) {
          myCbShowStaticFinalFields.makeSelectable();
        }
        else {
          myCbShowStaticFinalFields.makeUnselectable(false);
        }
      }
    });
    // "Show val fields as locals" is only meaningful while "show synthetic fields" is on.
    myCbShowSyntheticFields.addChangeListener(new ChangeListener() {
      @Override
      public void stateChanged(ChangeEvent e) {
        if(myCbShowSyntheticFields.isSelected()) {
          myCbShowValFieldsAsLocalVariables.makeSelectable();
        }
        else {
          myCbShowValFieldsAsLocalVariables.makeUnselectable(false);
        }
      }
    });
    myCbShowDeclaredType = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.declared.type"));
    myCbShowFQNames = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.fq.names"));
    myCbShowObjectId = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.object.id"));
    myCbHexValue = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.hex.value"));
    myCbShowStringsType = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.show.strings.type"));
    myCbEnableToString = new JCheckBox(DebuggerBundle.message("label.base.renderer.configurable.enable.toString"));
    myRbAllThatOverride = new JRadioButton(DebuggerBundle.message("label.base.renderer.configurable.all.overriding"));
    myRbFromList = new JRadioButton(DebuggerBundle.message("label.base.renderer.configurable.classes.from.list"));
    ButtonGroup group = new ButtonGroup();
    group.add(myRbAllThatOverride);
    group.add(myRbFromList);
    myToStringFilterEditor = new ClassFilterEditor(myProject, null, "reference.viewBreakpoints.classFilters.newPattern");
    // The radio buttons and the filter editor are only active while toString rendering is enabled;
    // the editor additionally requires the "classes from list" mode.
    myCbEnableToString.addItemListener(new ItemListener() {
      @Override
      public void itemStateChanged(ItemEvent e) {
        final boolean enabled = myCbEnableToString.isSelected();
        myRbAllThatOverride.setEnabled(enabled);
        myRbFromList.setEnabled(enabled);
        myToStringFilterEditor.setEnabled(enabled && myRbFromList.isSelected());
      }
    });
    myRbFromList.addItemListener(new ItemListener() {
      @Override
      public void itemStateChanged(ItemEvent e) {
        myToStringFilterEditor.setEnabled(myCbEnableToString.isSelected() && myRbFromList.isSelected());
      }
    });
    panel.add(myCbAutoscroll, new GridBagConstraints(0, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insetsTop(4), 0, 0));
    // "Show" group: two columns of related checkboxes.
    final JPanel showPanel = new JPanel(new GridBagLayout());
    showPanel.setBorder(IdeBorderFactory.createTitledBorder("Show", true));
    showPanel.add(myCbShowDeclaredType, new GridBagConstraints(0, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.emptyInsets(), 0, 0));
    showPanel.add(myCbShowObjectId, new GridBagConstraints(0, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insetsTop(4), 0, 0));
    showPanel.add(myCbShowSyntheticFields, new GridBagConstraints(1, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insetsLeft(10), 0, 0));
    showPanel.add(myCbShowStatic, new GridBagConstraints(1, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insets(4, 10, 0, 0), 0, 0));
    showPanel.add(myCbShowValFieldsAsLocalVariables, new GridBagConstraints(2, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insets(4, 10, 0, 0), 0, 0));
    showPanel.add(myCbShowStaticFinalFields, new GridBagConstraints(2, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insets(4, 10, 0, 0), 0, 0));
    showPanel.add(myCbShowFQNames, new GridBagConstraints(3, RELATIVE, 1, 1, 1.0, 0.0, WEST, NONE, JBUI.insetsLeft(10), 0, 0));
    panel.add(showPanel, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, WEST, HORIZONTAL, JBUI.insetsTop(4), 0, 0));
    //final JPanel arraysPanel = new JPanel(new BorderLayout(0, UIUtil.DEFAULT_VGAP));
    //final JComponent arraysComponent = myArrayRendererConfigurable.createComponent();
    //assert arraysComponent != null;
    //arraysPanel.add(arraysComponent, BorderLayout.CENTER);
    //arraysPanel.add(myCbHideNullArrayElements, BorderLayout.SOUTH);
    //arraysPanel.setBorder(IdeBorderFactory.createTitledBorder("Arrays", true));
    //panel.add(arraysPanel, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 3, 1, 1.0, 0.0, GridBagConstraints.NORTH, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    panel.add(myCbShowStringsType, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, NORTH, HORIZONTAL, JBUI.emptyInsets(), 0, 0));
    panel.add(myCbHexValue, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, NORTH, HORIZONTAL, JBUI.insetsTop(4), 0, 0));
    panel.add(myCbHideNullArrayElements, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, NORTH, HORIZONTAL, JBUI.insetsTop(4), 0, 0));
    panel.add(myCbEnableAlternateViews, new GridBagConstraints(0, RELATIVE, 1, 1, 0.0, 0.0, WEST, NONE, JBUI.insets(4, 0, 0, 10), 0, 0));
    // starting 4-th row
    panel.add(myCbEnableToString, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, WEST, NONE, JBUI.insetsTop(4), 0, 0));
    panel.add(myRbAllThatOverride, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, WEST, NONE, JBUI.insetsLeft(12), 0, 0));
    panel.add(myRbFromList, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 0.0, WEST, NONE, JBUI.insetsLeft(12), 0, 0));
    myToStringFilterEditor.setMinimumSize(JBUI.size(50, 100));
    // The filter editor takes all remaining vertical space (weighty = 1.0, fill = BOTH).
    panel.add(myToStringFilterEditor, new GridBagConstraints(0, RELATIVE, 3, 1, 1.0, 1.0, CENTER, BOTH, JBUI.insetsLeft(12), 0, 0));
    return panel;
  }

  /** Copies the UI state into the settings singletons and notifies renderer listeners. */
  @Override
  public void apply() {
    final ViewsGeneralSettings generalSettings = ViewsGeneralSettings.getInstance();
    final NodeRendererSettings rendererSettings = NodeRendererSettings.getInstance();
    generalSettings.AUTOSCROLL_TO_NEW_LOCALS = myCbAutoscroll.isSelected();
    rendererSettings.setAlternateCollectionViewsEnabled(myCbEnableAlternateViews.isSelected());
    generalSettings.HIDE_NULL_ARRAY_ELEMENTS = myCbHideNullArrayElements.isSelected();
    final ClassRenderer classRenderer = rendererSettings.getClassRenderer();
    classRenderer.SHOW_STATIC = myCbShowStatic.isSelected();
    // isSelectedWhenSelectable() ignores the checkbox while it is disabled by its parent option.
    classRenderer.SHOW_STATIC_FINAL = myCbShowStaticFinalFields.isSelectedWhenSelectable();
    classRenderer.SHOW_SYNTHETICS = myCbShowSyntheticFields.isSelected();
    classRenderer.SHOW_VAL_FIELDS_AS_LOCAL_VARIABLES = myCbShowValFieldsAsLocalVariables.isSelectedWhenSelectable();
    classRenderer.SHOW_DECLARED_TYPE = myCbShowDeclaredType.isSelected();
    classRenderer.SHOW_FQ_TYPE_NAMES = myCbShowFQNames.isSelected();
    classRenderer.SHOW_OBJECT_ID = myCbShowObjectId.isSelected();
    classRenderer.SHOW_STRINGS_TYPE = myCbShowStringsType.isSelected();
    final ToStringRenderer toStringRenderer = rendererSettings.getToStringRenderer();
    toStringRenderer.setEnabled(myCbEnableToString.isSelected());
    toStringRenderer.setUseClassFilters(myRbFromList.isSelected());
    toStringRenderer.setClassFilters(myToStringFilterEditor.getFilters());
    PrimitiveRenderer primitiveRenderer = rendererSettings.getPrimitiveRenderer();
    primitiveRenderer.setShowHexValue(myCbHexValue.isSelected());
    rendererSettings.fireRenderersChanged();
  }

  /** Loads the UI state from the settings singletons (inverse of {@link #apply()}). */
  @Override
  public void reset() {
    final ViewsGeneralSettings generalSettings = ViewsGeneralSettings.getInstance();
    final NodeRendererSettings rendererSettings = NodeRendererSettings.getInstance();
    myCbAutoscroll.setSelected(generalSettings.AUTOSCROLL_TO_NEW_LOCALS);
    myCbHideNullArrayElements.setSelected(generalSettings.HIDE_NULL_ARRAY_ELEMENTS);
    myCbEnableAlternateViews.setSelected(rendererSettings.areAlternateCollectionViewsEnabled());
    ClassRenderer classRenderer = rendererSettings.getClassRenderer();
    myCbShowSyntheticFields.setSelected(classRenderer.SHOW_SYNTHETICS);
    myCbShowValFieldsAsLocalVariables.setSelected(classRenderer.SHOW_VAL_FIELDS_AS_LOCAL_VARIABLES);
    if (!classRenderer.SHOW_SYNTHETICS) {
      myCbShowValFieldsAsLocalVariables.makeUnselectable(false);
    }
    myCbShowStatic.setSelected(classRenderer.SHOW_STATIC);
    myCbShowStaticFinalFields.setSelected(classRenderer.SHOW_STATIC_FINAL);
    if(!classRenderer.SHOW_STATIC) {
      myCbShowStaticFinalFields.makeUnselectable(false);
    }
    myCbShowDeclaredType.setSelected(classRenderer.SHOW_DECLARED_TYPE);
    myCbShowFQNames.setSelected(classRenderer.SHOW_FQ_TYPE_NAMES);
    myCbShowObjectId.setSelected(classRenderer.SHOW_OBJECT_ID);
    myCbShowStringsType.setSelected(classRenderer.SHOW_STRINGS_TYPE);
    final ToStringRenderer toStringRenderer = rendererSettings.getToStringRenderer();
    final boolean toStringEnabled = toStringRenderer.isEnabled();
    final boolean useClassFilters = toStringRenderer.isUseClassFilters();
    myCbEnableToString.setSelected(toStringEnabled);
    myRbAllThatOverride.setSelected(!useClassFilters);
    myRbFromList.setSelected(useClassFilters);
    myToStringFilterEditor.setFilters(toStringRenderer.getClassFilters());
    myToStringFilterEditor.setEnabled(toStringEnabled && useClassFilters);
    myRbFromList.setEnabled(toStringEnabled);
    myRbAllThatOverride.setEnabled(toStringEnabled);
    PrimitiveRenderer primitiveRenderer = rendererSettings.getPrimitiveRenderer();
    myCbHexValue.setSelected(primitiveRenderer.isShowHexValue());
  }

  @Override
  public boolean isModified() {
    return areGeneralSettingsModified() || areDefaultRenderersModified();
  }

  // True when the UI differs from ViewsGeneralSettings.
  private boolean areGeneralSettingsModified() {
    ViewsGeneralSettings generalSettings = ViewsGeneralSettings.getInstance();
    return generalSettings.AUTOSCROLL_TO_NEW_LOCALS != myCbAutoscroll.isSelected() ||
           generalSettings.HIDE_NULL_ARRAY_ELEMENTS != myCbHideNullArrayElements.isSelected();
  }

  // True when the UI differs from any of the NodeRendererSettings renderers.
  private boolean areDefaultRenderersModified() {
    //if (myArrayRendererConfigurable.isModified()) {
    //  return true;
    //}
    final NodeRendererSettings rendererSettings = NodeRendererSettings.getInstance();
    final ClassRenderer classRenderer = rendererSettings.getClassRenderer();
    final boolean isClassRendererModified=
      (classRenderer.SHOW_STATIC != myCbShowStatic.isSelected()) ||
      (classRenderer.SHOW_STATIC_FINAL != myCbShowStaticFinalFields.isSelectedWhenSelectable()) ||
      (classRenderer.SHOW_SYNTHETICS != myCbShowSyntheticFields.isSelected()) ||
      (classRenderer.SHOW_VAL_FIELDS_AS_LOCAL_VARIABLES != myCbShowValFieldsAsLocalVariables.isSelectedWhenSelectable()) ||
      (classRenderer.SHOW_DECLARED_TYPE != myCbShowDeclaredType.isSelected()) ||
      (classRenderer.SHOW_FQ_TYPE_NAMES != myCbShowFQNames.isSelected()) ||
      (classRenderer.SHOW_OBJECT_ID != myCbShowObjectId.isSelected()) ||
      (classRenderer.SHOW_STRINGS_TYPE != myCbShowStringsType.isSelected());
    if (isClassRendererModified) {
      return true;
    }
    final ToStringRenderer toStringRenderer = rendererSettings.getToStringRenderer();
    final boolean isToStringRendererModified =
      (toStringRenderer.isEnabled() != myCbEnableToString.isSelected()) ||
      (toStringRenderer.isUseClassFilters() != myRbFromList.isSelected()) ||
      (!DebuggerUtilsEx.filterEquals(toStringRenderer.getClassFilters(), myToStringFilterEditor.getFilters()));
    if (isToStringRendererModified) {
      return true;
    }
    if (rendererSettings.areAlternateCollectionViewsEnabled() != myCbEnableAlternateViews.isSelected()) {
      return true;
    }
    PrimitiveRenderer primitiveRenderer = rendererSettings.getPrimitiveRenderer();
    if (primitiveRenderer.isShowHexValue() != myCbHexValue.isSelected()) {
      return true;
    }
    return false;
  }

  @SuppressWarnings("SpellCheckingInspection")
  @Override
  @NotNull
  public String getHelpTopic() {
    return "reference.idesettings.debugger.dataviews";
  }

  // The searchable-configurable id reuses the help topic string.
  @Override
  @NotNull
  public String getId() {
    return getHelpTopic();
  }
}
| |
/*
Derby - Class org.apache.derbyTesting.unitTests.store.T_FileSystemData
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyTesting.unitTests.store;
import org.apache.derby.impl.store.raw.data.*;
import org.apache.derbyTesting.unitTests.harness.T_MultiThreadedIterations;
import org.apache.derbyTesting.unitTests.harness.T_Fail;
import org.apache.derby.iapi.services.context.ContextService;
import org.apache.derby.iapi.services.context.ContextManager;
import org.apache.derby.iapi.services.locks.*;
import org.apache.derby.iapi.services.monitor.Monitor;
import org.apache.derby.iapi.services.sanity.SanityManager;
import org.apache.derby.iapi.services.io.Storable;
import org.apache.derby.iapi.services.property.PropertyUtil;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.store.raw.*;
import org.apache.derby.iapi.store.raw.xact.RawTransaction;
import org.apache.derby.iapi.store.raw.data.RawContainerHandle;
import org.apache.derby.iapi.store.access.conglomerate.LogicalUndo;
import org.apache.derby.iapi.reference.Property;
import java.io.*;
import java.util.Properties;
/**
An Impl unittest for rawstore data that is based on the FileSystem
*/
public class T_FileSystemData extends T_MultiThreadedIterations {
    // Name of the persistent service created for this test run.
    private static final String testService = "fileSystemDataTest";

    // Row payloads used throughout the tests.
    static final String REC_001 = "McLaren";
    static final String REC_002 = "Ferrari";
    static final String REC_003 = "Benetton";
    static final String REC_004 = "Prost";
    static final String REC_005 = "Tyrell";
    static final String REC_006 = "Derby, Natscape, Goatscape, the popular names";
    // Long row used to force record overflow across pages.
    static final String REC_007 = "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz";

    // Savepoint names used by the rollback tests.
    static final String SP1 = "savepoint1";
    static final String SP2 = "savepoint2";

    // Shared across all test threads; initialized once in setupTest().
    static RawStoreFactory factory;
    static LockFactory lf;
    static long commonContainer = -1;

    // Whether rollback paths are exercised; initialized in setupTest()
    // from the system property below.
    static boolean testRollback; // initialize in start
    static final String TEST_ROLLBACK_OFF = "derby.RawStore.RollbackTestOff";

    private static ContextService contextService;
    // Per-instance test helper (each thread gets its own).
    private T_Util t_util;
public T_FileSystemData()
{
super();
}
    /**
        Boots the test module and caches the {@link ContextService} factory,
        which the tests later use to create/switch context managers.

        @exception StandardException cannot startup the context service
    */
    public void boot(boolean create, Properties startParams)
        throws StandardException
    {
        super.boot(create, startParams);
        contextService = ContextService.getFactory();
    }
    /*
    ** Methods required by T_Generic
    */

    /** @return the protocol (module) name under test: the raw store factory module. */
    protected String getModuleToTestProtocolName() {
        return RawStoreFactory.MODULE;
    }
/**
Run the tests
@exception T_Fail Unexpected behaviour from the API
*/
protected void setupTest() throws T_Fail
{
String rollbackOff = PropertyUtil.getSystemProperty(TEST_ROLLBACK_OFF);
testRollback = !Boolean.valueOf(rollbackOff).booleanValue();
// don't automatic boot this service if it gets left around
if (startParams == null) {
startParams = new Properties();
}
// see if we are testing encryption
startParams = T_Util.setEncryptionParam(startParams);
startParams.put(Property.NO_AUTO_BOOT, Boolean.TRUE.toString());
// remove the service directory to ensure a clean run
startParams.put(Property.DELETE_ON_CREATE, Boolean.TRUE.toString());
try {
factory = (RawStoreFactory) Monitor.createPersistentService(getModuleToTestProtocolName(),
testService, startParams);
if (factory == null) {
throw T_Fail.testFailMsg(getModuleToTestProtocolName() + " service not started.");
}
lf = factory.getLockFactory();
if (lf == null) {
throw T_Fail.testFailMsg("LockFactory.MODULE not found");
}
} catch (StandardException mse) {
throw T_Fail.exceptionFail(mse);
}
t_util = new T_Util(factory, lf, contextService);
commonContainer = commonContainer();
return;
}
    /**
     * T_MultiThreadedIteration method
     *
     * Setup for the joining (child) threads: verifies that the main thread's
     * setupTest() already ran, then builds this thread's own helper.
     *
     * @exception T_Fail Unexpected behaviour from the API
     */
    protected void joinSetupTest() throws T_Fail {
        T_Fail.T_ASSERT(factory != null, "raw store factory not setup ");
        T_Fail.T_ASSERT(contextService != null, "Context service not setup ");
        T_Fail.T_ASSERT(commonContainer != -1, "common container not setup ");
        t_util = new T_Util(factory, lf, contextService);
    }
protected T_MultiThreadedIterations newTestObject() {
return new T_FileSystemData();
}
    /**
        run the test

        Runs the cost-estimation and allocation test groups inside a fresh
        context manager; on failure the context is cleaned up before the
        error is converted to a T_Fail.

        @exception T_Fail Unexpected behaviour from the API
    */
    protected void runTestSet() throws T_Fail {
        // get a utility helper
        ContextManager cm1 = contextService.newContextManager();
        contextService.setCurrentContextManager(cm1);
        try {
            runCostEstimationTests();
            runAllocationTests();
        } catch (StandardException se) {
            cm1.cleanupOnError(se);
            throw T_Fail.exceptionFail(se);
        }
        finally {
            // always detach the context manager from this thread
            contextService.resetCurrentContextManager(cm1);
        }
    }
    /*
     * create a container that all threads can use
     */
    private long commonContainer() throws T_Fail
    {
        ContextManager cm1 = contextService.newContextManager();
        contextService.setCurrentContextManager(cm1);
        long cid;
        try {
            // create and commit the container in its own transaction so the
            // id is visible to every thread
            Transaction t = t_util.t_startTransaction();
            cid = t_util.t_addContainer(t, 0);
            t_util.t_commit(t);
            t.close();
        }
        catch (StandardException se) {
            cm1.cleanupOnError(se);
            throw T_Fail.exceptionFail(se);
        }
        finally {
            contextService.resetCurrentContextManager(cm1);
        }
        return cid;
    }
    // Group runner: cost-estimation tests.
    protected void runCostEstimationTests() throws T_Fail, StandardException
    {
        CostEstimationTest1();
    }
    // Group runner: allocation tests. Single-container tests run only in the
    // first two threads; the multi-threaded test runs in every thread.
    protected void runAllocationTests() throws T_Fail, StandardException
    {
        // don't run these for > 2 threads
        if (threadNumber < 2)
        {
            AllocTest1();       // test remove and reuse of page
            AllocTest2();       // test remove and drop and rollback of remove
            AllocTest3();       // test multiple alloc page
            AllocTest4();       // test preallocation
        }
        // can't get this test to pass consistently because it depends on
        // timing of the cache.
        // AllocTest5();        // test getting 1/2 filled page for insert
        AllocMTest1(commonContainer); // test multi thread access to the same container
    }
/**
@exception T_Fail Unexpected behaviour from the API
@exception StandardException Standard Derby error policy
*/
protected void CostEstimationTest1() throws StandardException, T_Fail
{
// getEstimatedRowCount(0), setEstimatedRowCount(long count, int flag),
// getEstimatedPageCount(int flag);
Transaction t = t_util.t_startTransaction();
long cid = t_util.t_addContainer(t, 0);
t_util.t_commit(t);
ContainerHandle c = t_util.t_openContainer(t, 0, cid, true);
try
{
int numRows = 10;
T_RawStoreRow row = new T_RawStoreRow(REC_001);
RecordHandle rh[] = new RecordHandle[numRows];
// insert numRows rows into container
for (int i = 0; i < numRows; i++)
rh[i] = t_util.t_insert(c, row);
t_util.t_commit(t);
c = t_util.t_openContainer(t, 0, cid, true);
if ((c.getEstimatedRowCount(0) != numRows) &&
(c.getEstimatedRowCount(0) != (numRows - 1)))
{
// due to timing, sometimes estimate row count is 9 rather than
// 10.
throw T_Fail.testFailMsg(
"expect estimated row count to be " + (numRows - 1) +
" or " + numRows +
", got " + c.getEstimatedRowCount(0));
}
// now update them that cause overflowing - expect the same row count
T_RawStoreRow longRow = new T_RawStoreRow(REC_007);
for (int i = 0; i < numRows; i++)
t_util.t_update(c, rh[i], longRow);
t_util.t_commit(t);
c = t_util.t_openContainer(t, 0, cid, true);
if (c.getEstimatedRowCount(0) != numRows)
if ((c.getEstimatedRowCount(0) != numRows) &&
(c.getEstimatedRowCount(0) != (numRows - 1)))
{
// due to timing, sometimes estimate row count is 9 rather than
// 10.
throw T_Fail.testFailMsg(
"expect after update same estimated row count, but it is not." +
"expect estimated row count to be " + (numRows - 1) +
" or " + numRows + ", got " + c.getEstimatedRowCount(0));
}
// now focibly set the row count
c.setEstimatedRowCount(2*numRows, 0);
if (c.getEstimatedRowCount(0) != 2*numRows)
throw T_Fail.testFailMsg("forcibly setting estimated row count doesn't seem to work");
// now purge some rows, this should alter the row count.
Page p = null;
long pnum = 0;
long purgedCount = 0;
for (p = c.getFirstPage(); p != null; p = c.getNextPage(pnum))
{
int rcount = p.recordCount()/3;
pnum = p.getPageNumber();
p.deleteAtSlot(0, true, (LogicalUndo)null);
p.purgeAtSlot(rcount, rcount, true); // purget the middle 1/3 of the page
purgedCount += rcount + 1;
p.unlatch();
}
t_util.t_commit(t);
c = t_util.t_openContainer(t, 0, cid, true);
if (c.getEstimatedRowCount(0) != (2*numRows - purgedCount))
throw T_Fail.testFailMsg("expect " + (2*numRows-purgedCount) +
" after purge");
// now get rid of some pages to alter the row count
REPORT("before page delete, estRC = " + (2*numRows) + " - " + purgedCount);
for (p = c.getFirstPage(); p != null; p = c.getNextPage(pnum))
{
pnum = p.getPageNumber();
if ((pnum%2) == 0)
{
purgedCount += p.nonDeletedRecordCount();
c.removePage(p);
}
else
p.unlatch();
}
t_util.t_commit(t);
c = t_util.t_openContainer(t, 0, cid, true);
if (c.getEstimatedRowCount(0) != (2*numRows - purgedCount))
throw T_Fail.testFailMsg("expect " + (2*numRows-purgedCount) +
" after page remove, got " + c.getEstimatedRowCount(0));
PASS("CostEstimationTest1");
}
finally
{
t_util.t_commit(t);
t.close();
}
}
    // Tests that removed pages are eventually reused by addPage(), and that a
    // reused page comes back empty with fresh record ids. Timing-dependent:
    // reuse may legitimately not happen within the attempt budget.
    protected void AllocTest1() throws StandardException, T_Fail
    {
        /**
            test remove and reuse of page
        */
        Transaction t = t_util.t_startTransaction();
        try
        {
            long cid = t_util.t_addContainer(t, 0);
            t_util.t_commit(t);
            ContainerHandle c = t_util.t_openContainer(t, 0, cid, true);

            // create 5 pages, each insert a row into it, then remove 2 of them
            Page page1 = t_util.t_getPage(c, ContainerHandle.FIRST_PAGE_NUMBER);
            long p1 = page1.getPageNumber();
            T_RawStoreRow row1 = new T_RawStoreRow(REC_001);
            t_util.t_insert(page1, row1);

            Page page2 = t_util.t_addPage(c);
            long p2 = page2.getPageNumber();
            T_RawStoreRow row2 = new T_RawStoreRow(REC_002);
            int rid2 = t_util.t_insert(page2, row2).getId();

            Page page3 = t_util.t_addPage(c);
            long p3 = page3.getPageNumber();
            T_RawStoreRow row3 = new T_RawStoreRow(REC_003);
            t_util.t_insert(page3, row3);

            Page page4 = t_util.t_addPage(c);
            long p4 = page4.getPageNumber();
            T_RawStoreRow row4 = new T_RawStoreRow(REC_004);
            int rid4 = t_util.t_insert(page4, row4).getId();

            Page page5 = t_util.t_addPage(c);
            long p5 = page5.getPageNumber();
            T_RawStoreRow row5 = new T_RawStoreRow(REC_005);
            t_util.t_insert(page5, row5);

            t_util.t_removePage(c, page2);
            t_util.t_removePage(c, page4);
            t_util.t_commit(t);

            // now all the pages are unlatched
            // pages 2, 4 has been removed, pages 1, 3, 5 has not
            // make sure pages that are removed cannot be found again
            c = t_util.t_openContainer(t, 0, cid, true);
            if (SanityManager.DEBUG)
                SanityManager.DEBUG("SpaceTrace", "containeropened");
            Page p = c.getFirstPage();
            if (p == null)
                throw T_Fail.testFailMsg("get first page failed: expect " + p1 + " got null");
            if (p.getPageNumber() != p1)
                throw T_Fail.testFailMsg("get first page failed: expect " + p1
                                         + " got " + p.getPageNumber());
            t_util.t_commit(t);

            // closing the transaction many times to see if we can get the
            // deallocated page to free
            c = t_util.t_openContainer(t, 0, cid, true);
            p = c.getNextPage(p1);   // must skip removed page 2
            if (p == null || p.getPageNumber() != p3)
                throw T_Fail.testFailMsg("get next page failed");
            t_util.t_commit(t);

            c = t_util.t_openContainer(t, 0, cid, true);
            p = c.getNextPage(p3);   // must skip removed page 4
            if (p == null || p.getPageNumber() != p5)
                throw T_Fail.testFailMsg("get next page failed");
            t_util.t_commit(t);

            c = t_util.t_openContainer(t, 0, cid, true);
            p = t_util.t_getLastPage(c); // make sure it skips over p5
            if (p == null || p.getPageNumber() != p5)
                throw T_Fail.testFailMsg("getLastPage failed");
            t_util.t_commit(t);

            // see if we can get any deallocated page back within `tries` (100)
            // attempts of add page; stop as soon as a page number repeats
            int tries = 100;
            T_RawStoreRow row6 = new T_RawStoreRow(REC_001);
            long pnums[] = new long[tries];
            int rids[] = new int[tries];
            pnums[0] = p2; // pages 2 and 4 have been removed for a long time
            rids[0] = rid2;
            pnums[1] = p4;
            rids[1] = rid4;
            int match = -1;
            int i;
            for (i = 2 ; match < 0 && i < tries; i++)
            {
                c = t_util.t_openContainer(t, 0, cid, true);
                p = t_util.t_addPage(c);
                pnums[i] = p.getPageNumber();
                // NOTE(review): the bound j < i-1 skips comparing against the
                // immediately preceding attempt (pnums[i-1]) — looks deliberate
                // for a just-removed, not-yet-reusable page, but confirm.
                for (int j = 0; j < i-1; j++)
                {
                    if (pnums[j] == pnums[i])
                    {
                        match = j;
                        break;
                    }
                }
                if (match >= 0)
                {
                    // p is a reused one, make sure it is empty
                    t_util.t_checkEmptyPage(p);
                    RecordHandle rh = t_util.t_insert(p, row6);
                    if (rh.getId() == rids[match])
                        throw T_Fail.testFailMsg("reused page recordId is not preserved");
                    break;
                }
                else
                    rids[i] = t_util.t_insert(p, row6).getId();
                t_util.t_removePage(c, p);
                t_util.t_commit(t);
            }
            t_util.t_dropContainer(t, 0, cid); // cleanup
            if (match >= 0)
                PASS("AllocTest1 success in " + i + " tries");
            else
                REPORT("AllocTest1 Not successful in " + i +
                       " tries. This is a timing depenedent test so this is not necessarily an indication of failure.");
        }
        finally
        {
            t_util.t_commit(t);
            t.close();
        }
    }
    // Tests that removing pages interacts safely with dropping the container,
    // and (when rollback testing is on) with aborting the transaction.
    protected void AllocTest2() throws StandardException, T_Fail
    {
        /**
            More Test remove and reuse of page
        */
        Transaction t = t_util.t_startTransaction();
        int numpages = 30;
        try
        {
            long cid = t_util.t_addContainer(t, 0);
            ContainerHandle c = t_util.t_openContainer(t, 0, cid, true);
            Page[] page = new Page[numpages];
            // add then immediately remove each page
            for (int i = 0; i < numpages; i++)
            {
                page[i] = t_util.t_addPage(c);
                t_util.t_removePage(c, page[i]);
            }
            // make sure a dropped container does not cause problem for page
            // that's been removed
            t_util.t_dropContainer(t, 0, cid);
            t_util.t_commit(t);

            if (testRollback)
            {
                // same sequence again, but aborted instead of committed
                cid = t_util.t_addContainer(t, 0);
                c = t_util.t_openContainer(t, 0, cid, true);
                for (int i = 0; i < numpages; i++)
                {
                    page[i] = t_util.t_addPage(c);
                    t_util.t_removePage(c, page[i]);
                }
                t_util.t_abort(t);
            }
        }
        finally
        {
            t_util.t_commit(t);
            t.close();
        }
        PASS("AllocTest2");
    }
    // Tests multiple allocation pages (sane builds only): fills the container,
    // removes half the pages, removes all pages backwards via getLastPage, then
    // rolls back to a savepoint and finally aborts, verifying row visibility at
    // each step. Allocated pages must survive a user-transaction rollback.
    protected void AllocTest3() throws StandardException, T_Fail
    {
        /* test multiple alloc pages */
        if (!SanityManager.DEBUG)
        {
            REPORT("allocTest3 cannot be run on an insane server");
            return;
        }
        else
        {
            // force small allocation pages so more than one is needed
            SanityManager.DEBUG_SET(AllocPage.TEST_MULTIPLE_ALLOC_PAGE);
            Transaction t = t_util.t_startTransaction();
            try
            {
                long cid = t_util.t_addContainer(t, 0);
                t_util.t_commit(t);
                ContainerHandle c = t_util.t_openContainer(t, 0, cid, true);
                T_RawStoreRow row = new T_RawStoreRow(REC_001);
                int numrows = 10; // create 10 pages with 1 row each
                String threadName = Thread.currentThread().getName();
                Page page;
                for (int i = 0; i < numrows; i++)
                {
                    page = t_util.t_addPage(c);
                    t_util.t_insert(page, row);
                    page.unlatch();
                }
                // verify every row is visible
                int checkrows = 0;
                long pnum;
                for (page = c.getFirstPage();
                     page != null;
                     page = c.getNextPage(pnum))
                {
                    pnum = page.getPageNumber();
                    if (page.recordCount() > 0)
                    {
                        t_util.t_checkFetchFirst(page, REC_001);
                        checkrows++;
                    }
                    page.unlatch();
                }
                if (checkrows != numrows)
                    throw T_Fail.testFailMsg("number of rows differ");
                // savepoint taken before any removal, so removals can be undone
                t.setSavePoint(SP1, null);
                // now remove 1/2 of the pages and check results
                int removedPages = 0;
                for (page = c.getFirstPage();
                     page != null;
                     page = c.getNextPage(pnum))
                {
                    pnum = page.getPageNumber();
                    if ((pnum % 2) == 0)
                    {
                        t_util.t_removePage(c, page);
                        removedPages++;
                    }
                    else
                        page.unlatch();
                }
                checkrows = 0;
                for (page = c.getFirstPage();
                     page != null;
                     page = c.getNextPage(pnum))
                {
                    pnum = page.getPageNumber();
                    if (page.recordCount() > 0)
                    {
                        t_util.t_checkFetchFirst(page, REC_001);
                        checkrows++;
                    }
                    page.unlatch();
                }
                if (checkrows != numrows - removedPages)
                    throw T_Fail.testFailMsg("number of rows differ");
                // remove every page backwards
                long lastpage = ContainerHandle.INVALID_PAGE_NUMBER;
                while((page = t_util.t_getLastPage(c)) != null) // remove the last page
                {
                    if (lastpage == page.getPageNumber())
                        throw T_Fail.testFailMsg("got a removed last page");
                    lastpage = page.getPageNumber();
                    t_util.t_removePage(c, page);
                }
                if (c.getFirstPage() != null)
                    throw T_Fail.testFailMsg("get last page returns null but get fisrt page retuns a page");
                // undo all removals; rows must reappear
                t.rollbackToSavePoint(SP1, null); // roll back removes
                c = t_util.t_openContainer(t, 0, cid, true);
                checkrows = 0;
                for (page = c.getFirstPage();
                     page != null;
                     page = c.getNextPage(pnum))
                {
                    pnum = page.getPageNumber();
                    if (page.recordCount() > 0)
                    {
                        t_util.t_checkFetchFirst(page, REC_001);
                        checkrows++;
                    }
                    page.unlatch();
                }
                if (checkrows != numrows)
                    throw T_Fail.testFailMsg(threadName + "number of rows differ expect " +
                                             numrows + " got " + checkrows);
                t_util.t_abort(t); // abort the whole thing, no rows left
                c = t_util.t_openContainer(t, 0, cid, true);
                int countPages = 0;
                for (page = c.getFirstPage();
                     page != null;
                     page = c.getNextPage(pnum))
                {
                    countPages++;
                    pnum = page.getPageNumber();
                    if (page.nonDeletedRecordCount() > 0)
                    {
                        throw T_Fail.testFailMsg("failed to remove everything " +
                                                 page.nonDeletedRecordCount() +
                                                 " rows left on page " + pnum);
                    }
                    page.unlatch();
                }
                // the abort undoes the inserts but must not deallocate the pages
                if (countPages < numrows)
                    throw T_Fail.testFailMsg("rollback of user transaction should not remove allocated pages");
                t_util.t_dropContainer(t, 0, cid);
            }
            finally
            {
                SanityManager.DEBUG_CLEAR(AllocPage.TEST_MULTIPLE_ALLOC_PAGE);
                t_util.t_commit(t);
                t.close();
            }
            PASS("AllocTest3");
        }
    }
/**
 * AllocTest4 - tests three allocation features:
 * (1) container preallocation via CONTAINER_INITIAL_PAGES,
 * (2) the bulk-load addPage interface (ADD_PAGE_BULK), and
 * (3) the explicit preAllocate interface.
 *
 * Requires a DEBUG (sane) server because it sets the
 * AllocPage.TEST_MULTIPLE_ALLOC_PAGE debug flag.
 *
 * @exception StandardException Unexpected exception from the implementation
 * @exception T_Fail Unexpected behaviour from the API
 */
protected void AllocTest4() throws StandardException, T_Fail
{
    if (!SanityManager.DEBUG)
    {
        // Fixed: this message previously (and incorrectly) said "allocTest3".
        REPORT("AllocTest4 cannot be run on an insane server");
        return;
    }
    else
    {
        SanityManager.DEBUG_SET(AllocPage.TEST_MULTIPLE_ALLOC_PAGE);
        Transaction t = t_util.t_startTransaction();
        try
        {
            ////////////////////////////////////////////////////////
            // first test preallocation large table
            ////////////////////////////////////////////////////////
            Properties tableProperties = new Properties();
            tableProperties.put(Property.PAGE_SIZE_PARAMETER, Integer.toString(1024));
            tableProperties.put(RawStoreFactory.CONTAINER_INITIAL_PAGES, Integer.toString(100));

            long cid1 =
                t.addContainer(
                    0, ContainerHandle.DEFAULT_ASSIGN_ID,
                    ContainerHandle.MODE_DEFAULT, tableProperties, 0);
            if (cid1 < 0)
                throw T_Fail.testFailMsg("addContainer");

            ContainerHandle c1 = t_util.t_openContainer(t, 0, cid1, true);

            // Preallocated pages are not "allocated" from the container's
            // point of view: only the first page should be visible.
            Page p1 = c1.getFirstPage();
            if (p1.getPageNumber() != ContainerHandle.FIRST_PAGE_NUMBER)
                throw T_Fail.testFailMsg("expect first page to have FIRST_PAGE_NUMBER");
            p1.unlatch();

            if (c1.getNextPage(ContainerHandle.FIRST_PAGE_NUMBER) != null)
                throw T_Fail.testFailMsg("expect to have only 1 page allocated");

            t_util.t_commit(t);

            REPORT("AllocTest4 - create preallocated container " + cid1);

            ////////////////////////////////////////////////////////
            // next test special addpage interface
            ////////////////////////////////////////////////////////
            long cid2 = t_util.t_addContainer(t, 0, 1024, 0, 1, false);
            t_util.t_commit(t);

            ContainerHandle c2 = t_util.t_openContainer(t, 0, cid2, true);

            // add page for bulk load
            p1 = c2.addPage(ContainerHandle.ADD_PAGE_BULK);
            long pnum1 = p1.getPageNumber();
            p1.unlatch();

            // since the interface does not guarentee that anything special will
            // actually happen, can't really test that.  Just make sure that
            // everything else works
            Page p2 = c2.addPage();
            long pnum2 = p2.getPageNumber();
            p2.unlatch();

            Page p3 = c2.addPage(ContainerHandle.ADD_PAGE_BULK);
            long pnum3 = p3.getPageNumber();
            p3.unlatch();

            Page p = c2.getFirstPage(); // this is the first page that came with the
                                        // container when it was created
            try
            {
                long pnum0 = p.getPageNumber();
                p.unlatch();

                // walk the pages in order and verify the expected sequence
                p = c2.getNextPage(pnum0);
                if (p.getPageNumber() != pnum1)
                    throw T_Fail.testFailMsg("expected pagenum " + pnum1 + " got " + p.getPageNumber());
                p.unlatch();
                p = null;

                p = c2.getNextPage(pnum1);
                if (p.getPageNumber() != pnum2)
                    throw T_Fail.testFailMsg("expected pagenum " + pnum2 + " got " + p.getPageNumber());
                p.unlatch();
                p = null;

                p = c2.getNextPage(pnum2);
                if (p.getPageNumber() != pnum3)
                    throw T_Fail.testFailMsg("expected pagenum " + pnum3 + " got " + p.getPageNumber());
                p.unlatch();
                p = null;

                p = c2.getNextPage(pnum3);
                if (p != null)
                    throw T_Fail.testFailMsg("expected null page after " + pnum3 +
                                             " got " + p.getPageNumber());

                // make sure rollback is unaffected
                if (testRollback)
                {
                    t_util.t_abort(t);
                    c2 = t_util.t_openContainer(t, 0, cid2, true);

                    // after abort all pages should still exist but be empty
                    p = t_util.t_getPage(c2, pnum0);
                    t_util.t_checkEmptyPage(p);
                    p.unlatch();
                    p = null;

                    p = t_util.t_getPage(c2, pnum1);
                    t_util.t_checkEmptyPage(p);
                    p.unlatch();
                    p = null;

                    p = t_util.t_getPage(c2, pnum2);
                    t_util.t_checkEmptyPage(p);
                    p.unlatch();
                    p = null;

                    p = t_util.t_getPage(c2, pnum3);
                    t_util.t_checkEmptyPage(p);
                    p.unlatch();
                    p = null;

                    p = t_util.t_getLastPage(c2);
                    if (p.getPageNumber() != pnum3)
                        throw T_Fail.testFailMsg("expect last page to be " + pnum3
                                                 + " got " + p.getPageNumber());
                    p.unlatch();
                    p = null;
                }
            }
            finally
            {
                // never leave a page latched on an error path
                if (p != null)
                    p.unlatch();
                p = null;
            }

            REPORT("AllocTest4 - special addPage interface " + cid2);

            ////////////////////////////////////////////////////////
            // next test preallocate interface
            ////////////////////////////////////////////////////////
            long cid3 = t_util.t_addContainer(t, 0, 1024);
            ContainerHandle c3 = t_util.t_openContainer(t, 0, cid3, true);

            // now preallocate 10 pages
            c3.preAllocate(10);

            // preAllocate must not change what is visibly allocated
            p1 = c3.getFirstPage();
            if (p1.getPageNumber() != ContainerHandle.FIRST_PAGE_NUMBER)
                throw T_Fail.testFailMsg("expect first page to have FIRST_PAGE_NUMBER");
            p1.unlatch();

            if (c3.getNextPage(ContainerHandle.FIRST_PAGE_NUMBER) != null)
                throw T_Fail.testFailMsg("expect to have only 1 page allocated");

            REPORT("AllocTest4 - preallocate interface " + cid3);

            PASS("AllocTest4 ");
        }
        finally
        {
            SanityManager.DEBUG_CLEAR(AllocPage.TEST_MULTIPLE_ALLOC_PAGE);
            t_util.t_commit(t);
            t.close();
        }
    }
}
/**
 * AllocTest5 - tests getPageForInsert and the unfilled-page tracking.
 * Creates pages with varying fill levels and verifies that
 * GET_PAGE_UNFILLED returns the half-filled pages in order, and that
 * getPageForInsert(0) keeps returning the same page until it is full.
 *
 * @exception StandardException Unexpected exception from the implementation
 * @exception T_Fail Unexpected behaviour from the API
 */
protected void AllocTest5() throws StandardException, T_Fail
{
// first create 10 1/2 filled pages with various degree of fillness
Transaction t = t_util.t_startTransaction();
try
{
long cid = t_util.t_addContainer(t, 0, 1024, 0, 90, false);
ContainerHandle c = t_util.t_openContainer(t, 0, cid, true);
Page p;
// the number of rows that is expected to fit into one page
// secret raw store calculation for 1 column rows
int numRows = (1024-60)/(95+8);
T_RawStoreRow rows[] = new T_RawStoreRow[numRows];
for (int j = 0; j < numRows; j++)
rows[j] = new T_RawStoreRow("row " + j);
// add numRows pages; the i-th added page receives (i+1) rows, so the
// later pages end up full while the earlier ones stay partially filled
for (int i = 0; i < numRows; i++)
{
p = t_util.t_addPage(c);
// validate allocation cache by getting the first page
t_util.t_getPage(c, 1).unlatch();
// insert different number of rows into these pages
for (int j = 0; j <= i; j++)
{
if (t_util.t_insert(p, rows[j]) == null)
throw T_Fail.testFailMsg("failed to insert " + (j+1) +
" rows into page " + p);
}
p.unlatch();
}
// page 1 has 0 row
// page 2 has 1 row
// page 3 has 2 rows
// page 4 has 3 rows
// page 5 has 4 rows
// page 6 has 5 rows (filled)
// page 7 has 6 rows (filled)
// page 8 has 7 rows (filled)
// page 9 has 8 rows (filled)
// page 10 has 9 rows (filled)
// these pages should be accounted for correctly because each
// subsequent page has > 1/8 for all the records in the container
// now go thru and use up all the space
p = c.getPageForInsert(0);
if (p != null)
throw T_Fail.testFailMsg("Expect last page to be full");
// now go thru and use up all the space - since we skipped page 1
// on the first loop, it won't know it is a 1/2 filled page.
for (int i = 2; i < 6; i++)
{
p = c.getPageForInsert(ContainerHandle.GET_PAGE_UNFILLED);
if (p == null)
throw T_Fail.testFailMsg("Expect next unfilled page to be " + i);
if (p.getPageNumber() != i)
throw T_Fail.testFailMsg("Expect next unfilled page to be "
+ i + ", it is " + p.getPageNumber());
t_util.t_insert(p, rows[i]);
p.unlatch();
// we should keep getting the same page back until it is full
while ((p = c.getPageForInsert(0)) != null)
{
if (p.getPageNumber() != i)
throw T_Fail.testFailMsg("Don't expect page number to change from " +
i + " to " + p.getPageNumber());
t_util.t_insert(p, rows[i]);
p.unlatch();
}
}
// once every tracked unfilled page has been topped up, no more
// unfilled pages should be reported
p = c.getPageForInsert(ContainerHandle.GET_PAGE_UNFILLED);
if (p != null)
throw T_Fail.testFailMsg("don't expect any more pages to be found");
}
finally
{
t_util.t_commit(t);
t.close();
}
PASS("AllocTest5 ");
}
/*
* MT tests on the same container
*/
/**
 * AllocMTest1 - multi-threaded allocation test run by several threads
 * against the same container.  Each thread repeatedly adds N pages,
 * removes them (committing, aborting, or rolling back to a savepoint
 * depending on the iteration), and then verifies that the total page
 * count stays bounded, i.e. that removed pages are reused.
 *
 * Requires a DEBUG (sane) server because it sets the
 * AllocPage.TEST_MULTIPLE_ALLOC_PAGE debug flag.
 *
 * @param cid the id of the shared container all threads operate on
 * @exception StandardException Unexpected exception from the implementation
 * @exception T_Fail Unexpected behaviour from the API
 */
protected void AllocMTest1(long cid) throws StandardException, T_Fail
{
if (SanityManager.DEBUG)
{
SanityManager.DEBUG_SET(AllocPage.TEST_MULTIPLE_ALLOC_PAGE);
// each thread will add N pages and remove N pages and still finds
// its own pages. Do that serveral times.
int N = 20;
RecordHandle rh[] = new RecordHandle[N];
Transaction t = t_util.t_startTransaction();
try
{
T_RawStoreRow row = new T_RawStoreRow(REC_002);
ContainerHandle c;
Page p;
for (int iteration = 0; iteration < 5; iteration++)
{
// add N pages, one row each, committing after each
for (int i = 0; i < N; i++)
{
c = t_util.t_openContainer(t, 0, cid, true);
p = t_util.t_addPage(c);
rh[i] = t_util.t_insert(p, row);
p.unlatch();
t_util.t_commit(t);
}
// remove the N pages; iteration parity decides whether the
// removal is committed, aborted, or undone via savepoint
for (int i = 0; i < N; i++)
{
c = t_util.t_openContainer(t, 0, cid, true);
t_util.t_checkFetch(c, rh[i], REC_002);
t.setSavePoint(SP1, null);
p = t_util.t_getPage(c, rh[i].getPageNumber());
t_util.t_removePage(c, p);
if ((iteration%3) == 1)
{
t.rollbackToSavePoint(SP1, null);
}
// sometimes commit sometimes abort
if (iteration % 2 == 0)
t_util.t_abort(t);
else
t_util.t_commit(t);
}
// if I aborted, remove them now
if ((iteration % 2) == 0 ||
(iteration % 3) == 1)
{
for (int i = 0; i < N; i++)
{
c = t_util.t_openContainer(t, 0, cid, true);
t_util.t_checkFetch(c, rh[i], REC_002);
p = t_util.t_getPage(c, rh[i].getPageNumber());
t_util.t_removePage(c, p);
t_util.t_commit(t);
}
}
// at any given time, there should be <= (N*numthread)+1 pages
int max = (N*getNumThreads())+1;
c = t_util.t_openContainer(t, 0, cid, false);
long pnum = 0;
int countPages = 0;
// commit inside the loop to release the container lock so other
// threads can make progress while we scan
for (p = c.getFirstPage();
p != null;
p = c.getNextPage(pnum))
{
countPages++;
pnum = p.getPageNumber();
p.unlatch();
t_util.t_commit(t); // release container lock
c = t_util.t_openContainer(t, 0, cid, false);
}
t_util.t_commit(t); // release container lock
if (countPages > max)
throw T_Fail.testFailMsg("some pages may not be reused, expect " +
max + " got " + countPages);
else
REPORT("AllocMTest1 got " + countPages );
}
}
finally
{
SanityManager.DEBUG_CLEAR(AllocPage.TEST_MULTIPLE_ALLOC_PAGE);
t_util.t_commit(t);
t.close();
}
PASS("AllocMTest1");
}
else
{
REPORT("AllocMTest1 cannot be run on an insane server");
return;
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.runtime.library.common.shuffle.impl;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.text.DecimalFormat;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import javax.crypto.SecretKey;
import com.google.common.annotations.VisibleForTesting;
import org.apache.tez.http.HttpConnectionParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.tez.common.CallableWithNdc;
import org.apache.tez.common.TezRuntimeFrameworkConfigs;
import org.apache.tez.common.TezUtilsInternal;
import org.apache.tez.common.counters.TaskCounter;
import org.apache.tez.common.counters.TezCounter;
import org.apache.tez.common.security.JobTokenSecretManager;
import org.apache.tez.dag.api.TezConstants;
import org.apache.tez.dag.api.TezUncheckedException;
import org.apache.tez.runtime.api.Event;
import org.apache.tez.runtime.api.InputContext;
import org.apache.tez.runtime.api.events.InputReadErrorEvent;
import org.apache.tez.runtime.library.api.TezRuntimeConfiguration;
import org.apache.tez.runtime.library.common.InputAttemptIdentifier;
import org.apache.tez.runtime.library.common.InputIdentifier;
import org.apache.tez.runtime.library.common.TezRuntimeUtils;
import org.apache.tez.runtime.library.common.shuffle.FetchResult;
import org.apache.tez.runtime.library.common.shuffle.FetchedInput;
import org.apache.tez.runtime.library.common.shuffle.FetchedInputAllocator;
import org.apache.tez.runtime.library.common.shuffle.Fetcher;
import org.apache.tez.runtime.library.common.shuffle.FetcherCallback;
import org.apache.tez.runtime.library.common.shuffle.InputHost;
import org.apache.tez.runtime.library.common.shuffle.ShuffleUtils;
import org.apache.tez.runtime.library.common.shuffle.FetchedInput.Type;
import org.apache.tez.runtime.library.common.shuffle.Fetcher.FetcherBuilder;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
// This only knows how to deal with a single srcIndex for a given targetIndex.
// In case the src task generates multiple outputs for the same target Index
// (multiple src-indices), modifications will be required.
public class ShuffleManager implements FetcherCallback {
private static final Logger LOG = LoggerFactory.getLogger(ShuffleManager.class);
// Context and input accounting
private final InputContext inputContext;
private final int numInputs;
private final DecimalFormat mbpsFormat = new DecimalFormat("0.00");
private final FetchedInputAllocator inputManager;
// Executors: one pool for fetchers, a single-thread executor for the scheduler loop
private final ListeningExecutorService fetcherExecutor;
private final ListeningExecutorService schedulerExecutor;
private final RunShuffleCallable schedulerCallable;
// Completed-input tracking (queue consumed by readers, set for dedup)
private final BlockingQueue<FetchedInput> completedInputs;
private final AtomicBoolean inputReadyNotificationSent = new AtomicBoolean(false);
private final Set<InputIdentifier> completedInputSet;
private final ConcurrentMap<String, InputHost> knownSrcHosts;
private final BlockingQueue<InputHost> pendingHosts;
private final Set<InputAttemptIdentifier> obsoletedInputs;
private Set<Fetcher> runningFetchers;
private final AtomicInteger numCompletedInputs = new AtomicInteger(0);
private final AtomicInteger numFetchedSpills = new AtomicInteger(0);
// Progress/timing bookkeeping (guarded by `lock` where mutated)
private final long startTime;
private long lastProgressTime;
private long totalBytesShuffledTillNow;
// Required to be held when manipulating pendingHosts
private final ReentrantLock lock = new ReentrantLock();
private final Condition wakeLoop = lock.newCondition();
private final int numFetchers;
private final boolean asyncHttp;
// Parameters required by Fetchers
private final JobTokenSecretManager jobTokenSecretMgr;
private final CompressionCodec codec;
private final boolean localDiskFetchEnabled;
private final boolean sharedFetchEnabled;
private final int ifileBufferSize;
private final boolean ifileReadAhead;
private final int ifileReadAheadLength;
private final String srcNameTrimmed;
private final int maxTaskOutputAtOnce;
private final AtomicBoolean isShutdown = new AtomicBoolean(false);
// Counters reported back through the InputContext
private final TezCounter shuffledInputsCounter;
private final TezCounter failedShufflesCounter;
private final TezCounter bytesShuffledCounter;
private final TezCounter decompressedDataSizeCounter;
private final TezCounter bytesShuffledToDiskCounter;
private final TezCounter bytesShuffledToMemCounter;
private final TezCounter bytesShuffledDirectDiskCounter;
// Set once by a failing fetcher/scheduler callback; checked by the scheduler loop
private volatile Throwable shuffleError;
private final HttpConnectionParams httpConnectionParams;
// Local-fetch support
private final LocalDirAllocator localDirAllocator;
private final RawLocalFileSystem localFs;
private final Path[] localDisks;
private final String localhostName;
private final int shufflePort;
private final TezCounter shufflePhaseTime;
private final TezCounter firstEventReceived;
private final TezCounter lastEventReceived;
//To track shuffleInfo events when finalMerge is disabled OR pipelined shuffle is enabled in source.
@VisibleForTesting
final Map<InputIdentifier, ShuffleEventInfo> shuffleInfoEventsMap;
/**
 * Builds a ShuffleManager for one source vertex: wires up counters,
 * reads fetch-related configuration, creates the fetcher/scheduler
 * executors, and resolves the shuffle secret and local-disk layout.
 *
 * @param inputContext context of the input this manager serves
 * @param conf runtime configuration (fetch parallelism, local fetch, ...)
 * @param numInputs number of logical inputs to fetch
 * @param bufferSize IFile buffer size
 * @param ifileReadAheadEnabled whether IFile read-ahead is enabled
 * @param ifileReadAheadLength read-ahead length in bytes
 * @param codec compression codec for shuffled data, may be null
 * @param inputAllocator allocator deciding memory vs disk for fetched inputs
 * @throws IOException if the local filesystem or shuffle metadata cannot be resolved
 */
public ShuffleManager(InputContext inputContext, Configuration conf, int numInputs,
int bufferSize, boolean ifileReadAheadEnabled, int ifileReadAheadLength,
CompressionCodec codec, FetchedInputAllocator inputAllocator) throws IOException {
this.inputContext = inputContext;
this.numInputs = numInputs;
this.shuffledInputsCounter = inputContext.getCounters().findCounter(TaskCounter.NUM_SHUFFLED_INPUTS);
this.failedShufflesCounter = inputContext.getCounters().findCounter(TaskCounter.NUM_FAILED_SHUFFLE_INPUTS);
this.bytesShuffledCounter = inputContext.getCounters().findCounter(TaskCounter.SHUFFLE_BYTES);
this.decompressedDataSizeCounter = inputContext.getCounters().findCounter(TaskCounter.SHUFFLE_BYTES_DECOMPRESSED);
this.bytesShuffledToDiskCounter = inputContext.getCounters().findCounter(TaskCounter.SHUFFLE_BYTES_TO_DISK);
this.bytesShuffledToMemCounter = inputContext.getCounters().findCounter(TaskCounter.SHUFFLE_BYTES_TO_MEM);
this.bytesShuffledDirectDiskCounter = inputContext.getCounters().findCounter(TaskCounter.SHUFFLE_BYTES_DISK_DIRECT);
this.ifileBufferSize = bufferSize;
this.ifileReadAhead = ifileReadAheadEnabled;
this.ifileReadAheadLength = ifileReadAheadLength;
this.codec = codec;
this.inputManager = inputAllocator;
this.localDiskFetchEnabled = conf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH,
TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH_DEFAULT);
this.sharedFetchEnabled = conf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_SHARED_FETCH,
TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_SHARED_FETCH_DEFAULT);
this.shufflePhaseTime = inputContext.getCounters().findCounter(TaskCounter.SHUFFLE_PHASE_TIME);
this.firstEventReceived = inputContext.getCounters().findCounter(TaskCounter.FIRST_EVENT_RECEIVED);
this.lastEventReceived = inputContext.getCounters().findCounter(TaskCounter.LAST_EVENT_RECEIVED);
this.srcNameTrimmed = TezUtilsInternal.cleanVertexName(inputContext.getSourceVertexName());
completedInputSet = Collections.newSetFromMap(new ConcurrentHashMap<InputIdentifier, Boolean>(numInputs));
/**
* In case of pipelined shuffle, it is possible to get multiple FetchedInput per attempt.
* We do not know upfront the number of spills from source.
*/
completedInputs = new LinkedBlockingDeque<FetchedInput>();
knownSrcHosts = new ConcurrentHashMap<String, InputHost>();
pendingHosts = new LinkedBlockingQueue<InputHost>();
obsoletedInputs = Collections.newSetFromMap(new ConcurrentHashMap<InputAttemptIdentifier, Boolean>());
runningFetchers = Collections.newSetFromMap(new ConcurrentHashMap<Fetcher, Boolean>());
// Fetcher parallelism never exceeds the number of inputs to fetch.
int maxConfiguredFetchers =
conf.getInt(
TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES,
TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES_DEFAULT);
this.numFetchers = Math.min(maxConfiguredFetchers, numInputs);
ExecutorService fetcherRawExecutor = Executors.newFixedThreadPool(
numFetchers,
new ThreadFactoryBuilder().setDaemon(true)
.setNameFormat("Fetcher {" + srcNameTrimmed + "} #%d").build());
this.fetcherExecutor = MoreExecutors.listeningDecorator(fetcherRawExecutor);
// Single daemon thread runs the scheduling loop (RunShuffleCallable).
ExecutorService schedulerRawExecutor = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder()
.setDaemon(true).setNameFormat("ShuffleRunner {" + srcNameTrimmed + "}").build());
this.schedulerExecutor = MoreExecutors.listeningDecorator(schedulerRawExecutor);
this.schedulerCallable = new RunShuffleCallable(conf);
this.startTime = System.currentTimeMillis();
this.lastProgressTime = startTime;
// Shuffle secret comes from the service consumer metadata of the shuffle handler.
SecretKey shuffleSecret = ShuffleUtils
.getJobTokenSecretFromTokenBytes(inputContext
.getServiceConsumerMetaData(TezConstants.TEZ_SHUFFLE_HANDLER_SERVICE_ID));
this.jobTokenSecretMgr = new JobTokenSecretManager(shuffleSecret);
this.asyncHttp = conf.getBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_USE_ASYNC_HTTP, false);
httpConnectionParams = ShuffleUtils.getHttpConnectionParams(conf);
this.localFs = (RawLocalFileSystem) FileSystem.getLocal(conf).getRaw();
this.localDirAllocator = new LocalDirAllocator(
TezRuntimeFrameworkConfigs.LOCAL_DIRS);
this.localDisks = Iterables.toArray(
localDirAllocator.getAllLocalPathsToRead(".", conf), Path.class);
this.localhostName = inputContext.getExecutionContext().getHostName();
final ByteBuffer shuffleMetaData =
inputContext.getServiceProviderMetaData(ShuffleUtils.SHUFFLE_HANDLER_SERVICE_ID);
this.shufflePort = ShuffleUtils.deserializeShuffleProviderMetaData(shuffleMetaData);
/**
* Setting to very high val can lead to Http 400 error. Cap it to 75; every attempt id would
* be approximately 48 bytes; 48 * 75 = 3600 which should give some room for other info in URL.
*/
this.maxTaskOutputAtOnce = Math.max(1, Math.min(75, conf.getInt(
TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_MAX_TASK_OUTPUT_AT_ONCE,
TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_MAX_TASK_OUTPUT_AT_ONCE_DEFAULT)));
// Sorted so that lock-disk selection in constructFetcherForHost is stable.
Arrays.sort(this.localDisks);
shuffleInfoEventsMap = new ConcurrentHashMap<InputIdentifier, ShuffleEventInfo>();
LOG.info(srcNameTrimmed + ": numInputs=" + numInputs + ", compressionCodec="
+ (codec == null ? "NoCompressionCodec" : codec.getClass().getName()) + ", numFetchers="
+ numFetchers + ", ifileBufferSize=" + ifileBufferSize + ", ifileReadAheadEnabled="
+ ifileReadAhead + ", ifileReadAheadLength=" + ifileReadAheadLength +", "
+ "localDiskFetchEnabled=" + localDiskFetchEnabled + ", "
+ "sharedFetchEnabled=" + sharedFetchEnabled + ", "
+ httpConnectionParams.toString() + ", maxTaskOutputAtOnce=" + maxTaskOutputAtOnce);
}
/**
 * Starts the shuffle scheduler on its dedicated executor and arranges
 * for that executor to wind down once the scheduler callable (and its
 * completion callback) finish.
 *
 * @throws IOException declared for interface compatibility
 */
public void run() throws IOException {
  Preconditions.checkState(inputManager != null, "InputManager must be configured");
  final ListenableFuture<Void> schedulerFuture = schedulerExecutor.submit(schedulerCallable);
  Futures.addCallback(schedulerFuture, new SchedulerFutureCallback());
  // Shutdown this executor once this task, and the callback complete.
  schedulerExecutor.shutdown();
}
/**
 * The scheduling loop: waits (on {@code wakeLoop}) until there is fetcher
 * capacity and pending hosts, then drains {@code pendingHosts}, building
 * and submitting one Fetcher per host until the fetcher budget for this
 * round is used up.  Exits when all inputs are complete, the manager is
 * shut down, or a fatal shuffle error has been recorded.
 */
private class RunShuffleCallable extends CallableWithNdc<Void> {
// Configuration captured at construction, passed through to fetcher creation.
private final Configuration conf;
public RunShuffleCallable(Configuration conf) {
this.conf = conf;
}
@Override
protected Void callInternal() throws Exception {
while (!isShutdown.get() && numCompletedInputs.get() < numInputs) {
lock.lock();
try {
// Sleep until signalled: either no fetcher slots free or nothing pending.
if (runningFetchers.size() >= numFetchers || pendingHosts.isEmpty()) {
if (numCompletedInputs.get() < numInputs) {
wakeLoop.await();
}
}
} finally {
lock.unlock();
}
if (shuffleError != null) {
// InputContext has already been informed of a fatal error. Relying on
// tez to kill the task.
break;
}
if (LOG.isDebugEnabled()) {
LOG.debug(srcNameTrimmed + ": " + "NumCompletedInputs: " + numCompletedInputs);
}
if (numCompletedInputs.get() < numInputs && !isShutdown.get()) {
lock.lock();
try {
// Launch at most (numFetchers - running) fetchers this round.
int maxFetchersToRun = numFetchers - runningFetchers.size();
int count = 0;
while (pendingHosts.peek() != null && !isShutdown.get()) {
InputHost inputHost = null;
try {
inputHost = pendingHosts.take();
} catch (InterruptedException e) {
if (isShutdown.get()) {
LOG.info(srcNameTrimmed + ": " + "Interrupted and hasBeenShutdown, Breaking out of ShuffleScheduler Loop");
Thread.currentThread().interrupt();
break;
} else {
throw e;
}
}
if (LOG.isDebugEnabled()) {
LOG.debug(srcNameTrimmed + ": " + "Processing pending host: " + inputHost.toDetailedString());
}
if (inputHost.getNumPendingInputs() > 0 && !isShutdown.get()) {
Fetcher fetcher = constructFetcherForHost(inputHost, conf);
runningFetchers.add(fetcher);
if (isShutdown.get()) {
LOG.info(srcNameTrimmed + ": " + "hasBeenShutdown, Breaking out of ShuffleScheduler Loop");
break;
}
ListenableFuture<FetchResult> future = fetcherExecutor
.submit(fetcher);
Futures.addCallback(future, new FetchFutureCallback(fetcher));
if (++count >= maxFetchersToRun) {
break;
}
} else {
if (LOG.isDebugEnabled()) {
LOG.debug(srcNameTrimmed + ": " + "Skipping host: " + inputHost.getIdentifier()
+ " since it has no inputs to process");
}
}
}
} finally {
lock.unlock();
}
}
}
shufflePhaseTime.setValue(System.currentTimeMillis() - startTime);
LOG.info(srcNameTrimmed + ": " + "Shutting down FetchScheduler, Was Interrupted: " + Thread.currentThread().isInterrupted());
// Any fetchers still running are forcefully interrupted on exit.
if (!fetcherExecutor.isShutdown()) {
fetcherExecutor.shutdownNow();
}
return null;
}
}
/**
 * Guards against mixing chunks from different attempts of the same
 * source task when pipelined shuffle is in use.  If a chunk arrives
 * from an attempt other than the one already recorded for this input,
 * a fatal error is reported and {@code false} is returned.
 *
 * @param input the attempt being considered for fetching
 * @return true if the attempt may be fetched, false if it was rejected
 */
private boolean validateInputAttemptForPipelinedShuffle(InputAttemptIdentifier input) {
  // Non-chunked inputs never conflict across attempts.
  if (!input.canRetrieveInputInChunks()) {
    return true;
  }
  //TODO: TEZ-2132 for error handling. As of now, fail fast if there is a different attempt
  ShuffleEventInfo eventInfo = shuffleInfoEventsMap.get(input.getInputIdentifier());
  if (eventInfo == null || input.getAttemptNumber() == eventInfo.attemptNum) {
    return true;
  }
  //speculative attempts or failure attempts. Fail fast here.
  reportFatalError(new IOException(), input + " already exists. "
      + "Previous attempt's data could have been already merged "
      + "to memory/disk outputs. Failing the fetch early. currentAttemptNum=" + eventInfo
      .attemptNum + ", eventsProcessed=" + eventInfo.eventsProcessed + ", newAttemptNum=" +
      input.getAttemptNumber());
  return false;
}
/**
 * Builds a Fetcher for one host: drains the host's pending inputs,
 * filters out completed/obsolete ones, caps the batch at
 * {@code maxTaskOutputAtOnce} (returning the overflow to the host),
 * and re-queues the host if it still has pending inputs.
 *
 * @param inputHost the host whose pending inputs should be fetched
 * @param conf runtime configuration passed through to the fetcher
 * @return a fully configured Fetcher for the selected inputs
 */
private Fetcher constructFetcherForHost(InputHost inputHost, Configuration conf) {
Path lockDisk = null;
if (sharedFetchEnabled) {
// pick a single lock disk from the edge name's hashcode + host hashcode
final int h = Math.abs(Objects.hashCode(this.srcNameTrimmed, inputHost.getHost()));
lockDisk = new Path(this.localDisks[h % this.localDisks.length], "locks");
}
FetcherBuilder fetcherBuilder = new FetcherBuilder(ShuffleManager.this,
httpConnectionParams, inputManager, inputContext.getApplicationId(), inputContext.getDagIdentifier(),
jobTokenSecretMgr, srcNameTrimmed, conf, localFs, localDirAllocator,
lockDisk, localDiskFetchEnabled, sharedFetchEnabled,
localhostName, shufflePort, asyncHttp);
if (codec != null) {
fetcherBuilder.setCompressionParameters(codec);
}
fetcherBuilder.setIFileParams(ifileReadAhead, ifileReadAheadLength);
// Remove obsolete inputs from the list being given to the fetcher. Also
// remove from the obsolete list.
List<InputAttemptIdentifier> pendingInputsForHost = inputHost
.clearAndGetPendingInputs();
int includedMaps = 0;
for (Iterator<InputAttemptIdentifier> inputIter = pendingInputsForHost
.iterator(); inputIter.hasNext();) {
InputAttemptIdentifier input = inputIter.next();
//For pipelined shuffle.
// NOTE(review): when validation fails the input is NOT removed from
// the iterator before `continue`, so it stays in the list handed to
// the fetcher — confirm this is intentional (see TEZ-2132).
if (!validateInputAttemptForPipelinedShuffle(input)) {
continue;
}
// Avoid adding attempts which have already completed.
if (completedInputSet.contains(input.getInputIdentifier())) {
inputIter.remove();
continue;
}
// Avoid adding attempts which have been marked as OBSOLETE
if (obsoletedInputs.contains(input)) {
inputIter.remove();
continue;
}
// Check if max threshold is met
if (includedMaps >= maxTaskOutputAtOnce) {
inputIter.remove();
inputHost.addKnownInput(input); //add to inputHost
} else {
includedMaps++;
}
}
// Overflow inputs were returned to the host; make it eligible again.
if (inputHost.getNumPendingInputs() > 0) {
pendingHosts.add(inputHost); //add it to queue
}
fetcherBuilder.assignWork(inputHost.getHost(), inputHost.getPort(),
inputHost.getSrcPhysicalIndex(), pendingInputsForHost);
if (LOG.isDebugEnabled()) {
LOG.debug("Created Fetcher for host: " + inputHost.getHost()
+ ", info: " + inputHost.getAdditionalInfo()
+ ", with inputs: " + pendingInputsForHost);
}
return fetcherBuilder.build();
}
/////////////////// Methods for InputEventHandler
/**
 * Records a newly announced source output: registers (or reuses) the
 * InputHost for {@code hostName:port}, validates the attempt for
 * pipelined shuffle, tracks its ShuffleEventInfo, queues the host for
 * fetching, and wakes the scheduler loop.
 *
 * @param hostName host that holds the source output
 * @param port shuffle port on that host
 * @param srcAttemptIdentifier the source attempt whose output is available
 * @param srcPhysicalIndex physical output index on the source
 */
public void addKnownInput(String hostName, int port,
InputAttemptIdentifier srcAttemptIdentifier, int srcPhysicalIndex) {
String identifier = InputHost.createIdentifier(hostName, port);
InputHost host = knownSrcHosts.get(identifier);
if (host == null) {
host = new InputHost(hostName, port, inputContext.getApplicationId(), srcPhysicalIndex);
assert identifier.equals(host.getIdentifier());
// putIfAbsent: another thread may have registered the host concurrently.
InputHost old = knownSrcHosts.putIfAbsent(identifier, host);
if (old != null) {
host = old;
}
}
if (LOG.isDebugEnabled()) {
LOG.debug(srcNameTrimmed + ": " + "Adding input: " + srcAttemptIdentifier + ", to host: " + host);
}
// Reject conflicting attempts early (pipelined shuffle only).
if (!validateInputAttemptForPipelinedShuffle(srcAttemptIdentifier)) {
return;
}
InputIdentifier inputIdentifier = srcAttemptIdentifier.getInputIdentifier();
if (shuffleInfoEventsMap.get(inputIdentifier) == null) {
shuffleInfoEventsMap.put(inputIdentifier, new ShuffleEventInfo(srcAttemptIdentifier));
}
host.addKnownInput(srcAttemptIdentifier);
lock.lock();
try {
boolean added = pendingHosts.offer(host);
if (!added) {
String errorMessage = "Unable to add host: " + host.getIdentifier() + " to pending queue";
LOG.error(errorMessage);
throw new TezUncheckedException(errorMessage);
}
// Wake the scheduler so it can pick up the new pending host.
wakeLoop.signal();
} finally {
lock.unlock();
}
}
/**
 * Marks an input as complete without any data to fetch (the source task
 * produced no output for it).  Registers a NullFetchedInput so the
 * completion bookkeeping stays uniform, then wakes the scheduler loop
 * so it can notice overall completion.
 *
 * @param srcAttemptIdentifier the source attempt that produced no data
 */
public void addCompletedInputWithNoData(
InputAttemptIdentifier srcAttemptIdentifier) {
InputIdentifier inputIdentifier = srcAttemptIdentifier.getInputIdentifier();
if (LOG.isDebugEnabled()) {
LOG.debug("No input data exists for SrcTask: " + inputIdentifier + ". Marking as complete.");
}
// Check-then-act guarded by a second check under the monitor, so an
// input is only registered once even with concurrent callers.
if (!completedInputSet.contains(inputIdentifier)) {
synchronized (completedInputSet) {
if (!completedInputSet.contains(inputIdentifier)) {
NullFetchedInput fetchedInput = new NullFetchedInput(srcAttemptIdentifier);
if (!srcAttemptIdentifier.canRetrieveInputInChunks()) {
registerCompletedInput(fetchedInput);
} else {
registerCompletedInputForPipelinedShuffle(srcAttemptIdentifier, fetchedInput);
}
}
}
}
// Awake the loop to check for termination.
lock.lock();
try {
wakeLoop.signal();
} finally {
lock.unlock();
}
}
/**
 * Records, relative to this manager's start time, when the most recent
 * shuffle event arrived.  The very first event also initializes the
 * first-event counter (a zero value is treated as "not yet set").
 */
protected synchronized void updateEventReceivedTime() {
  final long elapsedMillis = System.currentTimeMillis() - startTime;
  if (firstEventReceived.getValue() == 0) {
    firstEventReceived.setValue(elapsedMillis);
  }
  lastEventReceived.setValue(elapsedMillis);
}
/**
 * Marks the given source attempt as obsolete so it is skipped when fetch
 * lists are built in {@code constructFetcherForHost}.
 *
 * @param srcAttemptIdentifier the attempt to exclude from future fetches
 */
public synchronized void obsoleteKnownInput(InputAttemptIdentifier srcAttemptIdentifier) {
obsoletedInputs.add(srcAttemptIdentifier);
// TODO NEWTEZ Maybe inform the fetcher about this. For now, this is used during the initial fetch list construction.
}
/////////////////// End of Methods for InputEventHandler
/////////////////// Methods from FetcherCallbackHandler
/**
* Placeholder for tracking shuffle events in case we get multiple spills info for the same
* attempt.
*/
/**
 * Placeholder for tracking shuffle events in case we get multiple spills info for the same
 * attempt.  One instance per input; records which spill ids have been
 * processed and which attempt number the input's events belong to.
 */
static class ShuffleEventInfo {
  // Bit i is set once the event for spill id i has been processed.
  BitSet eventsProcessed;
  int finalEventId = -1; //0 indexed
  // Attempt number the first event for this input arrived from.
  int attemptNum;
  // Debug identifier: "<inputIndex>_<attemptNumber>".
  String id;

  ShuffleEventInfo(InputAttemptIdentifier input) {
    this.id = input.getInputIdentifier().getInputIndex() + "_" + input.getAttemptNumber();
    this.eventsProcessed = new BitSet();
    this.attemptNum = input.getAttemptNumber();
  }

  /** Marks the event for the given spill id as processed. */
  void spillProcessed(int spillId) {
    if (finalEventId != -1) {
      // Once the final event id is known, we can sanity-check the count.
      Preconditions.checkState(eventsProcessed.cardinality() <= (finalEventId + 1),
          "Wrong state. eventsProcessed cardinality=" + eventsProcessed.cardinality() + " "
              + "finalEventId=" + finalEventId + ", spillId=" + spillId + ", " + toString());
    }
    eventsProcessed.set(spillId);
  }

  /** Records the id of the last spill for this input. */
  void setFinalEventId(int spillId) {
    finalEventId = spillId;
  }

  /**
   * @return true once the final spill id is known and every spill event
   *         up to and including it has been processed
   */
  boolean isDone() {
    if (LOG.isDebugEnabled()) {
      LOG.debug("finalEventId=" + finalEventId + ", eventsProcessed cardinality=" +
          eventsProcessed.cardinality());
    }
    return ((finalEventId != -1) && (finalEventId + 1) == eventsProcessed.cardinality());
  }

  // Fixed: toString override was missing the @Override annotation.
  @Override
  public String toString() {
    return "[eventsProcessed=" + eventsProcessed + ", finalEventId=" + finalEventId
        + ", id=" + id + ", attemptNum=" + attemptNum + "]";
  }
}
/**
 * Fetcher callback invoked on a successful fetch of one input's data.
 * Commits the FetchedInput exactly once per input (duplicate fetches of
 * an already-completed input are aborted), updates shuffle counters and
 * progress, registers completion, and wakes the scheduler loop.
 *
 * @param host host the data was fetched from
 * @param srcAttemptIdentifier source attempt the data belongs to
 * @param fetchedInput the fetched (uncommitted) data
 * @param fetchedBytes bytes transferred (possibly compressed)
 * @param decompressedLength decompressed size of the data
 * @param copyDuration time taken for the copy, used for logging
 * @throws IOException if committing the fetched input fails
 */
@Override
public void fetchSucceeded(String host, InputAttemptIdentifier srcAttemptIdentifier,
FetchedInput fetchedInput, long fetchedBytes, long decompressedLength, long copyDuration)
throws IOException {
InputIdentifier inputIdentifier = srcAttemptIdentifier.getInputIdentifier();
// Count irrespective of whether this is a copy of an already fetched input
lock.lock();
try {
lastProgressTime = System.currentTimeMillis();
} finally {
lock.unlock();
}
boolean committed = false;
// Double-checked under the completedInputSet monitor so each input is
// committed at most once even with concurrent fetchers.
if (!completedInputSet.contains(inputIdentifier)) {
synchronized (completedInputSet) {
if (!completedInputSet.contains(inputIdentifier)) {
fetchedInput.commit();
committed = true;
ShuffleUtils.logIndividualFetchComplete(LOG, copyDuration,
fetchedBytes, decompressedLength, fetchedInput.getType().toString(), srcAttemptIdentifier);
// Processing counters for completed and commit fetches only. Need
// additional counters for excessive fetches - which primarily comes
// in after speculation or retries.
shuffledInputsCounter.increment(1);
bytesShuffledCounter.increment(fetchedBytes);
if (fetchedInput.getType() == Type.MEMORY) {
bytesShuffledToMemCounter.increment(fetchedBytes);
} else if (fetchedInput.getType() == Type.DISK) {
bytesShuffledToDiskCounter.increment(fetchedBytes);
} else if (fetchedInput.getType() == Type.DISK_DIRECT) {
bytesShuffledDirectDiskCounter.increment(fetchedBytes);
}
decompressedDataSizeCounter.increment(decompressedLength);
if (!srcAttemptIdentifier.canRetrieveInputInChunks()) {
registerCompletedInput(fetchedInput);
} else {
registerCompletedInputForPipelinedShuffle(srcAttemptIdentifier, fetchedInput);
}
lock.lock();
try {
totalBytesShuffledTillNow += fetchedBytes;
logProgress();
} finally {
lock.unlock();
}
}
}
}
if (!committed) {
fetchedInput.abort(); // If this fails, the fetcher may attempt another abort.
} else {
lock.lock();
try {
// Signal the wakeLoop to check for termination.
wakeLoop.signal();
} finally {
lock.unlock();
}
}
// TODO NEWTEZ Maybe inform fetchers, in case they have an alternate attempt of the same task in their queue.
}
// Registers a fully-fetched (non-pipelined) input: publishes it to consumers,
// updates completion state, and counts it as one fetched spill. All three
// steps happen under 'lock' so completion checks observe a consistent state.
private void registerCompletedInput(FetchedInput fetchedInput) {
  lock.lock();
  try {
    maybeInformInputReady(fetchedInput);
    adjustCompletedInputs(fetchedInput);
    numFetchedSpills.getAndIncrement();
  } finally {
    lock.unlock();
  }
}
// Queues the fetched input for downstream consumption and, exactly once per
// instance (guarded by the inputReadyNotificationSent CAS), notifies the
// InputContext that input is available.
private void maybeInformInputReady(FetchedInput fetchedInput) {
  lock.lock();
  try {
    completedInputs.add(fetchedInput);
    if (!inputReadyNotificationSent.getAndSet(true)) {
      // TODO Should eventually be controlled by Inputs which are processing the data.
      inputContext.inputIsReady();
    }
  } finally {
    lock.unlock();
  }
}
// Marks the input's identifier as completed (so duplicate fetches are dropped)
// and bumps the completed-input count, logging when the last input arrives.
private void adjustCompletedInputs(FetchedInput fetchedInput) {
  lock.lock();
  try {
    completedInputSet.add(fetchedInput.getInputAttemptIdentifier().getInputIdentifier());
    int numComplete = numCompletedInputs.incrementAndGet();
    if (numComplete == numInputs) {
      LOG.info("All inputs fetched for input vertex : " + inputContext.getSourceVertexName());
    }
  } finally {
    lock.unlock();
  }
}
// Registers one fetched spill of a pipelined input. The input is only counted
// as completed once all spills (up to the FINAL_UPDATE spill id) have arrived.
private void registerCompletedInputForPipelinedShuffle(InputAttemptIdentifier
    srcAttemptIdentifier, FetchedInput fetchedInput) {
  /**
   * For pipelinedshuffle it is possible to get multiple spills. Claim success only when
   * all spills pertaining to an attempt are done.
   */
  if (!validateInputAttemptForPipelinedShuffle(srcAttemptIdentifier)) {
    return;
  }
  InputIdentifier inputIdentifier = srcAttemptIdentifier.getInputIdentifier();
  ShuffleEventInfo eventInfo = shuffleInfoEventsMap.get(inputIdentifier);
  //for empty partition case
  if (eventInfo == null && fetchedInput instanceof NullFetchedInput) {
    eventInfo = new ShuffleEventInfo(srcAttemptIdentifier);
    shuffleInfoEventsMap.put(inputIdentifier, eventInfo);
  }
  assert(eventInfo != null);
  eventInfo.spillProcessed(srcAttemptIdentifier.getSpillEventId());
  numFetchedSpills.getAndIncrement();
  // FINAL_UPDATE carries the highest spill id; record it so isDone() can fire.
  if (srcAttemptIdentifier.getFetchTypeInfo() == InputAttemptIdentifier.SPILL_INFO.FINAL_UPDATE) {
    eventInfo.setFinalEventId(srcAttemptIdentifier.getSpillEventId());
  }
  lock.lock();
  try {
    /**
     * When fetch is complete for a spill, add it to completedInputs to ensure that it is
     * available for downstream processing. Final success will be claimed only when all
     * spills are downloaded from the source.
     */
    maybeInformInputReady(fetchedInput);
    //check if we downloaded all spills pertaining to this InputAttemptIdentifier
    if (eventInfo.isDone()) {
      adjustCompletedInputs(fetchedInput);
      shuffleInfoEventsMap.remove(srcAttemptIdentifier.getInputIdentifier());
    }
  } finally {
    lock.unlock();
  }
  if (LOG.isTraceEnabled()) {
    LOG.trace("eventInfo " + eventInfo.toString());
  }
}
/**
 * Logs a fatal shuffle error and notifies the InputContext.
 * Fix: log the Throwable alongside the message (previously only the message
 * was logged, dropping the stack trace). {@code exception} may be null
 * (e.g. fetchFailed with an unknown source).
 */
private void reportFatalError(Throwable exception, String message) {
  LOG.error(message, exception);
  inputContext.fatalError(exception, message);
}
/**
 * Callback from a fetcher when an input could not be fetched. Sends an
 * InputReadErrorEvent so the producer attempt can be re-run; a null source
 * identifier is treated as fatal since the failure cannot be attributed.
 *
 * @param host                 host the fetch was attempted from (unused here)
 * @param srcAttemptIdentifier source attempt that failed; may be null
 * @param connectFailed        true if the connection itself failed
 */
@Override
public void fetchFailed(String host,
    InputAttemptIdentifier srcAttemptIdentifier, boolean connectFailed) {
  // TODO NEWTEZ. Implement logic to report fetch failures after a threshold.
  // For now, reporting immediately.
  // Fix: separate the identifier from the "InputIdentifier:" label (the two
  // were previously fused in the log message).
  LOG.info(srcNameTrimmed + ": " + "Fetch failed for src: " + srcAttemptIdentifier
      + ", InputIdentifier: " + srcAttemptIdentifier + ", connectFailed: "
      + connectFailed);
  failedShufflesCounter.increment(1);
  if (srcAttemptIdentifier == null) {
    reportFatalError(null, "Received fetchFailure for an unknown src (null)");
  } else {
    InputReadErrorEvent readError = InputReadErrorEvent.create(
        "Fetch failure while fetching from "
            + TezRuntimeUtils.getTaskAttemptIdentifier(
                inputContext.getSourceVertexName(),
                srcAttemptIdentifier.getInputIdentifier().getInputIndex(),
                srcAttemptIdentifier.getAttemptNumber()),
        srcAttemptIdentifier.getInputIdentifier().getInputIndex(),
        srcAttemptIdentifier.getAttemptNumber());
    List<Event> failedEvents = Lists.newArrayListWithCapacity(1);
    failedEvents.add(readError);
    inputContext.sendEvents(failedEvents);
  }
}
/////////////////// End of Methods from FetcherCallbackHandler
/**
 * Shuts down the shuffle machinery: wakes the scheduler loop, shuts down all
 * running fetchers, then stops the scheduler and fetcher executors.
 * Idempotent — the isShutdown CAS ensures the teardown runs at most once.
 *
 * @throws InterruptedException declared for callers; not thrown directly here
 */
public void shutdown() throws InterruptedException {
  if (Thread.currentThread().isInterrupted()) {
    //TODO: need to cleanup all FetchedInput (DiskFetchedInput, LocalDisFetchedInput), lockFile
    //As of now relying on job cleanup (when all directories would be cleared)
    LOG.info(srcNameTrimmed + ": " + "Thread interrupted. Need to cleanup the local dirs");
  }
  if (!isShutdown.getAndSet(true)) {
    // Shut down any pending fetchers
    LOG.info("Shutting down pending fetchers on source" + srcNameTrimmed + ": "
        + runningFetchers.size());
    lock.lock();
    try {
      wakeLoop.signal(); // signal the fetch-scheduler
      for (Fetcher fetcher : runningFetchers) {
        fetcher.shutdown(); // This could be parallelized.
      }
    } finally {
      lock.unlock();
    }
    // Stop the scheduling thread before interrupting the fetcher pool.
    if (this.schedulerExecutor != null && !this.schedulerExecutor.isShutdown()) {
      this.schedulerExecutor.shutdownNow();
    }
    if (this.fetcherExecutor != null && !this.fetcherExecutor.isShutdown()) {
      this.fetcherExecutor.shutdownNow(); // Interrupts all running fetchers.
    }
  }
}
/////////////////// Methods for walking the available inputs
/**
 * @return true if there is another input ready for consumption.
 */
public boolean newInputAvailable() {
  // NullFetchedInput is a placeholder for empty inputs and carries no data.
  FetchedInput next = completedInputs.peek();
  return (next != null) && !(next instanceof NullFetchedInput);
}
/**
 * @return true if all of the required inputs have been fetched.
 */
public boolean allInputsFetched() {
  lock.lock();
  try {
    int fetchedSoFar = numCompletedInputs.get();
    return fetchedSoFar == numInputs;
  } finally {
    lock.unlock();
  }
}
/**
 * @return the next available input, or null if there are no available inputs.
 *         This method will block if there are currently no available inputs,
 *         but more may become available.
 */
public FetchedInput getNextInput() throws InterruptedException {
  FetchedInput input = null;
  do {
    // Check for no additional inputs
    lock.lock();
    try {
      input = completedInputs.peek();
      // Queue is empty and everything has been fetched: no more inputs, ever.
      if (input == null && allInputsFetched()) {
        break;
      }
    } finally {
      lock.unlock();
    }
    // Blocking take outside the lock; NullFetchedInput placeholders (empty
    // partitions) are skipped by the loop condition.
    input = completedInputs.take(); // block
  } while (input instanceof NullFetchedInput);
  return input;
}
/////////////////// End of methods for walking the available inputs
/**
 * Fake input that is added to the completed input list in case an input does not have any data.
 */
private static class NullFetchedInput extends FetchedInput {

  public NullFetchedInput(InputAttemptIdentifier inputAttemptIdentifier) {
    super(Type.MEMORY, -1, -1, inputAttemptIdentifier, null);
  }

  // A placeholder input carries no data, so every data-oriented operation
  // fails with the same exception.
  private static UnsupportedOperationException unsupported() {
    return new UnsupportedOperationException("Not supported for NullFetchedInput");
  }

  @Override
  public OutputStream getOutputStream() throws IOException {
    throw unsupported();
  }

  @Override
  public InputStream getInputStream() throws IOException {
    throw unsupported();
  }

  @Override
  public void commit() throws IOException {
    throw unsupported();
  }

  @Override
  public void abort() throws IOException {
    throw unsupported();
  }

  @Override
  public void free() {
    throw unsupported();
  }
}
// Next completed-input count at which a progress line should be emitted;
// advanced by 50 each time a line is logged (throttles log volume).
private final AtomicInteger nextProgressLineEventCount = new AtomicInteger(0);

// Logs shuffle progress: inputs done, spills fetched, and cumulative transfer
// rate. Called while holding 'lock' (see fetchSucceeded), which guards the
// read of totalBytesShuffledTillNow.
private void logProgress() {
  int inputsDone = numCompletedInputs.get();
  if (inputsDone > nextProgressLineEventCount.get() || inputsDone == numInputs) {
    nextProgressLineEventCount.addAndGet(50);
    double mbs = (double) totalBytesShuffledTillNow / (1024 * 1024);
    // +1 avoids division by zero in the first second after start.
    long secsSinceStart = (System.currentTimeMillis() - startTime) / 1000 + 1;
    double transferRate = mbs / secsSinceStart;
    LOG.info("copy(" + inputsDone + " (spillsFetched=" + numFetchedSpills.get() + ") of " +
        numInputs +
        ". Transfer rate (CumulativeDataFetched/TimeSinceInputStarted)) "
        + mbpsFormat.format(transferRate) + " MB/s)");
  }
}
/**
 * Completion callback for the scheduler thread: logs normal completion,
 * and escalates unexpected failures to the InputContext unless the manager
 * is already shutting down.
 */
private class SchedulerFutureCallback implements FutureCallback<Void> {

  @Override
  public void onSuccess(Void result) {
    LOG.info(srcNameTrimmed + ": " + "Scheduler thread completed");
  }

  @Override
  public void onFailure(Throwable t) {
    if (!isShutdown.get()) {
      LOG.error(srcNameTrimmed + ": " + "Scheduler failed with error: ", t);
      inputContext.fatalError(t, "Shuffle Scheduler Failed");
      return;
    }
    // Errors during/after shutdown are expected noise.
    if (LOG.isDebugEnabled()) {
      LOG.debug(srcNameTrimmed + ": " + "Already shutdown. Ignoring error: " + t);
    }
  }
}
/**
 * Completion callback for an individual fetcher. On success, re-queues any
 * inputs the fetcher did not complete back onto their host; on failure,
 * records the error and notifies the InputContext. In both cases the fetcher
 * is shut down and removed from the running set.
 */
private class FetchFutureCallback implements FutureCallback<FetchResult> {
  private final Fetcher fetcher;
  public FetchFutureCallback(Fetcher fetcher) {
    this.fetcher = fetcher;
  }
  // Removes this fetcher from the running set and wakes the scheduler loop
  // so it can launch a replacement.
  private void doBookKeepingForFetcherComplete() {
    lock.lock();
    try {
      runningFetchers.remove(fetcher);
      wakeLoop.signal();
    } finally {
      lock.unlock();
    }
  }
  @Override
  public void onSuccess(FetchResult result) {
    fetcher.shutdown();
    if (isShutdown.get()) {
      if (LOG.isDebugEnabled()) {
        LOG.debug(srcNameTrimmed + ": " + "Already shutdown. Ignoring event from fetcher");
      }
    } else {
      // Inputs the fetcher could not complete go back on the host's pending
      // list so a later fetcher can retry them.
      Iterable<InputAttemptIdentifier> pendingInputs = result.getPendingInputs();
      if (pendingInputs != null && pendingInputs.iterator().hasNext()) {
        InputHost inputHost = knownSrcHosts.get(InputHost.createIdentifier(result.getHost(), result.getPort()));
        assert inputHost != null;
        for (InputAttemptIdentifier input : pendingInputs) {
          inputHost.addKnownInput(input);
        }
        inputHost.setAdditionalInfo(result.getAdditionalInfo());
        pendingHosts.add(inputHost);
      }
      doBookKeepingForFetcherComplete();
    }
  }
  @Override
  public void onFailure(Throwable t) {
    // Unsuccessful - the fetcher may not have shutdown correctly. Try shutting it down.
    fetcher.shutdown();
    if (isShutdown.get()) {
      if (LOG.isDebugEnabled()) {
        LOG.debug(srcNameTrimmed + ": " + "Already shutdown. Ignoring error from fetcher: " + t);
      }
    } else {
      LOG.error(srcNameTrimmed + ": " + "Fetcher failed with error: ", t);
      shuffleError = t;
      inputContext.fatalError(t, "Fetch failed");
      doBookKeepingForFetcherComplete();
    }
  }
}
}
| |
//%2005////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2000, 2001, 2002 BMC Software; Hewlett-Packard Development
// Company, L.P.; IBM Corp.; The Open Group; Tivoli Systems.
// Copyright (c) 2003 BMC Software; Hewlett-Packard Development Company, L.P.;
// IBM Corp.; EMC Corporation, The Open Group.
// Copyright (c) 2004 BMC Software; Hewlett-Packard Development Company, L.P.;
// IBM Corp.; EMC Corporation; VERITAS Software Corporation; The Open Group.
// Copyright (c) 2005 Hewlett-Packard Development Company, L.P.; IBM Corp.;
// EMC Corporation; VERITAS Software Corporation; The Open Group.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// THE ABOVE COPYRIGHT NOTICE AND THIS PERMISSION NOTICE SHALL BE INCLUDED IN
// ALL COPIES OR SUBSTANTIAL PORTIONS OF THE SOFTWARE. THE SOFTWARE IS PROVIDED
// "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//==============================================================================
//
// Author: Adrian Schuur, schuur@de.ibm.com
//
// Modified By: Mark Hamzy, hamzy@us.ibm.com
//
//%/////////////////////////////////////////////////////////////////////////////
package org.pegasus.jmpi;
import java.util.*;
import org.pegasus.jmpi.*;
/**
 * JNI-backed client for a CIM server (Pegasus JMPI bindings).
 *
 * Native peers are referenced by opaque int handles: {@code cInst} is the
 * handle of the native client connection created by {@code _newNaUnPw}, and
 * {@code cNsInst} is the handle of the target namespace; both are passed to
 * every native call. Wrapper objects (CIMClass, CIMInstance, ...) are
 * constructed from the int handles the natives return. The native library
 * ("JMPIProviderManager") is loaded by the static initializer at the bottom
 * of the class.
 */
public class CIMClient {
    // Query language identifiers accepted by execQuery().
    public static final String WQL = "WQL";
    public static final String WQL1 = "WQL1";
    public static final String WQL2 = "WQL2";
    // NOTE(review): HTTP and XML share the value 2 — presumably only the HTTP
    // protocol is actually supported (see the type check in the 4-arg
    // constructor); confirm before relying on these being distinguishable.
    public static final int HTTP = 2;
    public static final int XML = 2;
    public static final boolean DEEP = true;

    CIMNameSpace ns;   // namespace object this client was created against
    int cInst;         // native handle of the client connection
    int cNsInst;       // native handle of the namespace (ns.cInst)

    // ---------------------------------------------------------------------
    // Native entry points. Parameters named 'cc', 'ns', 'path', 'ci' are
    // native handles of the client, namespace, object path, and instance.
    // An int return value is a handle to a newly created native object that
    // the corresponding Java wrapper takes ownership of.
    // ---------------------------------------------------------------------
    private native int _newNaUnPw (int name,
                                   String userName,
                                   String passWord);
    private native void _disconnect (int cc);
    private native int _getClass (int cc,
                                  int ns,
                                  int path,
                                  boolean localOnly,
                                  boolean includeQualifiers,
                                  boolean includeClassOrigin,
                                  String propertyList[]);
    private native int _deleteClass (int cc,
                                     int ns,
                                     int path);
    private native void _createClass (int cc,
                                      int ns,
                                      int path,
                                      int ci);
    private native void _setClass (int cc,
                                   int ns,
                                   int path,
                                   int ci);
    private native int _getInstance (int cc,
                                     int ns,
                                     int path,
                                     boolean localOnly,
                                     boolean includeQualifiers,
                                     boolean includeClassOrigin,
                                     String propertyList[]);
    private native void _deleteInstance (int cc,
                                         int ns,
                                         int path);
    private native int _createInstance (int cc,
                                        int ns,
                                        int path,
                                        int ci);
    private native void _modifyInstance (int cc,
                                         int ns,
                                         int path,
                                         int ci,
                                         boolean includeQualifiers,
                                         String propertyList[]);
    private native int _enumerateClasses (int cc,
                                          int ns,
                                          int path,
                                          boolean deep,
                                          boolean localOnly,
                                          boolean includeQualifiers,
                                          boolean includeClassOrigin);
    private native int _enumerateClassNames (int cc,
                                             int ns,
                                             int path,
                                             boolean deep);
    private native int _enumerateInstanceNames (int cc,
                                                int ns,
                                                int path,
                                                boolean deep);
    private native int _enumerateInstances (int cc,
                                            int ns,
                                            int path,
                                            boolean deep,
                                            boolean localOnly,
                                            boolean includeQualifiers,
                                            boolean includeClassOrigin,
                                            String propertyList[]);
    private native int _enumerateQualifiers (int cc,
                                             int ns,
                                             int path);
    private native int _getQualifier (int cc,
                                      int ns,
                                      int path);
    private native void _setQualifier (int cc,
                                       int ns,
                                       int path,
                                       int type);
    private native void _deleteQualifier (int cc,
                                          int ns,
                                          int path);
    private native int _getProperty (int cc,
                                     int ns,
                                     int path,
                                     String propertyName);
    private native void _setProperty (int cc,
                                      int ns,
                                      int path,
                                      String propertyName,
                                      int newValue);
    private native int _execQuery (int cc,
                                   int ns,
                                   int path,
                                   String query,
                                   String ql);
    private native int _invokeMethod (int cc,
                                      int ns,
                                      int path,
                                      String methodName,
                                      Vector inParams,
                                      Vector outParams)
        throws CIMException;
    private native int _invokeMethod24 (int cc,
                                        int ns,
                                        int path,
                                        String methodName,
                                        CIMArgument[] inParams,
                                        CIMArgument[] outParams)
        throws CIMException;
    private native int _associatorNames (int cc,
                                         int ns,
                                         int path,
                                         String assocClass,
                                         String resultClass,
                                         String role,
                                         String resultRole);
    private native int _associators (int cc,
                                     int ns,
                                     int path,
                                     String assocClass,
                                     String resultClass,
                                     String role,
                                     String resultRole,
                                     boolean includeQualifiers,
                                     boolean includeClassOrigin,
                                     String propertyList[]);
    private native int _referenceNames (int cc,
                                        int ns,
                                        int path,
                                        String resultClass,
                                        String role);
    private native int _references (int cc,
                                    int ns,
                                    int path,
                                    String resultClass,
                                    String role,
                                    boolean includeQualifiers,
                                    boolean includeClassOrigin,
                                    String propertyList[]);
    private native void _createNameSpace (int cc,
                                          String ns);
    private native Vector _enumerateNameSpaces (int cc,
                                                int path,
                                                boolean deep,
                                                Vector v);
    private native void _deleteNameSpace (int cc,
                                          String ns);
    private native void _finalize (int cInst);

    // Releases the native client peer when this object is garbage collected.
    protected void finalize ()
    {
        _finalize(cInst);
    }

    /** @return the namespace this client was constructed with. */
    public CIMNameSpace getNameSpace ()
    {
        return ns;
    }

    /**
     * Connects to the CIM server for the given namespace with username and
     * password credentials.
     */
    public CIMClient(CIMNameSpace nameSpace,
                     String userName,
                     String pword)
        throws CIMException
    {
        ns=nameSpace;
        cNsInst=nameSpace.cInst;
        cInst=_newNaUnPw(cNsInst,userName,pword);
    }

    /**
     * Same as the 3-arg constructor, but rejects any protocol type other
     * than HTTP.
     */
    public CIMClient (CIMNameSpace nameSpace,
                      String userName,
                      String pword,
                      int type)
        throws CIMException
    {
        if (type!=HTTP)
            throw new CIMException("Specified protocol type not supported.");
        ns=nameSpace;
        cNsInst=nameSpace.cInst;
        cInst=_newNaUnPw(cNsInst,userName,pword);
    }

    /*
    public CIMClient(CIMNameSpace name,
                     String userName,
                     String pword,
                     String roleName,
                     String rolePwd)
        throws CIMException
    {
    }
    public CIMClient(CIMNameSpace name,
                     String userName,
                     String pword,
                     String roleName,
                     String rolePwd,
                     int type)
        throws CIMException
    {
    }
    */

    /** Disconnects the underlying native client connection. */
    public void close()
        throws CIMException
    {
        _disconnect (cInst);
    }

    // ------------------------ namespace operations ------------------------

    public void createNameSpace (CIMNameSpace ins)
        throws CIMException
    {
        _createNameSpace(cInst,ins.getNameSpace());
    }

    public void deleteNameSpace (CIMNameSpace cns)
        throws CIMException
    {
        _deleteNameSpace(cInst,cns.getNameSpace());
    }

    // ------------------------ class/instance deletion ---------------------

    public void deleteClass (CIMObjectPath path)
        throws CIMException
    {
        _deleteClass(cInst,cNsInst,path.cInst);
    }

    public void deleteInstance (CIMObjectPath path)
        throws CIMException
    {
        _deleteInstance(cInst,cNsInst,path.cInst);
    }

    // ------------------------ enumeration ---------------------------------
    // Many operations come in pairs: a short legacy form and a longer form
    // matching the standard CIM client API; one delegates to the other or
    // both call the same native.

    public Enumeration enumNameSpace (CIMObjectPath path,
                                      boolean deep)
        throws CIMException
    {
        return _enumerateNameSpaces(cInst,path.cInst,deep, new Vector()).elements();
    }

    public Enumeration enumerateNameSpaces (CIMObjectPath path,
                                            boolean deepInheritance)
        throws CIMException
    {
        return enumNameSpace(path, deepInheritance);
    }

    // Legacy form: qualifiers included, class origin excluded.
    public Enumeration enumClass (CIMObjectPath path,
                                  boolean deep,
                                  boolean local)
        throws CIMException
    {
        return new ClassEnumeration(_enumerateClasses(cInst,
                                                      cNsInst,
                                                      path.cInst,
                                                      deep,
                                                      local,
                                                      true,
                                                      false));
    }

    public Enumeration enumerateClasses (CIMObjectPath path,
                                         boolean deepInheritance,
                                         boolean localOnly,
                                         boolean includeQualifiers,
                                         boolean includeClassOrigin)
        throws CIMException
    {
        return new ClassEnumeration(_enumerateClasses(cInst,
                                                      cNsInst,
                                                      path.cInst,
                                                      deepInheritance,
                                                      localOnly,
                                                      includeQualifiers,
                                                      includeClassOrigin));
    }

    // 2-arg overload enumerates class NAMES (paths), not class objects.
    public Enumeration enumClass (CIMObjectPath path,
                                  boolean deep)
        throws CIMException
    {
        return new PathEnumeration(_enumerateClassNames(cInst,
                                                        cNsInst,
                                                        path.cInst,
                                                        deep));
    }

    public Enumeration enumerateClassNames (CIMObjectPath path,
                                            boolean deep)
        throws CIMException
    {
        return enumClass(path,deep);
    }

    // 2-arg overload enumerates instance NAMES (paths).
    public Enumeration enumInstances (CIMObjectPath path,
                                      boolean deep)
        throws CIMException
    {
        return new PathEnumeration(_enumerateInstanceNames(cInst,
                                                           cNsInst,
                                                           path.cInst,
                                                           deep));
    }

    public Enumeration enumerateInstanceNames (CIMObjectPath path)
        throws CIMException
    {
        return enumInstances(path,false);
    }

    // 3-arg overload enumerates instance OBJECTS with default flags
    // (qualifiers included, class origin excluded, all properties).
    public Enumeration enumInstances (CIMObjectPath path,
                                      boolean deep,
                                      boolean localOnly)
        throws CIMException
    {
        return enumerateInstances(path,
                                  deep,
                                  localOnly,
                                  true,
                                  false,
                                  (String[])null);
    }

    public Enumeration enumerateInstances (CIMObjectPath path,
                                           boolean deepInheritance,
                                           boolean localOnly,
                                           boolean includeQualifiers,
                                           boolean includeClassOrigin,
                                           String propertyList[])
        throws CIMException
    {
        return new InstEnumeration(_enumerateInstances(cInst,
                                                       cNsInst,
                                                       path.cInst,
                                                       deepInheritance,
                                                       localOnly,
                                                       includeQualifiers,
                                                       includeClassOrigin,
                                                       propertyList));
    }

    // ------------------------ getters --------------------------------------
    // NOTE(review): getClass(CIMObjectPath, ...) overloads (does not override)
    // Object.getClass(); intentional in this API but easy to misread.

    public CIMClass getClass (CIMObjectPath path,
                              boolean localOnly)
        throws CIMException
    {
        return getClass(path,localOnly,true,false,(String[])null);
    }

    public CIMClass getClass (CIMObjectPath path,
                              boolean localOnly,
                              boolean includeQualifiers,
                              boolean includeClassOrigin,
                              String propertyList[])
        throws CIMException
    {
        return new CIMClass(_getClass(cInst,
                                      cNsInst,
                                      path.cInst,
                                      localOnly,
                                      includeQualifiers,
                                      includeClassOrigin,
                                      propertyList));
    }

    public CIMInstance getInstance (CIMObjectPath path,
                                    boolean localOnly)
        throws CIMException
    {
        return getInstance(path,localOnly,true,false,(String[])null);
    }

    public CIMInstance getInstance (CIMObjectPath path,
                                    boolean localOnly,
                                    boolean includeQualifiers,
                                    boolean includeClassOrigin,
                                    String propertyList[])
        throws CIMException
    {
        return new CIMInstance(_getInstance(cInst,
                                            cNsInst,
                                            path.cInst,
                                            localOnly,
                                            includeQualifiers,
                                            includeClassOrigin,
                                            propertyList));
    }

    // ------------------------ method invocation ----------------------------

    public CIMValue invokeMethod (CIMObjectPath path,
                                  String methodName,
                                  Vector inParams,
                                  Vector outParams)
        throws CIMException
    {
        return new CIMValue(_invokeMethod(cInst,
                                          cNsInst,
                                          path.cInst,
                                          methodName,
                                          inParams,
                                          outParams));
    }

    // CIMArgument-based invocation is not wired up; the native exists but the
    // call is disabled (see commented line below).
    public CIMValue invokeMethod (CIMObjectPath path,
                                  String methodName,
                                  CIMArgument[] inParams,
                                  CIMArgument[] outParams)
        throws CIMException
    {
        throw new CIMException(CIMException.CIM_ERR_NOT_SUPPORTED);
        ///////return new CIMValue(_invokeMethod24(cInst,cNsInst,path.cInst,methodName,inParams,outParams));
    }

    // ------------------------ qualifiers ------------------------------------

    public Enumeration enumQualifierTypes (CIMObjectPath path)
        throws CIMException
    {
        return new QualEnumeration(_enumerateQualifiers(cInst,cNsInst,path.cInst));
    }

    public Enumeration enumerateQualifiers (CIMObjectPath path)
        throws CIMException
    {
        return new QualEnumeration(_enumerateQualifiers(cInst,cNsInst,path.cInst));
    }

    public void deleteQualifierType (CIMObjectPath path)
        throws CIMException
    {
        _deleteQualifier(cInst,cNsInst,path.cInst);
    }

    public void deleteQualifier (CIMObjectPath path)
        throws CIMException
    {
        _deleteQualifier(cInst,cNsInst,path.cInst);
    }

    public CIMQualifierType getQualifierType (CIMObjectPath path)
        throws CIMException
    {
        return new CIMQualifierType(_getQualifier(cInst,cNsInst,path.cInst));
    }

    public CIMQualifierType getQualifier (CIMObjectPath path)
        throws CIMException
    {
        return new CIMQualifierType(_getQualifier(cInst,cNsInst,path.cInst));
    }

    // create/set both map to the same native _setQualifier.
    public void createQualifierType (CIMObjectPath path,
                                     CIMQualifierType qt)
        throws CIMException
    {
        _setQualifier(cInst,cNsInst,path.cInst,qt.cInst);
    }

    public void setQualifierType (CIMObjectPath path,
                                  CIMQualifierType qt)
        throws CIMException
    {
        _setQualifier(cInst,cNsInst,path.cInst,qt.cInst);
    }

    public void setQualifier (CIMObjectPath path,
                              CIMQualifierType qualifierType)
        throws CIMException
    {
        setQualifierType(path,qualifierType);
    }

    // ------------------------ class/instance creation & update --------------

    public void createClass (CIMObjectPath path,
                             CIMClass cc)
        throws CIMException
    {
        _createClass(cInst,cNsInst,path.cInst,cc.cInst);
    }

    public void setClass (CIMObjectPath path,
                          CIMClass cc)
        throws CIMException
    {
        _setClass(cInst,cNsInst,path.cInst,cc.cInst);
    }

    public void modifyClass (CIMObjectPath path,
                             CIMClass modifiedClass)
        throws CIMException
    {
        setClass(path,modifiedClass);
    }

    public CIMObjectPath createInstance (CIMObjectPath path,
                                         CIMInstance ci)
        throws CIMException
    {
        return new CIMObjectPath(_createInstance(cInst,cNsInst,path.cInst,ci.cInst));
    }

    // setInstance updates all properties, qualifiers included.
    public void setInstance (CIMObjectPath path,
                             CIMInstance ci)
        throws CIMException
    {
        modifyInstance(path,ci,true,(String[])null);
    }

    public void modifyInstance (CIMObjectPath path,
                                CIMInstance modifiedInstance,
                                boolean includeQualifiers,
                                String propertyList[])
        throws CIMException
    {
        _modifyInstance(cInst,
                        cNsInst,
                        path.cInst,
                        modifiedInstance.cInst,
                        includeQualifiers,
                        propertyList);
    }

    // ------------------------ properties ------------------------------------

    public CIMValue getProperty (CIMObjectPath path,
                                 String propertyName)
        throws CIMException
    {
        return new CIMValue(_getProperty(cInst,
                                         cNsInst,
                                         path.cInst,
                                         propertyName));
    }

    public void setProperty (CIMObjectPath path,
                             String propertyName,
                             CIMValue newValue)
        throws CIMException
    {
        _setProperty(cInst,cNsInst,path.cInst,propertyName,newValue.cInst);
    }

    // ------------------------ query & associations --------------------------

    /** Executes a query in the given query language (see WQL constants). */
    public Enumeration execQuery(CIMObjectPath path,
                                 String query,
                                 String ql)
        throws CIMException
    {
        return new InstEnumeration(_execQuery(cInst,cNsInst,path.cInst,query,ql));
    }

    public Enumeration associatorNames (CIMObjectPath path,
                                        String assocClass,
                                        String resultClass,
                                        String role,
                                        String resultRole)
        throws CIMException
    {
        return new PathEnumeration(_associatorNames(cInst,
                                                    cNsInst,
                                                    path.cInst,
                                                    assocClass,
                                                    resultClass,
                                                    role,
                                                    resultRole));
    }

    public Enumeration associators (CIMObjectPath path,
                                    String assocClass,
                                    String resultClass,
                                    String role,
                                    String resultRole,
                                    boolean includeQualifiers,
                                    boolean includeClassOrigin,
                                    String propertyList[])
        throws CIMException
    {
        return new InstEnumeration(_associators(cInst,
                                                cNsInst,
                                                path.cInst,
                                                assocClass,
                                                resultClass,
                                                role,
                                                resultRole,
                                                includeQualifiers,
                                                includeClassOrigin,
                                                propertyList));
    }

    public Enumeration referenceNames (CIMObjectPath path,
                                       String resultClass,
                                       String role)
        throws CIMException
    {
        return new PathEnumeration(_referenceNames(cInst,
                                                   cNsInst,
                                                   path.cInst,
                                                   resultClass,
                                                   role));
    }

    public Enumeration references (CIMObjectPath path,
                                   String resultClass,
                                   String role,
                                   boolean includeQualifiers,
                                   boolean includeClassOrigin,
                                   String propertyList[])
        throws CIMException
    {
        return new InstEnumeration(_references(cInst,
                                               cNsInst,
                                               path.cInst,
                                               resultClass,
                                               role,
                                               includeQualifiers,
                                               includeClassOrigin,
                                               propertyList));
    }

    /*
    public CIMNameSpace getNameSpace ()
    {
    }
    public CIMFeatures getServerFeatures ()
        throws CIMException
    {
    }
    public void authenticate (String userName,
                              String password)
        throws CIMException
    {
    }
    */

    // Loads the JNI library that implements all of the native methods above.
    static {
        System.loadLibrary("JMPIProviderManager");
    }
}
| |
/*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
*/
package io.netty.util.internal.chmv8;
/**
* A {@link ForkJoinTask} with a completion action performed when
* triggered and there are no remaining pending
* actions. CountedCompleters are in general more robust in the
* presence of subtask stalls and blockage than are other forms of
* ForkJoinTasks, but are less intuitive to program. Uses of
* CountedCompleter are similar to those of other completion based
* components (such as {@link java.nio.channels.CompletionHandler})
* except that multiple <em>pending</em> completions may be necessary
* to trigger the {@link #onCompletion} action, not just one. Unless
* initialized otherwise, the {@link #getPendingCount pending count}
* starts at zero, but may be (atomically) changed using methods
* {@link #setPendingCount}, {@link #addToPendingCount}, and {@link
* #compareAndSetPendingCount}. Upon invocation of {@link
* #tryComplete}, if the pending action count is nonzero, it is
* decremented; otherwise, the completion action is performed, and if
* this completer itself has a completer, the process is continued
* with its completer. As is the case with related synchronization
* components such as {@link java.util.concurrent.Phaser Phaser} and
* {@link java.util.concurrent.Semaphore Semaphore}, these methods
* affect only internal counts; they do not establish any further
* internal bookkeeping. In particular, the identities of pending
* tasks are not maintained. As illustrated below, you can create
* subclasses that do record some or all pending tasks or their
* results when needed. As illustrated below, utility methods
* supporting customization of completion traversals are also
* provided. However, because CountedCompleters provide only basic
* synchronization mechanisms, it may be useful to create further
* abstract subclasses that maintain linkages, fields, and additional
* support methods appropriate for a set of related usages.
*
* <p>A concrete CountedCompleter class must define method {@link
* #compute}, that should in most cases (as illustrated below), invoke
* {@code tryComplete()} once before returning. The class may also
* optionally override method {@link #onCompletion} to perform an
* action upon normal completion, and method {@link
* #onExceptionalCompletion} to perform an action upon any exception.
*
* <p>CountedCompleters most often do not bear results, in which case
* they are normally declared as {@code CountedCompleter<Void>}, and
* will always return {@code null} as a result value. In other cases,
* you should override method {@link #getRawResult} to provide a
* result from {@code join(), invoke()}, and related methods. In
* general, this method should return the value of a field (or a
* function of one or more fields) of the CountedCompleter object that
* holds the result upon completion. Method {@link #setRawResult} by
* default plays no role in CountedCompleters. It is possible, but
* rarely applicable, to override this method to maintain other
* objects or fields holding result data.
*
* <p>A CountedCompleter that does not itself have a completer (i.e.,
* one for which {@link #getCompleter} returns {@code null}) can be
* used as a regular ForkJoinTask with this added functionality.
* However, any completer that in turn has another completer serves
* only as an internal helper for other computations, so its own task
* status (as reported in methods such as {@link ForkJoinTask#isDone})
* is arbitrary; this status changes only upon explicit invocations of
* {@link #complete}, {@link ForkJoinTask#cancel}, {@link
* ForkJoinTask#completeExceptionally} or upon exceptional completion
* of method {@code compute}. Upon any exceptional completion, the
* exception may be relayed to a task's completer (and its completer,
* and so on), if one exists and it has not otherwise already
* completed. Similarly, cancelling an internal CountedCompleter has
* only a local effect on that completer, so is not often useful.
*
* <p><b>Sample Usages.</b>
*
* <p><b>Parallel recursive decomposition.</b> CountedCompleters may
* be arranged in trees similar to those often used with {@link
* RecursiveAction}s, although the constructions involved in setting
* them up typically vary. Here, the completer of each task is its
* parent in the computation tree. Even though they entail a bit more
* bookkeeping, CountedCompleters may be better choices when applying
* a possibly time-consuming operation (that cannot be further
* subdivided) to each element of an array or collection; especially
* when the operation takes a significantly different amount of time
* to complete for some elements than others, either because of
* intrinsic variation (for example I/O) or auxiliary effects such as
* garbage collection. Because CountedCompleters provide their own
* continuations, other threads need not block waiting to perform
* them.
*
* <p>For example, here is an initial version of a class that uses
* divide-by-two recursive decomposition to divide work into single
* pieces (leaf tasks). Even when work is split into individual calls,
* tree-based techniques are usually preferable to directly forking
* leaf tasks, because they reduce inter-thread communication and
* improve load balancing. In the recursive case, the second of each
* pair of subtasks to finish triggers completion of its parent
* (because no result combination is performed, the default no-op
* implementation of method {@code onCompletion} is not overridden). A
* static utility method sets up the base task and invokes it
* (here, implicitly using the {@link ForkJoinPool#commonPool()}).
*
* <pre> {@code
* class MyOperation<E> { void apply(E e) { ... } }
*
* class ForEach<E> extends CountedCompleter<Void> {
*
* public static <E> void forEach(E[] array, MyOperation<E> op) {
* new ForEach<E>(null, array, op, 0, array.length).invoke();
* }
*
* final E[] array; final MyOperation<E> op; final int lo, hi;
* ForEach(CountedCompleter<?> p, E[] array, MyOperation<E> op, int lo, int hi) {
* super(p);
* this.array = array; this.op = op; this.lo = lo; this.hi = hi;
* }
*
* public void compute() { // version 1
* if (hi - lo >= 2) {
* int mid = (lo + hi) >>> 1;
* setPendingCount(2); // must set pending count before fork
* new ForEach(this, array, op, mid, hi).fork(); // right child
* new ForEach(this, array, op, lo, mid).fork(); // left child
* }
* else if (hi > lo)
* op.apply(array[lo]);
* tryComplete();
* }
* }}</pre>
*
* This design can be improved by noticing that in the recursive case,
* the task has nothing to do after forking its right task, so can
* directly invoke its left task before returning. (This is an analog
* of tail recursion removal.) Also, because the task returns upon
* executing its left task (rather than falling through to invoke
* {@code tryComplete}) the pending count is set to one:
*
* <pre> {@code
* class ForEach<E> ...
* public void compute() { // version 2
* if (hi - lo >= 2) {
* int mid = (lo + hi) >>> 1;
* setPendingCount(1); // only one pending
* new ForEach(this, array, op, mid, hi).fork(); // right child
* new ForEach(this, array, op, lo, mid).compute(); // direct invoke
* }
* else {
* if (hi > lo)
* op.apply(array[lo]);
* tryComplete();
* }
* }
* }</pre>
*
* As a further improvement, notice that the left task need not even
* exist. Instead of creating a new one, we can iterate using the
* original task, and add a pending count for each fork. Additionally,
* because no task in this tree implements an {@link #onCompletion}
* method, {@code tryComplete()} can be replaced with {@link
* #propagateCompletion}.
*
* <pre> {@code
* class ForEach<E> ...
* public void compute() { // version 3
* int l = lo, h = hi;
* while (h - l >= 2) {
* int mid = (l + h) >>> 1;
* addToPendingCount(1);
* new ForEach(this, array, op, mid, h).fork(); // right child
* h = mid;
* }
* if (h > l)
* op.apply(array[l]);
* propagateCompletion();
* }
* }</pre>
*
* Additional improvements of such classes might entail precomputing
* pending counts so that they can be established in constructors,
* specializing classes for leaf steps, subdividing by say, four,
* instead of two per iteration, and using an adaptive threshold
* instead of always subdividing down to single elements.
*
* <p><b>Searching.</b> A tree of CountedCompleters can search for a
* value or property in different parts of a data structure, and
* report a result in an {@link
* java.util.concurrent.atomic.AtomicReference AtomicReference} as
* soon as one is found. The others can poll the result to avoid
* unnecessary work. (You could additionally {@linkplain #cancel
* cancel} other tasks, but it is usually simpler and more efficient
* to just let them notice that the result is set and if so skip
* further processing.) Illustrating again with an array using full
* partitioning (again, in practice, leaf tasks will almost always
* process more than one element):
*
* <pre> {@code
* class Searcher<E> extends CountedCompleter<E> {
* final E[] array; final AtomicReference<E> result; final int lo, hi;
* Searcher(CountedCompleter<?> p, E[] array, AtomicReference<E> result, int lo, int hi) {
* super(p);
* this.array = array; this.result = result; this.lo = lo; this.hi = hi;
* }
* public E getRawResult() { return result.get(); }
* public void compute() { // similar to ForEach version 3
* int l = lo, h = hi;
* while (result.get() == null && h >= l) {
* if (h - l >= 2) {
* int mid = (l + h) >>> 1;
* addToPendingCount(1);
* new Searcher(this, array, result, mid, h).fork();
* h = mid;
* }
* else {
* E x = array[l];
* if (matches(x) && result.compareAndSet(null, x))
* quietlyCompleteRoot(); // root task is now joinable
* break;
* }
* }
* tryComplete(); // normally complete whether or not found
* }
* boolean matches(E e) { ... } // return true if found
*
* public static <E> E search(E[] array) {
* return new Searcher<E>(null, array, new AtomicReference<E>(), 0, array.length).invoke();
* }
* }}</pre>
*
* In this example, as well as others in which tasks have no other
* effects except to compareAndSet a common result, the trailing
* unconditional invocation of {@code tryComplete} could be made
* conditional ({@code if (result.get() == null) tryComplete();})
* because no further bookkeeping is required to manage completions
* once the root task completes.
*
* <p><b>Recording subtasks.</b> CountedCompleter tasks that combine
* results of multiple subtasks usually need to access these results
* in method {@link #onCompletion}. As illustrated in the following
* class (that performs a simplified form of map-reduce where mappings
* and reductions are all of type {@code E}), one way to do this in
* divide and conquer designs is to have each subtask record its
* sibling, so that it can be accessed in method {@code onCompletion}.
* This technique applies to reductions in which the order of
* combining left and right results does not matter; ordered
* reductions require explicit left/right designations. Variants of
* other streamlinings seen in the above examples may also apply.
*
* <pre> {@code
* class MyMapper<E> { E apply(E v) { ... } }
* class MyReducer<E> { E apply(E x, E y) { ... } }
* class MapReducer<E> extends CountedCompleter<E> {
* final E[] array; final MyMapper<E> mapper;
* final MyReducer<E> reducer; final int lo, hi;
* MapReducer<E> sibling;
* E result;
* MapReducer(CountedCompleter<?> p, E[] array, MyMapper<E> mapper,
* MyReducer<E> reducer, int lo, int hi) {
* super(p);
* this.array = array; this.mapper = mapper;
* this.reducer = reducer; this.lo = lo; this.hi = hi;
* }
* public void compute() {
* if (hi - lo >= 2) {
* int mid = (lo + hi) >>> 1;
* MapReducer<E> left = new MapReducer(this, array, mapper, reducer, lo, mid);
* MapReducer<E> right = new MapReducer(this, array, mapper, reducer, mid, hi);
* left.sibling = right;
* right.sibling = left;
* setPendingCount(1); // only right is pending
* right.fork();
* left.compute(); // directly execute left
* }
* else {
* if (hi > lo)
* result = mapper.apply(array[lo]);
* tryComplete();
* }
* }
* public void onCompletion(CountedCompleter<?> caller) {
* if (caller != this) {
* MapReducer<E> child = (MapReducer<E>)caller;
* MapReducer<E> sib = child.sibling;
* if (sib == null || sib.result == null)
* result = child.result;
* else
* result = reducer.apply(child.result, sib.result);
* }
* }
* public E getRawResult() { return result; }
*
* public static <E> E mapReduce(E[] array, MyMapper<E> mapper, MyReducer<E> reducer) {
* return new MapReducer<E>(null, array, mapper, reducer,
* 0, array.length).invoke();
* }
* }}</pre>
*
* Here, method {@code onCompletion} takes a form common to many
* completion designs that combine results. This callback-style method
* is triggered once per task, in either of the two different contexts
* in which the pending count is, or becomes, zero: (1) by a task
* itself, if its pending count is zero upon invocation of {@code
* tryComplete}, or (2) by any of its subtasks when they complete and
* decrement the pending count to zero. The {@code caller} argument
* distinguishes cases. Most often, when the caller is {@code this},
* no action is necessary. Otherwise the caller argument can be used
* (usually via a cast) to supply a value (and/or links to other
* values) to be combined. Assuming proper use of pending counts, the
* actions inside {@code onCompletion} occur (once) upon completion of
* a task and its subtasks. No additional synchronization is required
* within this method to ensure thread safety of accesses to fields of
* this task or other completed tasks.
*
* <p><b>Completion Traversals</b>. If using {@code onCompletion} to
* process completions is inapplicable or inconvenient, you can use
* methods {@link #firstComplete} and {@link #nextComplete} to create
* custom traversals. For example, to define a MapReducer that only
* splits out right-hand tasks in the form of the third ForEach
* example, the completions must cooperatively reduce along
* unexhausted subtask links, which can be done as follows:
*
* <pre> {@code
* class MapReducer<E> extends CountedCompleter<E> { // version 2
* final E[] array; final MyMapper<E> mapper;
* final MyReducer<E> reducer; final int lo, hi;
* MapReducer<E> forks, next; // record subtask forks in list
* E result;
* MapReducer(CountedCompleter<?> p, E[] array, MyMapper<E> mapper,
* MyReducer<E> reducer, int lo, int hi, MapReducer<E> next) {
* super(p);
* this.array = array; this.mapper = mapper;
* this.reducer = reducer; this.lo = lo; this.hi = hi;
* this.next = next;
* }
* public void compute() {
* int l = lo, h = hi;
* while (h - l >= 2) {
* int mid = (l + h) >>> 1;
* addToPendingCount(1);
 * (forks = new MapReducer(this, array, mapper, reducer, mid, h, forks)).fork();
* h = mid;
* }
* if (h > l)
* result = mapper.apply(array[l]);
* // process completions by reducing along and advancing subtask links
* for (CountedCompleter<?> c = firstComplete(); c != null; c = c.nextComplete()) {
* for (MapReducer t = (MapReducer)c, s = t.forks; s != null; s = t.forks = s.next)
* t.result = reducer.apply(t.result, s.result);
* }
* }
* public E getRawResult() { return result; }
*
* public static <E> E mapReduce(E[] array, MyMapper<E> mapper, MyReducer<E> reducer) {
* return new MapReducer<E>(null, array, mapper, reducer,
* 0, array.length, null).invoke();
* }
* }}</pre>
*
* <p><b>Triggers.</b> Some CountedCompleters are themselves never
* forked, but instead serve as bits of plumbing in other designs;
 * including those in which the completion of one or more async tasks
* triggers another async task. For example:
*
* <pre> {@code
* class HeaderBuilder extends CountedCompleter<...> { ... }
* class BodyBuilder extends CountedCompleter<...> { ... }
* class PacketSender extends CountedCompleter<...> {
* PacketSender(...) { super(null, 1); ... } // trigger on second completion
* public void compute() { } // never called
* public void onCompletion(CountedCompleter<?> caller) { sendPacket(); }
* }
* // sample use:
* PacketSender p = new PacketSender();
* new HeaderBuilder(p, ...).fork();
* new BodyBuilder(p, ...).fork();
* }</pre>
*
* @since 1.8
* @author Doug Lea
*/
@SuppressWarnings("all")
abstract class CountedCompleter<T> extends ForkJoinTask<T> {
    private static final long serialVersionUID = 5232453752276485070L;

    /** This task's completer, or null if none */
    final CountedCompleter<?> completer;
    /** The number of pending tasks until completion */
    volatile int pending;

    /**
     * Creates a new CountedCompleter with the given completer
     * and initial pending count.
     *
     * @param completer this task's completer, or {@code null} if none
     * @param initialPendingCount the initial pending count
     */
    protected CountedCompleter(CountedCompleter<?> completer,
                               int initialPendingCount) {
        this.completer = completer;
        this.pending = initialPendingCount;
    }

    /**
     * Creates a new CountedCompleter with the given completer
     * and an initial pending count of zero.
     *
     * @param completer this task's completer, or {@code null} if none
     */
    protected CountedCompleter(CountedCompleter<?> completer) {
        this.completer = completer;
    }

    /**
     * Creates a new CountedCompleter with no completer
     * and an initial pending count of zero.
     */
    protected CountedCompleter() {
        this.completer = null;
    }

    /**
     * The main computation performed by this task.
     */
    public abstract void compute();

    /**
     * Performs an action when method {@link #tryComplete} is invoked
     * and the pending count is zero, or when the unconditional
     * method {@link #complete} is invoked. By default, this method
     * does nothing. You can distinguish cases by checking the
     * identity of the given caller argument. If not equal to {@code
     * this}, then it is typically a subtask that may contain results
     * (and/or links to other results) to combine.
     *
     * @param caller the task invoking this method (which may
     * be this task itself)
     */
    public void onCompletion(CountedCompleter<?> caller) {
    }

    /**
     * Performs an action when method {@link #completeExceptionally}
     * is invoked or method {@link #compute} throws an exception, and
     * this task has not otherwise already completed normally. On
     * entry to this method, this task {@link
     * ForkJoinTask#isCompletedAbnormally}. The return value of this
     * method controls further propagation: If {@code true} and this
     * task has a completer, then this completer is also completed
     * exceptionally. The default implementation of this method does
     * nothing except return {@code true}.
     *
     * @param ex the exception
     * @param caller the task invoking this method (which may
     * be this task itself)
     * @return true if this exception should be propagated to this
     * task's completer, if one exists
     */
    public boolean onExceptionalCompletion(Throwable ex, CountedCompleter<?> caller) {
        return true;
    }

    /**
     * Returns the completer established in this task's constructor,
     * or {@code null} if none.
     *
     * @return the completer
     */
    public final CountedCompleter<?> getCompleter() {
        return completer;
    }

    /**
     * Returns the current pending count.
     *
     * @return the current pending count
     */
    public final int getPendingCount() {
        return pending;
    }

    /**
     * Sets the pending count to the given value.
     *
     * @param count the count
     */
    public final void setPendingCount(int count) {
        pending = count;
    }

    /**
     * Adds (atomically) the given value to the pending count.
     *
     * @param delta the value to add
     */
    public final void addToPendingCount(int delta) {
        int c; // note: can replace with intrinsic in jdk8
        // Classic CAS retry loop: re-read pending until the swap succeeds.
        do {} while (!U.compareAndSwapInt(this, PENDING, c = pending, c+delta));
    }

    /**
     * Sets (atomically) the pending count to the given count only if
     * it currently holds the given expected value.
     *
     * @param expected the expected value
     * @param count the new value
     * @return true if successful
     */
    public final boolean compareAndSetPendingCount(int expected, int count) {
        return U.compareAndSwapInt(this, PENDING, expected, count);
    }

    /**
     * If the pending count is nonzero, (atomically) decrements it.
     *
     * @return the initial (undecremented) pending count holding on entry
     * to this method
     */
    public final int decrementPendingCountUnlessZero() {
        int c;
        // Retry only while the count is nonzero; a zero count is returned as-is.
        do {} while ((c = pending) != 0 &&
                     !U.compareAndSwapInt(this, PENDING, c, c - 1));
        return c;
    }

    /**
     * Returns the root of the current computation; i.e., this
     * task if it has no completer, else its completer's root.
     *
     * @return the root of the current computation
     */
    public final CountedCompleter<?> getRoot() {
        CountedCompleter<?> a = this, p;
        while ((p = a.completer) != null)
            a = p;
        return a;
    }

    /**
     * If the pending count is nonzero, decrements the count;
     * otherwise invokes {@link #onCompletion} and then similarly
     * tries to complete this task's completer, if one exists,
     * else marks this task as complete.
     */
    public final void tryComplete() {
        // Walk up the completer chain: 'a' is the task being examined,
        // 's' is the subtask that just completed (passed to onCompletion).
        CountedCompleter<?> a = this, s = a;
        for (int c;;) {
            if ((c = a.pending) == 0) {
                a.onCompletion(s);
                if ((a = (s = a).completer) == null) {
                    // Reached the root with no pending work: mark it done.
                    s.quietlyComplete();
                    return;
                }
            }
            else if (U.compareAndSwapInt(a, PENDING, c, c - 1))
                return; // decremented a nonzero count; completion not yet triggered
        }
    }

    /**
     * Equivalent to {@link #tryComplete} but does not invoke {@link
     * #onCompletion} along the completion path: If the pending count
     * is nonzero, decrements the count; otherwise, similarly tries to
     * complete this task's completer, if one exists, else marks this
     * task as complete. This method may be useful in cases where
     * {@code onCompletion} should not, or need not, be invoked for
     * each completer in a computation.
     */
    public final void propagateCompletion() {
        // Same chain-walk as tryComplete, minus the onCompletion callback.
        CountedCompleter<?> a = this, s = a;
        for (int c;;) {
            if ((c = a.pending) == 0) {
                if ((a = (s = a).completer) == null) {
                    s.quietlyComplete();
                    return;
                }
            }
            else if (U.compareAndSwapInt(a, PENDING, c, c - 1))
                return;
        }
    }

    /**
     * Regardless of pending count, invokes {@link #onCompletion},
     * marks this task as complete and further triggers {@link
     * #tryComplete} on this task's completer, if one exists. The
     * given rawResult is used as an argument to {@link #setRawResult}
     * before invoking {@link #onCompletion} or marking this task as
     * complete; its value is meaningful only for classes overriding
     * {@code setRawResult}.
     *
     * <p>This method may be useful when forcing completion as soon as
     * any one (versus all) of several subtask results are obtained.
     * However, in the common (and recommended) case in which {@code
     * setRawResult} is not overridden, this effect can be obtained
     * more simply using {@code quietlyCompleteRoot();}.
     *
     * @param rawResult the raw result
     */
    public void complete(T rawResult) {
        CountedCompleter<?> p;
        // Order matters: result is published before the completion callback
        // and before this task is marked done.
        setRawResult(rawResult);
        onCompletion(this);
        quietlyComplete();
        if ((p = completer) != null)
            p.tryComplete();
    }

    /**
     * If this task's pending count is zero, returns this task;
     * otherwise decrements its pending count and returns {@code
     * null}. This method is designed to be used with {@link
     * #nextComplete} in completion traversal loops.
     *
     * @return this task, if pending count was zero, else {@code null}
     */
    public final CountedCompleter<?> firstComplete() {
        for (int c;;) {
            if ((c = pending) == 0)
                return this;
            else if (U.compareAndSwapInt(this, PENDING, c, c - 1))
                return null;
        }
    }

    /**
     * If this task does not have a completer, invokes {@link
     * ForkJoinTask#quietlyComplete} and returns {@code null}. Or, if
     * this task's pending count is non-zero, decrements its pending
     * count and returns {@code null}. Otherwise, returns the
     * completer. This method can be used as part of a completion
     * traversal loop for homogeneous task hierarchies:
     *
     * <pre> {@code
     * for (CountedCompleter<?> c = firstComplete();
     *      c != null;
     *      c = c.nextComplete()) {
     *   // ... process c ...
     * }}</pre>
     *
     * @return the completer, or {@code null} if none
     */
    public final CountedCompleter<?> nextComplete() {
        CountedCompleter<?> p;
        if ((p = completer) != null)
            return p.firstComplete();
        else {
            quietlyComplete();
            return null;
        }
    }

    /**
     * Equivalent to {@code getRoot().quietlyComplete()}.
     */
    public final void quietlyCompleteRoot() {
        for (CountedCompleter<?> a = this, p;;) {
            if ((p = a.completer) == null) {
                a.quietlyComplete();
                return;
            }
            a = p;
        }
    }

    /**
     * Supports ForkJoinTask exception propagation.
     */
    void internalPropagateException(Throwable ex) {
        // Relay the exception up the completer chain while each level's
        // onExceptionalCompletion agrees and the next task is still active.
        CountedCompleter<?> a = this, s = a;
        while (a.onExceptionalCompletion(ex, s) &&
               (a = (s = a).completer) != null && a.status >= 0)
            a.recordExceptionalCompletion(ex);
    }

    /**
     * Implements execution conventions for CountedCompleters.
     */
    protected final boolean exec() {
        compute();
        // Always false: completion is signaled via tryComplete/complete,
        // never by returning from exec.
        return false;
    }

    /**
     * Returns the result of the computation. By default
     * returns {@code null}, which is appropriate for {@code Void}
     * actions, but in other cases should be overridden, almost
     * always to return a field or function of a field that
     * holds the result upon completion.
     *
     * @return the result of the computation
     */
    public T getRawResult() { return null; }

    /**
     * A method that result-bearing CountedCompleters may optionally
     * use to help maintain result data. By default, does nothing.
     * Overrides are not recommended. However, if this method is
     * overridden to update existing objects or fields, then it must
     * in general be defined to be thread-safe.
     */
    protected void setRawResult(T t) { }

    // Unsafe mechanics: U/PENDING enable lock-free CAS on the 'pending' field.
    private static final sun.misc.Unsafe U;
    private static final long PENDING;
    static {
        try {
            U = getUnsafe();
            PENDING = U.objectFieldOffset
                (CountedCompleter.class.getDeclaredField("pending"));
        } catch (Exception e) {
            throw new Error(e);
        }
    }

    /**
     * Returns a sun.misc.Unsafe.  Suitable for use in a 3rd party package.
     * Replace with a simple call to Unsafe.getUnsafe when integrating
     * into a jdk.
     *
     * @return a sun.misc.Unsafe
     */
    private static sun.misc.Unsafe getUnsafe() {
        try {
            return sun.misc.Unsafe.getUnsafe();
        } catch (SecurityException tryReflectionInstead) {}
        // Fallback for non-bootclasspath callers: reflectively read the
        // singleton field rather than calling getUnsafe().
        try {
            return java.security.AccessController.doPrivileged
            (new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
                public sun.misc.Unsafe run() throws Exception {
                    Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
                    for (java.lang.reflect.Field f : k.getDeclaredFields()) {
                        f.setAccessible(true);
                        Object x = f.get(null);
                        if (k.isInstance(x))
                            return k.cast(x);
                    }
                    throw new NoSuchFieldError("the Unsafe");
                }});
        } catch (java.security.PrivilegedActionException e) {
            throw new RuntimeException("Could not initialize intrinsics",
                                       e.getCause());
        }
    }
}
| |
package org.likelist.po;
import java.util.Date;
/**
* EsjUserInfo entity. @author MyEclipse Persistence Tools
*/
public class EsjUserInfo implements java.io.Serializable {
// Fields
// NOTE(review): the per-field notes below are inferred from names only —
// confirm against the Hibernate/MyEclipse mapping before relying on them.

// Identity / credentials
private Integer userId; // surrogate primary key (assigned by persistence layer — no setter validation here)
private String fullName;
private String password; // NOTE(review): stored as plain String — verify it is hashed upstream
private String firstName;
private String midName;
private String lastName;

// Email verification
private String email;
private boolean emailChecked; // whether the primary email passed verification
private Integer emailCheckCode;

// Profile
private boolean sex; // boolean encoding; which value means which sex is not defined here — TODO confirm
private String qq;
private String msn;
private String avatarDir;
private Date birthDay;
private Integer userAddrId; // presumably a foreign key to an address row — confirm
private Integer level;
private Integer userSchoolId; // presumably a foreign key to a school row — confirm
private Integer albumId;
private String companyName;
private String email2; // secondary email
private String hobby;
private String bloodType;
private String favouriteFood;
private String favouriteArea;
private boolean married;
private String residency;
private String birthPlace;

// Activity counters / account data
private Integer operationTimes;
private Integer loginTimes;
private String alipayAccount;
private Date createTime;

// Address
private String country;
private String province;
private String phone;
private String city;
private String district;
private String street;
private Integer zipCode;
private String fullAddress;

// Education
private String university;
private String universityClass;
private String middleSchool;

// Timestamps / misc
private Date lastUpdate;
private Date lastLogin;
private String seekHelp;
// Constructors
/** No-arg constructor required by the persistence tooling (class header notes MyEclipse Persistence Tools). */
public EsjUserInfo() {
}
/**
 * Minimal constructor: populates only a core subset of columns
 * (name parts, credentials, email-verification state, album id and
 * timestamps). Presumably these are the NOT NULL columns of the
 * backing table — TODO confirm against the mapping file.
 * All remaining fields are left at their defaults.
 */
public EsjUserInfo(String fullName, String password, String firstName,
        String midName, String lastName, String email,
        boolean emailChecked, Integer emailCheckCode, Integer albumId,
        Date createTime, Date lastUpdate) {
    this.fullName = fullName;
    this.password = password;
    this.firstName = firstName;
    this.midName = midName;
    this.lastName = lastName;
    this.email = email;
    this.emailChecked = emailChecked;
    this.emailCheckCode = emailCheckCode;
    this.albumId = albumId;
    this.createTime = createTime;
    this.lastUpdate = lastUpdate;
}
/**
 * Full constructor: populates every mapped column except the generated
 * primary key ({@code userId}, assigned by the persistence layer).
 * Parameters are straight field assignments in declaration order;
 * no validation or defensive copying is performed (Date arguments are
 * stored by reference).
 */
public EsjUserInfo(String fullName, String password, String firstName,
        String midName, String lastName, String email,
        boolean emailChecked, Integer emailCheckCode, boolean sex,
        String qq, String msn, String avatarDir, Date birthDay,
        Integer userAddrId, Integer level, Integer userSchoolId,
        Integer albumId, String companyName, String email2, String hobby,
        String bloodType, String favouriteFood, String favouriteArea,
        boolean married, String residency, String birthPlace,
        Integer operationTimes, Integer loginTimes, String alipayAccount,
        Date createTime, String country, String province, String phone,
        String city, String district, String street, Integer zipCode,
        String fullAddress, String university, String universityClass,
        String middleSchool, Date lastUpdate, Date lastLogin,
        String seekHelp) {
    this.fullName = fullName;
    this.password = password;
    this.firstName = firstName;
    this.midName = midName;
    this.lastName = lastName;
    this.email = email;
    this.emailChecked = emailChecked;
    this.emailCheckCode = emailCheckCode;
    this.sex = sex;
    this.qq = qq;
    this.msn = msn;
    this.avatarDir = avatarDir;
    this.birthDay = birthDay;
    this.userAddrId = userAddrId;
    this.level = level;
    this.userSchoolId = userSchoolId;
    this.albumId = albumId;
    this.companyName = companyName;
    this.email2 = email2;
    this.hobby = hobby;
    this.bloodType = bloodType;
    this.favouriteFood = favouriteFood;
    this.favouriteArea = favouriteArea;
    this.married = married;
    this.residency = residency;
    this.birthPlace = birthPlace;
    this.operationTimes = operationTimes;
    this.loginTimes = loginTimes;
    this.alipayAccount = alipayAccount;
    this.createTime = createTime;
    this.country = country;
    this.province = province;
    this.phone = phone;
    this.city = city;
    this.district = district;
    this.street = street;
    this.zipCode = zipCode;
    this.fullAddress = fullAddress;
    this.university = university;
    this.universityClass = universityClass;
    this.middleSchool = middleSchool;
    this.lastUpdate = lastUpdate;
    this.lastLogin = lastLogin;
    this.seekHelp = seekHelp;
}
// Property accessors
// ---------------------------------------------------------------------
// Bean-style accessors, one get/set pair per mapped column. Plain field
// reads/writes with no validation or defensive copying. Note the boolean
// getters use the generator's get-prefix (getSex, getMarried,
// getEmailChecked) rather than the usual is-prefix; the names are part
// of the mapped bean contract and are kept as-is.
// ---------------------------------------------------------------------

/** @return the surrogate primary key. */
public Integer getUserId() { return userId; }

public void setUserId(Integer userId) { this.userId = userId; }

public String getFullName() { return fullName; }

public void setFullName(String fullName) { this.fullName = fullName; }

public String getPassword() { return password; }

public void setPassword(String password) { this.password = password; }

public String getFirstName() { return firstName; }

public void setFirstName(String firstName) { this.firstName = firstName; }

public String getMidName() { return midName; }

public void setMidName(String midName) { this.midName = midName; }

public String getLastName() { return lastName; }

public void setLastName(String lastName) { this.lastName = lastName; }

public String getEmail() { return email; }

public void setEmail(String email) { this.email = email; }

/** @return whether the primary email has been verified. */
public boolean getEmailChecked() { return emailChecked; }

public void setEmailChecked(boolean emailChecked) { this.emailChecked = emailChecked; }

public Integer getEmailCheckCode() { return emailCheckCode; }

public void setEmailCheckCode(Integer emailCheckCode) { this.emailCheckCode = emailCheckCode; }

public boolean getSex() { return sex; }

public void setSex(boolean sex) { this.sex = sex; }

public String getQq() { return qq; }

public void setQq(String qq) { this.qq = qq; }

public String getMsn() { return msn; }

public void setMsn(String msn) { this.msn = msn; }

public String getAvatarDir() { return avatarDir; }

public void setAvatarDir(String avatarDir) { this.avatarDir = avatarDir; }

public Date getBirthDay() { return birthDay; }

public void setBirthDay(Date birthDay) { this.birthDay = birthDay; }

public Integer getUserAddrId() { return userAddrId; }

public void setUserAddrId(Integer userAddrId) { this.userAddrId = userAddrId; }

public Integer getLevel() { return level; }

public void setLevel(Integer level) { this.level = level; }

public Integer getUserSchoolId() { return userSchoolId; }

public void setUserSchoolId(Integer userSchoolId) { this.userSchoolId = userSchoolId; }

public Integer getAlbumId() { return albumId; }

public void setAlbumId(Integer albumId) { this.albumId = albumId; }

public String getCompanyName() { return companyName; }

public void setCompanyName(String companyName) { this.companyName = companyName; }

/** @return the secondary email address. */
public String getEmail2() { return email2; }

public void setEmail2(String email2) { this.email2 = email2; }

public String getHobby() { return hobby; }

public void setHobby(String hobby) { this.hobby = hobby; }

public String getBloodType() { return bloodType; }

public void setBloodType(String bloodType) { this.bloodType = bloodType; }

public String getFavouriteFood() { return favouriteFood; }

public void setFavouriteFood(String favouriteFood) { this.favouriteFood = favouriteFood; }

public String getFavouriteArea() { return favouriteArea; }

public void setFavouriteArea(String favouriteArea) { this.favouriteArea = favouriteArea; }

public boolean getMarried() { return married; }

public void setMarried(boolean married) { this.married = married; }

public String getResidency() { return residency; }

public void setResidency(String residency) { this.residency = residency; }

public String getBirthPlace() { return birthPlace; }

public void setBirthPlace(String birthPlace) { this.birthPlace = birthPlace; }

public Integer getOperationTimes() { return operationTimes; }

public void setOperationTimes(Integer operationTimes) { this.operationTimes = operationTimes; }

public Integer getLoginTimes() { return loginTimes; }

public void setLoginTimes(Integer loginTimes) { this.loginTimes = loginTimes; }

public String getAlipayAccount() { return alipayAccount; }

public void setAlipayAccount(String alipayAccount) { this.alipayAccount = alipayAccount; }
public Date getCreateTime() {
return this.createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public String getCountry() {
return this.country;
}
public void setCountry(String country) {
this.country = country;
}
public String getProvince() {
return this.province;
}
public void setProvince(String province) {
this.province = province;
}
public String getPhone() {
return this.phone;
}
public void setPhone(String phone) {
this.phone = phone;
}
public String getCity() {
return this.city;
}
public void setCity(String city) {
this.city = city;
}
public String getDistrict() {
return this.district;
}
public void setDistrict(String district) {
this.district = district;
}
public String getStreet() {
return this.street;
}
public void setStreet(String street) {
this.street = street;
}
public Integer getZipCode() {
return this.zipCode;
}
public void setZipCode(Integer zipCode) {
this.zipCode = zipCode;
}
public String getFullAddress() {
return this.fullAddress;
}
public void setFullAddress(String fullAddress) {
this.fullAddress = fullAddress;
}
public String getUniversity() {
return this.university;
}
public void setUniversity(String university) {
this.university = university;
}
public String getUniversityClass() {
return this.universityClass;
}
public void setUniversityClass(String universityClass) {
this.universityClass = universityClass;
}
public String getMiddleSchool() {
return this.middleSchool;
}
public void setMiddleSchool(String middleSchool) {
this.middleSchool = middleSchool;
}
public Date getLastUpdate() {
return this.lastUpdate;
}
public void setLastUpdate(Date lastUpdate) {
this.lastUpdate = lastUpdate;
}
public Date getLastLogin() {
return this.lastLogin;
}
public void setLastLogin(Date lastLogin) {
this.lastLogin = lastLogin;
}
public String getSeekHelp() {
return this.seekHelp;
}
public void setSeekHelp(String seekHelp) {
this.seekHelp = seekHelp;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.audit;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.action.Action;
import org.apache.nifi.action.Component;
import org.apache.nifi.action.FlowChangeAction;
import org.apache.nifi.action.Operation;
import org.apache.nifi.action.component.details.FlowChangeExtensionDetails;
import org.apache.nifi.action.details.ActionDetails;
import org.apache.nifi.action.details.FlowChangeConfigureDetails;
import org.apache.nifi.authorization.user.NiFiUser;
import org.apache.nifi.authorization.user.NiFiUserUtils;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.ProcessorNode;
import org.apache.nifi.controller.ScheduledState;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.web.api.dto.ProcessorConfigDTO;
import org.apache.nifi.web.api.dto.ProcessorDTO;
import org.apache.nifi.web.dao.ProcessorDAO;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
 * Audits processor creation/removal and configuration changes.
 */
@Aspect
public class ProcessorAuditor extends NiFiAuditor {

    private static final Logger logger = LoggerFactory.getLogger(ProcessorAuditor.class);

    // Audit-record keys for the processor settings that may change on update.
    private static final String COMMENTS = "Comments";
    private static final String PENALTY_DURATION = "Penalty Duration";
    private static final String YIELD_DURATION = "Yield Duration";
    private static final String CONCURRENTLY_SCHEDULABLE_TASKS = "Concurrent Tasks";
    private static final String NAME = "Name";
    private static final String BULLETIN_LEVEL = "Bulletin Level";
    private static final String ANNOTATION_DATA = "Annotation Data";
    private static final String AUTO_TERMINATED_RELATIONSHIPS = "Auto Terminated Relationships";
    private static final String SCHEDULING_PERIOD = "Run Schedule";
    private static final String SCHEDULING_STRATEGY = "Scheduling Strategy";
    private static final String EXECUTION_NODE = "Execution Node";

    /**
     * Audits the creation of processors via createProcessor().
     *
     * This method only needs to be run 'after returning'. However, in Java 7 the order in which these methods are returned from Class.getDeclaredMethods (even though there is no order guaranteed)
     * seems to differ from Java 6. SpringAOP depends on this ordering to determine advice precedence. By normalizing all advice into Around advice we can alleviate this issue.
     *
     * @param proceedingJoinPoint join point
     * @return node
     * @throws java.lang.Throwable ex
     */
    @Around("within(org.apache.nifi.web.dao.ProcessorDAO+) && "
            + "execution(org.apache.nifi.controller.ProcessorNode createProcessor(java.lang.String, org.apache.nifi.web.api.dto.ProcessorDTO))")
    public ProcessorNode createProcessorAdvice(ProceedingJoinPoint proceedingJoinPoint) throws Throwable {
        // create the processor; an exception here propagates and nothing is audited
        final ProcessorNode processor = (ProcessorNode) proceedingJoinPoint.proceed();

        // if no exceptions were thrown, record the addition of the processor
        final Action action = generateAuditRecord(processor, Operation.Add);
        if (action != null) {
            saveAction(action, logger);
        }

        return processor;
    }

    /**
     * Audits the configuration of a single processor.
     *
     * @param proceedingJoinPoint join point
     * @param processorDTO dto
     * @param processorDAO dao
     * @return node
     * @throws Throwable ex
     */
    @Around("within(org.apache.nifi.web.dao.ProcessorDAO+) && "
            + "execution(org.apache.nifi.controller.ProcessorNode updateProcessor(org.apache.nifi.web.api.dto.ProcessorDTO)) && "
            + "args(processorDTO) && "
            + "target(processorDAO)")
    public ProcessorNode updateProcessorAdvice(ProceedingJoinPoint proceedingJoinPoint, ProcessorDTO processorDTO, ProcessorDAO processorDAO) throws Throwable {
        // capture the values and scheduled state before the update so they can be diffed afterwards
        ProcessorNode processor = processorDAO.getProcessor(processorDTO.getId());
        final Map<String, String> values = extractConfiguredPropertyValues(processor, processorDTO);
        final ScheduledState scheduledState = processor.getScheduledState();

        // perform the actual update
        final ProcessorNode updatedProcessor = (ProcessorNode) proceedingJoinPoint.proceed();

        // if no exceptions were thrown, audit using the post-update state of the processor
        processor = processorDAO.getProcessor(updatedProcessor.getIdentifier());

        // only audit when an authenticated user is available
        NiFiUser user = NiFiUserUtils.getNiFiUser();
        if (user != null) {
            // determine the updated values
            Map<String, String> updatedValues = extractConfiguredPropertyValues(processor, processorDTO);

            // create the processor details
            FlowChangeExtensionDetails processorDetails = new FlowChangeExtensionDetails();
            processorDetails.setType(processor.getComponentType());

            // use a single timestamp so all actions from this update are grouped together
            Date actionTimestamp = new Date();
            Collection<Action> actions = new ArrayList<>();

            // audit each property/setting whose value actually changed
            for (final Map.Entry<String, String> entry : updatedValues.entrySet()) {
                final String property = entry.getKey();
                String newValue = entry.getValue();
                String oldValue = values.get(property);

                // Objects.equals treats two nulls as equal, so a property that is absent both
                // before and after the update no longer produces a spurious Configure action
                // (the previous null-handling recorded one whenever either side was null)
                if (!Objects.equals(oldValue, newValue)) {
                    // mask the value if this property is sensitive
                    final PropertyDescriptor propertyDescriptor = processor.getProcessor().getPropertyDescriptor(property);
                    if (propertyDescriptor != null && propertyDescriptor.isSensitive()) {
                        if (newValue != null) {
                            newValue = "********";
                        }
                        if (oldValue != null) {
                            oldValue = "********";
                        }
                    } else if (ANNOTATION_DATA.equals(property)) {
                        // annotation data can be very large; elide it from the audit trail
                        if (newValue != null) {
                            newValue = "<annotation data not shown>";
                        }
                        if (oldValue != null) {
                            oldValue = "<annotation data not shown>";
                        }
                    }

                    final FlowChangeConfigureDetails actionDetails = new FlowChangeConfigureDetails();
                    actionDetails.setName(property);
                    actionDetails.setValue(newValue);
                    actionDetails.setPreviousValue(oldValue);

                    // create a configuration action for this changed value
                    FlowChangeAction configurationAction = new FlowChangeAction();
                    configurationAction.setUserIdentity(user.getIdentity());
                    configurationAction.setOperation(Operation.Configure);
                    configurationAction.setTimestamp(actionTimestamp);
                    configurationAction.setSourceId(processor.getIdentifier());
                    configurationAction.setSourceName(processor.getName());
                    configurationAction.setSourceType(Component.Processor);
                    configurationAction.setComponentDetails(processorDetails);
                    configurationAction.setActionDetails(actionDetails);
                    actions.add(configurationAction);
                }
            }

            // determine the new scheduled state
            final ScheduledState updatedScheduledState = processor.getScheduledState();

            // determine if the running/enabled state has changed
            if (scheduledState != updatedScheduledState) {
                // create a processor action
                FlowChangeAction processorAction = new FlowChangeAction();
                processorAction.setUserIdentity(user.getIdentity());
                processorAction.setTimestamp(new Date());
                processorAction.setSourceId(processor.getIdentifier());
                processorAction.setSourceName(processor.getName());
                processorAction.setSourceType(Component.Processor);
                processorAction.setComponentDetails(processorDetails);

                // set the operation accordingly
                if (ScheduledState.RUNNING.equals(updatedScheduledState)) {
                    processorAction.setOperation(Operation.Start);
                } else if (ScheduledState.DISABLED.equals(updatedScheduledState)) {
                    processorAction.setOperation(Operation.Disable);
                } else {
                    // state is now stopped... consider the previous state
                    if (ScheduledState.RUNNING.equals(scheduledState)) {
                        processorAction.setOperation(Operation.Stop);
                    } else if (ScheduledState.DISABLED.equals(scheduledState)) {
                        processorAction.setOperation(Operation.Enable);
                    }
                }
                actions.add(processorAction);
            }

            // persist any actions that were recorded
            if (!actions.isEmpty()) {
                saveActions(actions, logger);
            }
        }

        return updatedProcessor;
    }

    /**
     * Audits the removal of a processor via deleteProcessor().
     *
     * @param proceedingJoinPoint join point
     * @param processorId processor id
     * @param processorDAO dao
     * @throws Throwable ex
     */
    @Around("within(org.apache.nifi.web.dao.ProcessorDAO+) && "
            + "execution(void deleteProcessor(java.lang.String)) && "
            + "args(processorId) && "
            + "target(processorDAO)")
    public void removeProcessorAdvice(ProceedingJoinPoint proceedingJoinPoint, String processorId, ProcessorDAO processorDAO) throws Throwable {
        // look the processor up before removing it so it can still be described afterwards
        ProcessorNode processor = processorDAO.getProcessor(processorId);

        // remove the processor
        proceedingJoinPoint.proceed();

        // if no exceptions were thrown, audit the processor removal
        final Action action = generateAuditRecord(processor, Operation.Remove);
        if (action != null) {
            saveAction(action, logger);
        }
    }

    /**
     * Generates an audit record for the creation of a processor.
     *
     * @param processor processor
     * @param operation operation
     * @return action
     */
    public Action generateAuditRecord(ProcessorNode processor, Operation operation) {
        return generateAuditRecord(processor, operation, null);
    }

    /**
     * Generates an audit record for the creation of a processor.
     *
     * @param processor processor
     * @param operation operation
     * @param actionDetails details
     * @return action, or {@code null} when no authenticated user is available
     */
    public Action generateAuditRecord(ProcessorNode processor, Operation operation, ActionDetails actionDetails) {
        FlowChangeAction action = null;

        // only audit when an authenticated user is available
        NiFiUser user = NiFiUserUtils.getNiFiUser();
        if (user != null) {
            // create the processor details
            FlowChangeExtensionDetails processorDetails = new FlowChangeExtensionDetails();
            processorDetails.setType(processor.getComponentType());

            // create the processor action for this operation
            action = new FlowChangeAction();
            action.setUserIdentity(user.getIdentity());
            action.setOperation(operation);
            action.setTimestamp(new Date());
            action.setSourceId(processor.getIdentifier());
            action.setSourceName(processor.getName());
            action.setSourceType(Component.Processor);
            action.setComponentDetails(processorDetails);
            if (actionDetails != null) {
                action.setActionDetails(actionDetails);
            }
        }

        return action;
    }

    /**
     * Extracts the values for the configured properties from the specified Processor.
     * Only the settings that the DTO actually carries (i.e. those being updated) are
     * captured, so the returned map can be diffed before/after the update.
     */
    private Map<String, String> extractConfiguredPropertyValues(ProcessorNode processor, ProcessorDTO processorDTO) {
        Map<String, String> values = new HashMap<>();

        if (processorDTO.getName() != null) {
            values.put(NAME, processor.getName());
        }
        if (processorDTO.getConfig() != null) {
            ProcessorConfigDTO newConfig = processorDTO.getConfig();
            if (newConfig.getConcurrentlySchedulableTaskCount() != null) {
                values.put(CONCURRENTLY_SCHEDULABLE_TASKS, String.valueOf(processor.getMaxConcurrentTasks()));
            }
            if (newConfig.getPenaltyDuration() != null) {
                values.put(PENALTY_DURATION, processor.getPenalizationPeriod());
            }
            if (newConfig.getYieldDuration() != null) {
                values.put(YIELD_DURATION, processor.getYieldPeriod());
            }
            if (newConfig.getBulletinLevel() != null) {
                values.put(BULLETIN_LEVEL, processor.getBulletinLevel().name());
            }
            if (newConfig.getAnnotationData() != null) {
                values.put(ANNOTATION_DATA, processor.getAnnotationData());
            }
            if (newConfig.getSchedulingPeriod() != null) {
                values.put(SCHEDULING_PERIOD, String.valueOf(processor.getSchedulingPeriod()));
            }
            if (newConfig.getAutoTerminatedRelationships() != null) {
                // get each of the auto terminated relationship names
                final Set<Relationship> autoTerminatedRelationships = processor.getAutoTerminatedRelationships();
                final List<String> autoTerminatedRelationshipNames = new ArrayList<>(autoTerminatedRelationships.size());
                for (final Relationship relationship : autoTerminatedRelationships) {
                    autoTerminatedRelationshipNames.add(relationship.getName());
                }

                // sort them so the audit value is stable, then include in the configuration
                Collections.sort(autoTerminatedRelationshipNames, Collator.getInstance(Locale.US));
                values.put(AUTO_TERMINATED_RELATIONSHIPS, StringUtils.join(autoTerminatedRelationshipNames, ", "));
            }
            if (newConfig.getProperties() != null) {
                // for each property specified, extract its configured value
                Map<String, String> properties = newConfig.getProperties();
                Map<PropertyDescriptor, String> configuredProperties = processor.getProperties();
                for (String propertyName : properties.keySet()) {
                    // build a descriptor for getting the configured value
                    PropertyDescriptor propertyDescriptor = new PropertyDescriptor.Builder().name(propertyName).build();
                    String configuredPropertyValue = configuredProperties.get(propertyDescriptor);

                    // if the configured value couldn't be found, use the default value from the actual descriptor
                    if (configuredPropertyValue == null) {
                        propertyDescriptor = locatePropertyDescriptor(configuredProperties.keySet(), propertyDescriptor);
                        configuredPropertyValue = propertyDescriptor.getDefaultValue();
                    }
                    values.put(propertyName, configuredPropertyValue);
                }
            }
            if (newConfig.getComments() != null) {
                values.put(COMMENTS, processor.getComments());
            }
            if (newConfig.getSchedulingStrategy() != null) {
                values.put(SCHEDULING_STRATEGY, processor.getSchedulingStrategy().name());
            }
            if (newConfig.getExecutionNode() != null) {
                values.put(EXECUTION_NODE, processor.getExecutionNode().name());
            }
        }

        return values;
    }

    /**
     * Locates the actual property descriptor for the given spec property descriptor.
     *
     * @param propertyDescriptors properties
     * @param specDescriptor example property
     * @return the matching descriptor, or the spec descriptor itself when none matches
     */
    private PropertyDescriptor locatePropertyDescriptor(Set<PropertyDescriptor> propertyDescriptors, PropertyDescriptor specDescriptor) {
        for (PropertyDescriptor propertyDescriptor : propertyDescriptors) {
            if (propertyDescriptor.equals(specDescriptor)) {
                return propertyDescriptor;
            }
        }
        return specDescriptor;
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/osconfig/v1alpha/vulnerability.proto
package com.google.cloud.osconfig.v1alpha;
// NOTE(review): protoc-generated file ("DO NOT EDIT" above) — only comments
// are added here; regenerate from vulnerability.proto for any real change.
public final class VulnerabilityProto {
  // Utility holder for the generated descriptors; never instantiated.
  private VulnerabilityProto() {}

  // vulnerability.proto declares no file-level extensions, so there is
  // nothing to add to the lite registry.
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}

  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
  }

  // One descriptor/field-accessor-table pair per message (including nested
  // messages) declared in the .proto file; populated by the static
  // initializer below.
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Details_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Details_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Details_Reference_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Details_Reference_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Item_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Item_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_osconfig_v1alpha_GetVulnerabilityReportRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_osconfig_v1alpha_GetVulnerabilityReportRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_osconfig_v1alpha_ListVulnerabilityReportsRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_osconfig_v1alpha_ListVulnerabilityReportsRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_osconfig_v1alpha_ListVulnerabilityReportsResponse_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_osconfig_v1alpha_ListVulnerabilityReportsResponse_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_osconfig_v1alpha_CVSSv3_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_osconfig_v1alpha_CVSSv3_fieldAccessorTable;

  // Returns the file descriptor shared by every message generated from
  // vulnerability.proto.
  public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
    return descriptor;
  }

  private static com.google.protobuf.Descriptors.FileDescriptor descriptor;

  static {
    // Serialized FileDescriptorProto for vulnerability.proto, split into
    // string chunks (class-file string constants are size-limited).
    java.lang.String[] descriptorData = {
      "\n1google/cloud/osconfig/v1alpha/vulnerab"
          + "ility.proto\022\035google.cloud.osconfig.v1alp"
          + "ha\032\034google/api/annotations.proto\032\037google"
          + "/api/field_behavior.proto\032\031google/api/re"
          + "source.proto\032\037google/protobuf/timestamp."
          + "proto\"\334\010\n\023VulnerabilityReport\022\021\n\004name\030\001 "
          + "\001(\tB\003\340A\003\022^\n\017vulnerabilities\030\002 \003(\0132@.goog"
          + "le.cloud.osconfig.v1alpha.VulnerabilityR"
          + "eport.VulnerabilityB\003\340A\003\0224\n\013update_time\030"
          + "\003 \001(\0132\032.google.protobuf.TimestampB\003\340A\003\032\226"
          + "\006\n\rVulnerability\022Y\n\007details\030\001 \001(\0132H.goog"
          + "le.cloud.osconfig.v1alpha.VulnerabilityR"
          + "eport.Vulnerability.Details\022(\n\034installed"
          + "_inventory_item_ids\030\002 \003(\tB\002\030\001\022(\n\034availab"
          + "le_inventory_item_ids\030\003 \003(\tB\002\030\001\022/\n\013creat"
          + "e_time\030\004 \001(\0132\032.google.protobuf.Timestamp"
          + "\022/\n\013update_time\030\005 \001(\0132\032.google.protobuf."
          + "Timestamp\022T\n\005items\030\006 \003(\0132E.google.cloud."
          + "osconfig.v1alpha.VulnerabilityReport.Vul"
          + "nerability.Item\032\236\002\n\007Details\022\013\n\003cve\030\001 \001(\t"
          + "\022\025\n\rcvss_v2_score\030\002 \001(\002\0226\n\007cvss_v3\030\003 \001(\013"
          + "2%.google.cloud.osconfig.v1alpha.CVSSv3\022"
          + "\020\n\010severity\030\004 \001(\t\022\023\n\013description\030\005 \001(\t\022f"
          + "\n\nreferences\030\006 \003(\0132R.google.cloud.osconf"
          + "ig.v1alpha.VulnerabilityReport.Vulnerabi"
          + "lity.Details.Reference\032(\n\tReference\022\013\n\003u"
          + "rl\030\001 \001(\t\022\016\n\006source\030\002 \001(\t\032}\n\004Item\022#\n\033inst"
          + "alled_inventory_item_id\030\001 \001(\t\022#\n\033availab"
          + "le_inventory_item_id\030\002 \001(\t\022\025\n\rfixed_cpe_"
          + "uri\030\003 \001(\t\022\024\n\014upstream_fix\030\004 \001(\t:\202\001\352A\177\n+o"
          + "sconfig.googleapis.com/VulnerabilityRepo"
          + "rt\022Pprojects/{project}/locations/{locati"
          + "on}/instances/{instance}/vulnerabilityRe"
          + "port\"b\n\035GetVulnerabilityReportRequest\022A\n"
          + "\004name\030\001 \001(\tB3\340A\002\372A-\n+osconfig.googleapis"
          + ".com/VulnerabilityReport\"\221\001\n\037ListVulnera"
          + "bilityReportsRequest\0227\n\006parent\030\001 \001(\tB\'\340A"
          + "\002\372A!\n\037compute.googleapis.com/Instance\022\021\n"
          + "\tpage_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\022\016\n\006"
          + "filter\030\004 \001(\t\"\216\001\n ListVulnerabilityReport"
          + "sResponse\022Q\n\025vulnerability_reports\030\001 \003(\013"
          + "22.google.cloud.osconfig.v1alpha.Vulnera"
          + "bilityReport\022\027\n\017next_page_token\030\002 \001(\t\"\335\n"
          + "\n\006CVSSv3\022\022\n\nbase_score\030\001 \001(\002\022\034\n\024exploita"
          + "bility_score\030\002 \001(\002\022\024\n\014impact_score\030\003 \001(\002"
          + "\022I\n\rattack_vector\030\005 \001(\01622.google.cloud.o"
          + "sconfig.v1alpha.CVSSv3.AttackVector\022Q\n\021a"
          + "ttack_complexity\030\006 \001(\01626.google.cloud.os"
          + "config.v1alpha.CVSSv3.AttackComplexity\022U"
          + "\n\023privileges_required\030\007 \001(\01628.google.clo"
          + "ud.osconfig.v1alpha.CVSSv3.PrivilegesReq"
          + "uired\022O\n\020user_interaction\030\010 \001(\01625.google"
          + ".cloud.osconfig.v1alpha.CVSSv3.UserInter"
          + "action\022:\n\005scope\030\t \001(\0162+.google.cloud.osc"
          + "onfig.v1alpha.CVSSv3.Scope\022L\n\026confidenti"
          + "ality_impact\030\n \001(\0162,.google.cloud.osconf"
          + "ig.v1alpha.CVSSv3.Impact\022F\n\020integrity_im"
          + "pact\030\013 \001(\0162,.google.cloud.osconfig.v1alp"
          + "ha.CVSSv3.Impact\022I\n\023availability_impact\030"
          + "\014 \001(\0162,.google.cloud.osconfig.v1alpha.CV"
          + "SSv3.Impact\"\231\001\n\014AttackVector\022\035\n\031ATTACK_V"
          + "ECTOR_UNSPECIFIED\020\000\022\031\n\025ATTACK_VECTOR_NET"
          + "WORK\020\001\022\032\n\026ATTACK_VECTOR_ADJACENT\020\002\022\027\n\023AT"
          + "TACK_VECTOR_LOCAL\020\003\022\032\n\026ATTACK_VECTOR_PHY"
          + "SICAL\020\004\"l\n\020AttackComplexity\022!\n\035ATTACK_CO"
          + "MPLEXITY_UNSPECIFIED\020\000\022\031\n\025ATTACK_COMPLEX"
          + "ITY_LOW\020\001\022\032\n\026ATTACK_COMPLEXITY_HIGH\020\002\"\222\001"
          + "\n\022PrivilegesRequired\022#\n\037PRIVILEGES_REQUI"
          + "RED_UNSPECIFIED\020\000\022\034\n\030PRIVILEGES_REQUIRED"
          + "_NONE\020\001\022\033\n\027PRIVILEGES_REQUIRED_LOW\020\002\022\034\n\030"
          + "PRIVILEGES_REQUIRED_HIGH\020\003\"m\n\017UserIntera"
          + "ction\022 \n\034USER_INTERACTION_UNSPECIFIED\020\000\022"
          + "\031\n\025USER_INTERACTION_NONE\020\001\022\035\n\031USER_INTER"
          + "ACTION_REQUIRED\020\002\"F\n\005Scope\022\025\n\021SCOPE_UNSP"
          + "ECIFIED\020\000\022\023\n\017SCOPE_UNCHANGED\020\001\022\021\n\rSCOPE_"
          + "CHANGED\020\002\"R\n\006Impact\022\026\n\022IMPACT_UNSPECIFIE"
          + "D\020\000\022\017\n\013IMPACT_HIGH\020\001\022\016\n\nIMPACT_LOW\020\002\022\017\n\013"
          + "IMPACT_NONE\020\003B\343\001\n!com.google.cloud.oscon"
          + "fig.v1alphaB\022VulnerabilityProtoP\001ZEgoogl"
          + "e.golang.org/genproto/googleapis/cloud/o"
          + "sconfig/v1alpha;osconfig\252\002\035Google.Cloud."
          + "OsConfig.V1Alpha\312\002\035Google\\Cloud\\OsConfig"
          + "\\V1alpha\352\002 Google::Cloud::OsConfig::V1al"
          + "phab\006proto3"
    };
    // Build the file descriptor, resolving the imports declared in the .proto.
    descriptor =
        com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
            descriptorData,
            new com.google.protobuf.Descriptors.FileDescriptor[] {
              com.google.api.AnnotationsProto.getDescriptor(),
              com.google.api.FieldBehaviorProto.getDescriptor(),
              com.google.api.ResourceProto.getDescriptor(),
              com.google.protobuf.TimestampProto.getDescriptor(),
            });
    // Wire up each message's descriptor and field accessor table in the same
    // order the messages appear in the .proto file.
    internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_descriptor =
        getDescriptor().getMessageTypes().get(0);
    internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_descriptor,
            new java.lang.String[] {
              "Name", "Vulnerabilities", "UpdateTime",
            });
    internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_descriptor =
        internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_descriptor
            .getNestedTypes()
            .get(0);
    internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_descriptor,
            new java.lang.String[] {
              "Details",
              "InstalledInventoryItemIds",
              "AvailableInventoryItemIds",
              "CreateTime",
              "UpdateTime",
              "Items",
            });
    internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Details_descriptor =
        internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_descriptor
            .getNestedTypes()
            .get(0);
    internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Details_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Details_descriptor,
            new java.lang.String[] {
              "Cve", "CvssV2Score", "CvssV3", "Severity", "Description", "References",
            });
    internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Details_Reference_descriptor =
        internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Details_descriptor
            .getNestedTypes()
            .get(0);
    internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Details_Reference_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Details_Reference_descriptor,
            new java.lang.String[] {
              "Url", "Source",
            });
    internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Item_descriptor =
        internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_descriptor
            .getNestedTypes()
            .get(1);
    internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Item_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_osconfig_v1alpha_VulnerabilityReport_Vulnerability_Item_descriptor,
            new java.lang.String[] {
              "InstalledInventoryItemId", "AvailableInventoryItemId", "FixedCpeUri", "UpstreamFix",
            });
    internal_static_google_cloud_osconfig_v1alpha_GetVulnerabilityReportRequest_descriptor =
        getDescriptor().getMessageTypes().get(1);
    internal_static_google_cloud_osconfig_v1alpha_GetVulnerabilityReportRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_osconfig_v1alpha_GetVulnerabilityReportRequest_descriptor,
            new java.lang.String[] {
              "Name",
            });
    internal_static_google_cloud_osconfig_v1alpha_ListVulnerabilityReportsRequest_descriptor =
        getDescriptor().getMessageTypes().get(2);
    internal_static_google_cloud_osconfig_v1alpha_ListVulnerabilityReportsRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_osconfig_v1alpha_ListVulnerabilityReportsRequest_descriptor,
            new java.lang.String[] {
              "Parent", "PageSize", "PageToken", "Filter",
            });
    internal_static_google_cloud_osconfig_v1alpha_ListVulnerabilityReportsResponse_descriptor =
        getDescriptor().getMessageTypes().get(3);
    internal_static_google_cloud_osconfig_v1alpha_ListVulnerabilityReportsResponse_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_osconfig_v1alpha_ListVulnerabilityReportsResponse_descriptor,
            new java.lang.String[] {
              "VulnerabilityReports", "NextPageToken",
            });
    internal_static_google_cloud_osconfig_v1alpha_CVSSv3_descriptor =
        getDescriptor().getMessageTypes().get(4);
    internal_static_google_cloud_osconfig_v1alpha_CVSSv3_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_osconfig_v1alpha_CVSSv3_descriptor,
            new java.lang.String[] {
              "BaseScore",
              "ExploitabilityScore",
              "ImpactScore",
              "AttackVector",
              "AttackComplexity",
              "PrivilegesRequired",
              "UserInteraction",
              "Scope",
              "ConfidentialityImpact",
              "IntegrityImpact",
              "AvailabilityImpact",
            });
    // Re-parse the descriptor with the custom options (field_behavior,
    // resource, resource_reference) registered so they are retained.
    com.google.protobuf.ExtensionRegistry registry =
        com.google.protobuf.ExtensionRegistry.newInstance();
    registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
    registry.add(com.google.api.ResourceProto.resource);
    registry.add(com.google.api.ResourceProto.resourceReference);
    com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
        descriptor, registry);
    // Force initialization of the dependency descriptors.
    com.google.api.AnnotationsProto.getDescriptor();
    com.google.api.FieldBehaviorProto.getDescriptor();
    com.google.api.ResourceProto.getDescriptor();
    com.google.protobuf.TimestampProto.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.PrefixCodedTerms;
import org.apache.lucene.index.PrefixCodedTerms.TermIterator;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.queries.intervals.IntervalsSource;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.NormsFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.query.DistanceFeatureQueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.fetch.subphase.FetchFieldsPhase;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.time.ZoneId;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* This defines the core properties and functions to operate on a field.
*/
public abstract class MappedFieldType {
    // Fully-qualified name of the field within the mapping.
    private final String name;
    // Whether per-document doc values are available for this field.
    private final boolean docValues;
    // Whether the field is indexed (and therefore searchable).
    private final boolean isIndexed;
    // Whether the original value is stored separately (retrievable via stored fields).
    private final boolean isStored;
    // Describes how (and whether) text-based queries may run against this field.
    private final TextSearchInfo textSearchInfo;
    // Arbitrary user-supplied metadata attached to the field mapping.
    private final Map<String, String> meta;
    public MappedFieldType(String name, boolean isIndexed, boolean isStored,
                           boolean hasDocValues, TextSearchInfo textSearchInfo, Map<String, String> meta) {
        this.name = Objects.requireNonNull(name);
        this.isIndexed = isIndexed;
        this.isStored = isStored;
        this.docValues = hasDocValues;
        this.textSearchInfo = Objects.requireNonNull(textSearchInfo);
        this.meta = Objects.requireNonNull(meta);
    }
    /**
     * Return a fielddata builder for this field
     *
     * @param fullyQualifiedIndexName the name of the index this field-data is built for
     * @param searchLookup a {@link SearchLookup} supplier to allow for accessing other fields values in the context of runtime fields
     * @throws IllegalArgumentException if the fielddata is not supported on this type.
     *                                  An IllegalArgumentException is needed in order to return an http error 400
     *                                  when this error occurs in a request. see: {@link org.elasticsearch.ExceptionsHelper#status}
     */
    public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
        throw new IllegalArgumentException("Fielddata is not supported on field [" + name() + "] of type [" + typeName() + "]");
    }
    /**
     * Create a helper class to fetch field values during the {@link FetchFieldsPhase}.
     *
     * New field types must implement this method in order to support the search 'fields' option. Except
     * for metadata fields, field types should not throw {@link UnsupportedOperationException} since this
     * could cause a search retrieving multiple fields (like "fields": ["*"]) to fail.
     */
    public abstract ValueFetcher valueFetcher(SearchExecutionContext context, @Nullable String format);
    /** Returns the name of this type, as would be specified in mapping properties */
    public abstract String typeName();
    /** Returns the field family type, as used in field capabilities */
    public String familyTypeName() {
        // By default a field's family is its own type; subtypes override to group with a parent type.
        return typeName();
    }
    /** Returns the full name of the field. */
    public String name() {
        return name;
    }
    /** Returns true if doc values are enabled for this field. */
    public boolean hasDocValues() {
        return docValues;
    }
    /**
     * Returns the collapse type of the field
     * CollapseType.NONE means the field can't be used for collapsing.
     * @return collapse type of the field
     */
    public CollapseType collapseType() {
        return CollapseType.NONE;
    }
    /** Given a value that comes from the stored fields API, convert it to the
     * expected type. For instance a date field would store dates as longs and
     * format it back to a string in this method. */
    public Object valueForDisplay(Object value) {
        // Default: values are returned as stored, with no conversion.
        return value;
    }
    /**
     * Returns true if the field is searchable.
     */
    public boolean isSearchable() {
        return isIndexed;
    }
    /**
     * Returns true if the field is stored separately.
     */
    public boolean isStored() {
        return isStored;
    }
    /**
     * If the field supports using the indexed data to speed up operations related to ordering of data, such as sorting or aggs, return
     * a function for doing that. If it is unsupported for this field type, there is no need to override this method.
     *
     * @return null if the optimization cannot be applied, otherwise a function to use for the optimization
     */
    @Nullable
    public Function<byte[], Number> pointReaderIfPossible() {
        return null;
    }
    /** Returns true if the field is aggregatable.
     *
     */
    public boolean isAggregatable() {
        // Probe fielddata support: fielddataBuilder throws IllegalArgumentException
        // (see above) when fielddata is unsupported, which we translate to "not aggregatable".
        try {
            fielddataBuilder("", () -> {
                throw new UnsupportedOperationException("SearchLookup not available");
            });
            return true;
        } catch (IllegalArgumentException e) {
            return false;
        }
    }
    /**
     * @return true if field has been marked as a dimension field
     */
    public boolean isDimension() {
        return false;
    }
    /** Generates a query that will only match documents that contain the given value.
     * The default implementation returns a {@link TermQuery} over the value bytes
     * @throws IllegalArgumentException if {@code value} cannot be converted to the expected data type or if the field is not searchable
     *      due to the way it is configured (eg. not indexed)
     * @throws ElasticsearchParseException if {@code value} cannot be converted to the expected data type
     * @throws UnsupportedOperationException if the field is not searchable regardless of options
     * @throws QueryShardException if the field is not searchable regardless of options
     */
    // TODO: Standardize exception types
    public abstract Query termQuery(Object value, @Nullable SearchExecutionContext context);
    // Case insensitive form of term query (not supported by all fields so must be overridden to enable)
    public Query termQueryCaseInsensitive(Object value, @Nullable SearchExecutionContext context) {
        throw new QueryShardException(context, "[" + name + "] field which is of type [" + typeName() +
            "], does not support case insensitive term queries");
    }
    /** Build a constant-scoring query that matches all values. The default implementation uses a
     * {@link ConstantScoreQuery} around a {@link BooleanQuery} whose {@link Occur#SHOULD} clauses
     * are generated with {@link #termQuery}. */
    public Query termsQuery(Collection<?> values, @Nullable SearchExecutionContext context) {
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        for (Object value : values) {
            builder.add(termQuery(value, context), Occur.SHOULD);
        }
        return new ConstantScoreQuery(builder.build());
    }
    /**
     * Factory method for range queries.
     * @param relation the relation, nulls should be interpreted like INTERSECTS
     */
    public Query rangeQuery(
        Object lowerTerm, Object upperTerm,
        boolean includeLower, boolean includeUpper,
        ShapeRelation relation, ZoneId timeZone, DateMathParser parser,
        SearchExecutionContext context) {
        // Not supported by default; range-capable field types override this.
        throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support range queries");
    }
    // Not supported by default; fuzzy-capable field types (keyword, text) override this.
    public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions,
                            SearchExecutionContext context) {
        throw new IllegalArgumentException("Can only use fuzzy queries on keyword and text fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    // Case sensitive form of prefix query
    public final Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, SearchExecutionContext context) {
        return prefixQuery(value, method, false, context);
    }
    // Not supported by default; prefix-capable field types override this.
    public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, boolean caseInsensitve,
                             SearchExecutionContext context) {
        throw new QueryShardException(context, "Can only use prefix queries on keyword, text and wildcard fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    // Case sensitive form of wildcard query
    public final Query wildcardQuery(String value,
                                     @Nullable MultiTermQuery.RewriteMethod method, SearchExecutionContext context
    ) {
        return wildcardQuery(value, method, false, context);
    }
    // Not supported by default; wildcard-capable field types override this.
    public Query wildcardQuery(String value,
                               @Nullable MultiTermQuery.RewriteMethod method,
                               boolean caseInsensitve, SearchExecutionContext context) {
        throw new QueryShardException(context, "Can only use wildcard queries on keyword, text and wildcard fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    // Wildcard query over values normalized by the field's normalizer; unsupported by default.
    public Query normalizedWildcardQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, SearchExecutionContext context) {
        throw new QueryShardException(context, "Can only use wildcard queries on keyword, text and wildcard fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    // Not supported by default; regexp-capable field types override this.
    public Query regexpQuery(String value, int syntaxFlags, int matchFlags, int maxDeterminizedStates,
                             @Nullable MultiTermQuery.RewriteMethod method, SearchExecutionContext context) {
        throw new QueryShardException(context, "Can only use regexp queries on keyword and text fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    /**
     * Build a query matching documents that have any value for this field, picking the
     * cheapest available evidence: doc values, then norms, then the _field_names field.
     */
    public Query existsQuery(SearchExecutionContext context) {
        if (hasDocValues()) {
            return new DocValuesFieldExistsQuery(name());
        } else if (getTextSearchInfo().hasNorms()) {
            return new NormsFieldExistsQuery(name());
        } else {
            return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
        }
    }
    // Not supported by default; text field types override this.
    public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements,
                             SearchExecutionContext context) throws IOException {
        throw new IllegalArgumentException("Can only use phrase queries on text fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    // Not supported by default; text field types override this.
    public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements,
                                  SearchExecutionContext context) throws IOException {
        throw new IllegalArgumentException("Can only use phrase queries on text fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    // Not supported by default; text field types override this.
    public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) throws IOException {
        throw new IllegalArgumentException("Can only use phrase prefix queries on text fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    // Not supported by default; text field types override this.
    public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, SearchExecutionContext context) {
        throw new IllegalArgumentException("Can only use span prefix queries on text fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    // Only date, date_nanos and geo_point field types support distance_feature (see message below).
    public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionContext context) {
        throw new IllegalArgumentException("Illegal data type of [" + typeName() + "]!"+
            "[" + DistanceFeatureQueryBuilder.NAME + "] query can only be run on a date, date_nanos or geo_point field type!");
    }
    /**
     * Create an {@link IntervalsSource} for the given term.
     */
    public IntervalsSource termIntervals(BytesRef term, SearchExecutionContext context) {
        throw new IllegalArgumentException("Can only use interval queries on text fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    /**
     * Create an {@link IntervalsSource} for the given prefix.
     */
    public IntervalsSource prefixIntervals(BytesRef prefix, SearchExecutionContext context) {
        throw new IllegalArgumentException("Can only use interval queries on text fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    /**
     * Create a fuzzy {@link IntervalsSource} for the given term.
     */
    public IntervalsSource fuzzyIntervals(String term, int maxDistance, int prefixLength,
                                          boolean transpositions, SearchExecutionContext context) {
        throw new IllegalArgumentException("Can only use interval queries on text fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    /**
     * Create a wildcard {@link IntervalsSource} for the given pattern.
     */
    public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContext context) {
        throw new IllegalArgumentException("Can only use interval queries on text fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
    }
    /**
     * An enum used to describe the relation between the range of terms in a
     * shard when compared with a query range
     */
    public enum Relation {
        WITHIN,
        INTERSECTS,
        DISJOINT
    }
    /** Return whether all values of the given {@link IndexReader} are within the range,
     * outside the range or cross the range. The default implementation returns
     * {@link Relation#INTERSECTS}, which is always fine to return when there is
     * no way to check whether values are actually within bounds. */
    public Relation isFieldWithinQuery(
        IndexReader reader,
        Object from, Object to,
        boolean includeLower, boolean includeUpper,
        ZoneId timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException {
        return Relation.INTERSECTS;
    }
    /** @throws IllegalArgumentException if the fielddata is not supported on this type.
     *          An IllegalArgumentException is needed in order to return an http error 400
     *          when this error occurs in a request. see: {@link org.elasticsearch.ExceptionsHelper#status}
     **/
    protected final void failIfNoDocValues() {
        if (hasDocValues() == false) {
            throw new IllegalArgumentException("Can't load fielddata on [" + name()
                + "] because fielddata is unsupported on fields of type ["
                + typeName() + "]. Use doc values instead.");
        }
    }
    protected final void failIfNotIndexed() {
        if (isIndexed == false) {
            // we throw an IAE rather than an ISE so that it translates to a 4xx code rather than 5xx code on the http layer
            throw new IllegalArgumentException("Cannot search on field [" + name() + "] since it is not indexed.");
        }
    }
    /**
     * @return if this field type should load global ordinals eagerly
     */
    public boolean eagerGlobalOrdinals() {
        return false;
    }
    /**
     * Pick a {@link DocValueFormat} that can be used to display and parse
     * values of fields of this type.
     */
    public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
        // The RAW format accepts neither a custom format nor a time zone; reject both up front.
        checkNoFormat(format);
        checkNoTimeZone(timeZone);
        return DocValueFormat.RAW;
    }
    /**
     * Validate the provided {@code format} is null.
     */
    protected void checkNoFormat(@Nullable String format) {
        if (format != null) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
        }
    }
    /**
     * Validate the provided {@code timeZone} is null.
     */
    protected void checkNoTimeZone(@Nullable ZoneId timeZone) {
        if (timeZone != null) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones");
        }
    }
    /**
     * Extract a {@link Term} from a query created with {@link #termQuery} by
     * recursively removing {@link BoostQuery} wrappers.
     * @throws IllegalArgumentException if the wrapped query is not a {@link TermQuery}
     */
    public static Term extractTerm(Query termQuery) {
        // Peel off any number of boost wrappers to reach the underlying query.
        while (termQuery instanceof BoostQuery) {
            termQuery = ((BoostQuery) termQuery).getQuery();
        }
        // A single-term TermInSetQuery is equivalent to a TermQuery; unwrap it too.
        if (termQuery instanceof TermInSetQuery) {
            TermInSetQuery tisQuery = (TermInSetQuery) termQuery;
            PrefixCodedTerms terms = tisQuery.getTermData();
            if (terms.size() == 1) {
                TermIterator it = terms.iterator();
                BytesRef term = it.next();
                return new Term(it.field(), term);
            }
        }
        if (termQuery instanceof TermQuery == false) {
            throw new IllegalArgumentException("Cannot extract a term from a query of type "
                + termQuery.getClass() + ": " + termQuery);
        }
        return ((TermQuery) termQuery).getTerm();
    }
    /**
     * Get the metadata associated with this field.
     */
    public Map<String, String> meta() {
        return meta;
    }
    /**
     * Returns information on how any text in this field is indexed
     *
     * Fields that do not support any text-based queries should return
     * {@link TextSearchInfo#NONE}. Some fields (eg keyword) may support
     * only simple match queries, and can return
     * {@link TextSearchInfo#SIMPLE_MATCH_ONLY}; other fields may support
     * simple match queries without using the terms index, and can return
     * {@link TextSearchInfo#SIMPLE_MATCH_WITHOUT_TERMS}
     */
    public TextSearchInfo getTextSearchInfo() {
        return textSearchInfo;
    }
    public enum CollapseType {
        NONE, // this field is not collapsable
        KEYWORD,
        NUMERIC
    }
    /**
     * This method is used to support auto-complete services and implementations
     * are expected to find terms beginning with the provided string very quickly.
     * If fields cannot look up matching terms quickly they should return null.
     * The returned TermEnum should implement next(), term() and doc_freq() methods
     * but postings etc are not required.
     * @param caseInsensitive if matches should be case insensitive
     * @param string the partially complete word the user has typed (can be empty)
     * @param queryShardContext the shard context
     * @param searchAfter - usually null. If supplied the TermsEnum result must be positioned after the provided term (used for pagination)
     * @return null or an enumeration of matching terms and their doc frequencies
     * @throws IOException Errors accessing data
     */
    public TermsEnum getTerms(boolean caseInsensitive, String string, SearchExecutionContext queryShardContext, String searchAfter)
        throws IOException {
        return null;
    }
}
| |
package com.ctrip.xpipe.redis.console.service.impl;
import com.ctrip.xpipe.cluster.ClusterType;
import com.ctrip.xpipe.redis.console.model.*;
import com.ctrip.xpipe.redis.console.sentinel.SentinelBalanceService;
import com.ctrip.xpipe.redis.console.service.ShardService;
import com.google.common.collect.Lists;
import org.junit.Assert;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
public class SentinelGroupServiceTest extends AbstractServiceImplTest {
    @Autowired
    private SentinelGroupServiceImpl sentinelGroupService;
    @Autowired
    private SentinelServiceImpl sentinelService;
    @Autowired
    private ShardService shardService;
    @Autowired
    private SentinelBalanceService sentinelBalanceService;
    @Test
    public void find() {
        // Look up by dc name, then by dc + cluster type, then by id.
        List<SentinelGroupModel> dcSentinels = sentinelGroupService.findAllByDcName("jq");
        // NOTE: assertEquals takes (expected, actual); previous calls here had the
        // arguments reversed, producing misleading failure messages.
        Assert.assertEquals(1, dcSentinels.size());
        Assert.assertEquals(1L, dcSentinels.get(0).getSentinelGroupId());
        Assert.assertEquals(ClusterType.ONE_WAY.name(), dcSentinels.get(0).getClusterType().toUpperCase());
        Assert.assertEquals(3, dcSentinels.get(0).getSentinels().size());
        dcSentinels = sentinelGroupService.findAllByDcAndType("jq", ClusterType.SINGLE_DC);
        Assert.assertEquals(0, dcSentinels.size());
        dcSentinels = sentinelGroupService.findAllByDcAndType("oy", ClusterType.ONE_WAY);
        Assert.assertEquals(1, dcSentinels.size());
        Assert.assertEquals(2L, dcSentinels.get(0).getSentinelGroupId());
        Assert.assertEquals(ClusterType.ONE_WAY.name(), dcSentinels.get(0).getClusterType().toUpperCase());
        Assert.assertEquals(3, dcSentinels.get(0).getSentinels().size());
        SentinelGroupModel sentinelGroupModel = sentinelGroupService.findById(3);
        Assert.assertEquals(2, sentinelGroupModel.getSentinels().size());
    }
    @Test
    public void findByShard() {
        // A one-way cluster's shard should map to one sentinel group per dc (jq + oy).
        createCluster(ClusterType.ONE_WAY, Lists.newArrayList("one_way_shard_1", "one_way_shard_2"), "one_way_cluster");
        List<ShardTbl> clusterShards = shardService.findAllByClusterName("one_way_cluster");
        Map<Long, SentinelGroupModel> shardSentinels = sentinelGroupService.findByShard(clusterShards.get(0).getId());
        Assert.assertEquals(2, shardSentinels.size());
    }
    @Test
    public void addSentinelGroup() {
        // Add a single-dc group: all three instances live in dc 1.
        SentinelGroupModel sentinelGroupModel1 = new SentinelGroupModel().setClusterType(ClusterType.SINGLE_DC.name()).setSentinels(Lists.newArrayList(
                new SentinelInstanceModel().setDcId(1L).setSentinelIp("127.0.0.1").setSentinelPort(6000),
                new SentinelInstanceModel().setDcId(1L).setSentinelIp("127.0.0.1").setSentinelPort(6001),
                new SentinelInstanceModel().setDcId(1L).setSentinelIp("127.0.0.1").setSentinelPort(6002)
        ));
        sentinelGroupService.addSentinelGroup(sentinelGroupModel1);
        List<SentinelGroupModel> singleDcSentinels = sentinelGroupService.findAllByDcAndType("jq", ClusterType.SINGLE_DC);
        Assert.assertEquals(1, singleDcSentinels.size());
        Assert.assertEquals(3, singleDcSentinels.get(0).getSentinels().size());
        Assert.assertEquals(ClusterType.SINGLE_DC.name(), singleDcSentinels.get(0).getClusterType());
        Assert.assertEquals(1, singleDcSentinels.get(0).dcIds().size());
        // Add a cross-dc group: instances spread over dcs 1, 2 and 3; the per-dc
        // view for "jq" only exposes the instance in dc 1.
        SentinelGroupModel sentinelGroupModel2 = new SentinelGroupModel().setClusterType(ClusterType.CROSS_DC.name()).setSentinels(Lists.newArrayList(
                new SentinelInstanceModel().setDcId(1L).setSentinelIp("127.0.0.1").setSentinelPort(7000),
                new SentinelInstanceModel().setDcId(2L).setSentinelIp("127.0.0.1").setSentinelPort(7001),
                new SentinelInstanceModel().setDcId(3L).setSentinelIp("127.0.0.1").setSentinelPort(7002)
        ));
        sentinelGroupService.addSentinelGroup(sentinelGroupModel2);
        List<SentinelGroupModel> crossDcSentinels = sentinelGroupService.findAllByDcAndType("jq", ClusterType.CROSS_DC);
        Assert.assertEquals(1, crossDcSentinels.size());
        Assert.assertEquals(1, crossDcSentinels.get(0).getSentinels().size());
        Assert.assertEquals(ClusterType.CROSS_DC.name(), crossDcSentinels.get(0).getClusterType());
        Assert.assertEquals(1, crossDcSentinels.get(0).dcIds().size());
        // Empty group 1, then re-add instances to it via addSentinelGroup with an existing id.
        List<SentinelTbl> sentinelTbls = sentinelService.findBySentinelGroupId(1);
        sentinelTbls.forEach(sentinelTbl -> {
            sentinelService.delete(sentinelTbl.getSentinelId());
        });
        SentinelGroupModel sentinelGroupModel = sentinelGroupService.findById(1);
        Assert.assertEquals(0, sentinelGroupModel.getSentinels().size());
        sentinelGroupService.addSentinelGroup(new SentinelGroupModel().setSentinelGroupId(1).setSentinels(Lists.newArrayList(
                new SentinelInstanceModel().setDcId(1L).setSentinelIp("127.0.0.1").setSentinelPort(8000),
                new SentinelInstanceModel().setDcId(2L).setSentinelIp("127.0.0.1").setSentinelPort(8001),
                new SentinelInstanceModel().setDcId(3L).setSentinelIp("127.0.0.1").setSentinelPort(8002)
        )));
        sentinelGroupModel = sentinelGroupService.findById(1);
        Assert.assertEquals(3, sentinelGroupModel.getSentinels().size());
    }
    @Test
    public void getSentinelGroupsWithUsageByType() {
        // Seed one extra one-way group in dc 1 and two cross-dc groups.
        SentinelGroupModel sentinelGroupModel1 = new SentinelGroupModel().setClusterType(ClusterType.ONE_WAY.name()).setSentinels(Lists.newArrayList(
                new SentinelInstanceModel().setDcId(1L).setSentinelIp("127.0.0.1").setSentinelPort(6000),
                new SentinelInstanceModel().setDcId(1L).setSentinelIp("127.0.0.1").setSentinelPort(6001),
                new SentinelInstanceModel().setDcId(1L).setSentinelIp("127.0.0.1").setSentinelPort(6002)
        ));
        sentinelGroupService.addSentinelGroup(sentinelGroupModel1);
        SentinelGroupModel sentinelGroupModelCrossDc1 = new SentinelGroupModel().setClusterType(ClusterType.CROSS_DC.name()).setSentinels(Lists.newArrayList(
                new SentinelInstanceModel().setDcId(1L).setSentinelIp("127.0.0.1").setSentinelPort(7000),
                new SentinelInstanceModel().setDcId(2L).setSentinelIp("127.0.0.1").setSentinelPort(7001),
                new SentinelInstanceModel().setDcId(2L).setSentinelIp("127.0.0.1").setSentinelPort(7002)
        ));
        sentinelGroupService.addSentinelGroup(sentinelGroupModelCrossDc1);
        SentinelGroupModel sentinelGroupModelCrossDc2 = new SentinelGroupModel().setClusterType(ClusterType.CROSS_DC.name()).setSentinels(Lists.newArrayList(
                new SentinelInstanceModel().setDcId(1L).setSentinelIp("127.0.0.1").setSentinelPort(8000),
                new SentinelInstanceModel().setDcId(1L).setSentinelIp("127.0.0.1").setSentinelPort(8001),
                new SentinelInstanceModel().setDcId(2L).setSentinelIp("127.0.0.1").setSentinelPort(8002),
                new SentinelInstanceModel().setDcId(3L).setSentinelIp("127.0.0.1").setSentinelPort(8003),
                new SentinelInstanceModel().setDcId(3L).setSentinelIp("127.0.0.1").setSentinelPort(8004)
        ));
        sentinelGroupService.addSentinelGroup(sentinelGroupModelCrossDc2);
        //force refresh cache
        sentinelBalanceService.selectSentinelWithoutCache("OY", ClusterType.ONE_WAY);
        // Two one-way clusters with 3 + 2 shards => 5 shard instances per dc.
        createCluster(ClusterType.ONE_WAY, Lists.newArrayList("one_way_shard_11", "one_way_shard_12", "one_way_shard_13"), "one_way_1");
        createCluster(ClusterType.ONE_WAY, Lists.newArrayList("one_way_shard_21", "one_way_shard_22"), "one_way_2");
        List<SentinelGroupModel> result = sentinelGroupService.getSentinelGroupsWithUsageByType(ClusterType.ONE_WAY);
        Assert.assertEquals(4, result.size());
        // Accumulate one-way shard usage per dc (1 = jq, 2 = oy, other = fra).
        int jqUsageOneWay = 0;
        int oyUsageOneWay = 0;
        int fraUsageOneWay = 0;
        for (SentinelGroupModel sentinelGroupModel : result) {
            if (sentinelGroupModel.getClusterType().equalsIgnoreCase(ClusterType.ONE_WAY.name()) && sentinelGroupModel.dcIds().size() == 1) {
                long dcId = sentinelGroupModel.dcIds().iterator().next();
                if (dcId == 1L)
                    jqUsageOneWay += sentinelGroupModel.getShardCount();
                else if (dcId == 2L)
                    oyUsageOneWay += sentinelGroupModel.getShardCount();
                else
                    fraUsageOneWay += sentinelGroupModel.getShardCount();
            }
        }
        Assert.assertEquals(5, jqUsageOneWay);
        Assert.assertEquals(5, oyUsageOneWay);
        Assert.assertEquals(0, fraUsageOneWay);
        createCluster(ClusterType.CROSS_DC, Lists.newArrayList("cross_dc_shard_1", "cross_dc_shard_2"), "cross_dc_cluster");
        result = sentinelGroupService.getSentinelGroupsWithUsageByType(ClusterType.CROSS_DC);
        Assert.assertEquals(2, result.size());
        int crossDcUsage = 0;
        for (SentinelGroupModel sentinelGroupModel : result) {
            crossDcUsage += sentinelGroupModel.getShardCount();
        }
        Assert.assertEquals(2, crossDcUsage);
        // Aggregate view across all cluster types.
        List<SentinelGroupModel> all = sentinelGroupService.getAllSentinelGroupsWithUsage();
        Assert.assertEquals(6, all.size());
        int xpipeSentinels = 0;
        int xpipeSentinelsUsage = 0;
        int crossSentinels = 0;
        int crossSentinelsUsage = 0;
        for (SentinelGroupModel sentinelGroupModel : all) {
            if (sentinelGroupModel.getClusterType().equalsIgnoreCase(ClusterType.ONE_WAY.name())) {
                xpipeSentinels++;
                xpipeSentinelsUsage += sentinelGroupModel.getShardCount();
            } else if (sentinelGroupModel.getClusterType().equalsIgnoreCase(ClusterType.CROSS_DC.name())) {
                crossSentinels++;
                crossSentinelsUsage += sentinelGroupModel.getShardCount();
            }
        }
        Assert.assertEquals(4, xpipeSentinels);
        Assert.assertEquals(2, crossSentinels);
        Assert.assertEquals(10, xpipeSentinelsUsage);
        Assert.assertEquals(2, crossSentinelsUsage);
        Map<String, SentinelUsageModel> allUsages = sentinelGroupService.getAllSentinelsUsage();
        Assert.assertEquals(3, allUsages.size());
        Assert.assertEquals(4, allUsages.get("jq").getSentinelUsages().size());
        Assert.assertEquals(3, allUsages.get("oy").getSentinelUsages().size());
        Assert.assertEquals(2, allUsages.get("fra").getSentinelUsages().size());
    }
    @Test
    public void updateSentinelGroup() {
        // Change the address of one instance and verify the group's address string follows.
        List<SentinelGroupModel> dcSentinels = sentinelGroupService.findAllByDcName("jq");
        SentinelGroupModel toUpdate = dcSentinels.get(0);
        List<SentinelInstanceModel> instanceModels = toUpdate.getSentinels();
        SentinelInstanceModel updateInstance = instanceModels.get(0);
        String oldIp = updateInstance.getSentinelIp();
        int oldPort = updateInstance.getSentinelPort();
        Assert.assertTrue(toUpdate.getSentinelsAddressString().contains(String.format("%s:%d", oldIp, oldPort)));
        String newIp = "127.0.0.2";
        int newPort = 9999;
        updateInstance.setSentinelIp(newIp).setSentinelPort(newPort);
        sentinelGroupService.updateSentinelGroupAddress(toUpdate);
        SentinelGroupModel updated = sentinelGroupService.findById(toUpdate.getSentinelGroupId());
        Assert.assertFalse(updated.getSentinelsAddressString().contains(String.format("%s:%d", oldIp, oldPort)));
        Assert.assertTrue(updated.getSentinelsAddressString().contains(String.format("%s:%d", newIp, newPort)));
    }
    @Test
    public void deleteAndReheal() {
        // Soft-delete a group, verify it disappears from dc lookups, then reheal and
        // verify it comes back with its instances intact.
        List<SentinelGroupModel> dcSentinels = sentinelGroupService.findAllByDcName("jq");
        SentinelGroupModel toDelete = dcSentinels.get(0);
        sentinelGroupService.delete(toDelete.getSentinelGroupId());
        sentinelGroupService.findAllByDcName("jq").forEach(sentinelGroupModel -> {
            Assert.assertNotEquals(toDelete.getSentinelGroupId(), sentinelGroupModel.getSentinelGroupId());
        });
        sentinelGroupService.reheal(toDelete.getSentinelGroupId());
        Assert.assertNotNull(sentinelGroupService.findById(toDelete.getSentinelGroupId()));
        AtomicBoolean rehealed = new AtomicBoolean(false);
        sentinelGroupService.findAllByDcName("jq").forEach(sentinelGroupModel -> {
            if (toDelete.getSentinelGroupId() == sentinelGroupModel.getSentinelGroupId()) {
                rehealed.set(true);
                Assert.assertTrue(sentinelGroupModel.getSentinels().size() > 1);
            }
        });
        Assert.assertTrue(rehealed.get());
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package view;
import java.awt.event.ActionListener;
import javax.swing.JOptionPane;
import javax.swing.table.DefaultTableModel;
/**
*
* @author acer
*/
public class viewPeramalan extends javax.swing.JFrame {
    /**
     * Creates the forecasting (peramalan) form and lays out its
     * NetBeans-generated components.
     */
    public viewPeramalan() {
        initComponents();
    }
    /**
     * Shows the given text in the name label (e.g. the logged-in user's name —
     * presumably; confirm against the controller that calls this).
     *
     * @param text the text to display
     */
    public void SetName(String text) {
        Nametag.setText(text);
    }
    /**
     * Registers a listener invoked when the "back" button is clicked.
     *
     * @param a the action listener supplied by the controller
     */
    public void KlikBack(ActionListener a) {
        btnback.addActionListener(a);
    }
public String Getname() {
String name;
name = Nametag.getText();
return name;
}
// public String[] GetData() {
// String Data[] = new String[6];
// Data[0] = this.id_transpinjaman.getText();
// Data[1] = this.tgl_pinjaman.getText();
// Data[2] = this.id_nasabah.getText();
// Data[3] = this.besar_pinjaman.getText();
// Data[4] = clogin.datapetugas[0];
// Data[5] = this.cicil.getText();
// return Data;
// }
    /**
     * Registers a listener invoked when the window's exit button is clicked.
     *
     * @param action the action listener supplied by the controller
     */
    public void klikexit(ActionListener action) {
        tombolexit.addActionListener(action);
    }
    /**
     * Registers a listener invoked when the window's minimize button is clicked.
     *
     * @param action the action listener supplied by the controller
     */
    public void klikminimize(ActionListener action) {
        tombolminimize.addActionListener(action);
    }
    /**
     * Registers a listener invoked when the "Ramal" (forecast) button is clicked.
     *
     * @param action the action listener supplied by the controller
     */
    public void klikRamal(ActionListener action) {
        btnRamal.addActionListener(action);
    }
public String[] getDataRamal () {
String [] data = new String [2] ;
data [0] = JTJumlahBibit.getText();
data [1] = JtLuasKolam .getText();
return data ;
}
    /**
     * Shows an informational dialog with the given message, parented to this frame.
     *
     * @param message the text to display
     */
    public void message(String message) {
        JOptionPane.showMessageDialog(this, message);
    }
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
tombolexit = new javax.swing.JButton();
tombolminimize = new javax.swing.JButton();
Nametag = new javax.swing.JLabel();
jLabel1 = new javax.swing.JLabel();
btnback = new javax.swing.JButton();
JTJumlahBibit = new javax.swing.JTextField();
JtLuasKolam = new javax.swing.JTextField();
jLabel2 = new javax.swing.JLabel();
jLabel3 = new javax.swing.JLabel();
btnRamal = new javax.swing.JButton();
background = new javax.swing.JLabel();
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
setUndecorated(true);
getContentPane().setLayout(new org.netbeans.lib.awtextra.AbsoluteLayout());
tombolexit.setIcon(new javax.swing.ImageIcon(getClass().getResource("/gambar/Login_Close.png"))); // NOI18N
tombolexit.setBorderPainted(false);
tombolexit.setContentAreaFilled(false);
tombolexit.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/gambar/Login_CloseMouseOver.png"))); // NOI18N
getContentPane().add(tombolexit, new org.netbeans.lib.awtextra.AbsoluteConstraints(1320, 0, 50, 20));
tombolminimize.setIcon(new javax.swing.ImageIcon(getClass().getResource("/gambar/All_Minimize.png"))); // NOI18N
tombolminimize.setBorder(null);
tombolminimize.setBorderPainted(false);
tombolminimize.setContentAreaFilled(false);
tombolminimize.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/gambar/All_MinimizeMouseOver.png"))); // NOI18N
getContentPane().add(tombolminimize, new org.netbeans.lib.awtextra.AbsoluteConstraints(1290, 0, -1, 20));
Nametag.setFont(new java.awt.Font("Caviar Dreams", 0, 18)); // NOI18N
Nametag.setForeground(new java.awt.Color(255, 255, 255));
Nametag.setToolTipText("");
getContentPane().add(Nametag, new org.netbeans.lib.awtextra.AbsoluteConstraints(1220, 140, -1, -1));
jLabel1.setFont(new java.awt.Font("Bebas Neue", 0, 48)); // NOI18N
jLabel1.setText("Masukkan Variabel Peramalan :");
getContentPane().add(jLabel1, new org.netbeans.lib.awtextra.AbsoluteConstraints(370, 170, -1, -1));
btnback.setIcon(new javax.swing.ImageIcon(getClass().getResource("/gambar/back.png"))); // NOI18N
btnback.setBorder(null);
btnback.setBorderPainted(false);
btnback.setContentAreaFilled(false);
btnback.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/gambar/backmouseover.png"))); // NOI18N
getContentPane().add(btnback, new org.netbeans.lib.awtextra.AbsoluteConstraints(0, 120, -1, -1));
JTJumlahBibit.setFont(new java.awt.Font("Dialog", 0, 24)); // NOI18N
getContentPane().add(JTJumlahBibit, new org.netbeans.lib.awtextra.AbsoluteConstraints(560, 280, 170, 50));
getContentPane().add(JtLuasKolam, new org.netbeans.lib.awtextra.AbsoluteConstraints(560, 360, 170, 50));
jLabel2.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jLabel2.setText("Luas Kolam (M2):");
getContentPane().add(jLabel2, new org.netbeans.lib.awtextra.AbsoluteConstraints(410, 370, -1, -1));
jLabel3.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jLabel3.setText("Jumlah Bibit (Ekor) :");
getContentPane().add(jLabel3, new org.netbeans.lib.awtextra.AbsoluteConstraints(390, 290, -1, -1));
btnRamal.setIcon(new javax.swing.ImageIcon(getClass().getResource("/gambar/ramal.png"))); // NOI18N
btnRamal.setBorderPainted(false);
btnRamal.setContentAreaFilled(false);
btnRamal.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/gambar/ramal_mouseover.png"))); // NOI18N
getContentPane().add(btnRamal, new org.netbeans.lib.awtextra.AbsoluteConstraints(520, 410, 260, 90));
background.setIcon(new javax.swing.ImageIcon(getClass().getResource("/gambar/HomeAnggotaKelompok.png"))); // NOI18N
background.setText("jLabel1");
getContentPane().add(background, new org.netbeans.lib.awtextra.AbsoluteConstraints(0, 0, 1370, 770));
pack();
}// </editor-fold>//GEN-END:initComponents
/**
* @param args the command line arguments
*/
public static void main(String args[]) {
/* Set the Nimbus look and feel */
//<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
/* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
* For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
*/
try {
for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
if ("Nimbus".equals(info.getName())) {
javax.swing.UIManager.setLookAndFeel(info.getClassName());
break;
}
}
} catch (ClassNotFoundException ex) {
java.util.logging.Logger.getLogger(viewPeramalan.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (InstantiationException ex) {
java.util.logging.Logger.getLogger(viewPeramalan.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
java.util.logging.Logger.getLogger(viewPeramalan.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (javax.swing.UnsupportedLookAndFeelException ex) {
java.util.logging.Logger.getLogger(viewPeramalan.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
}
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
//</editor-fold>
/* Create and display the form */
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
new viewPeramalan().setVisible(true);
}
});
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JTextField JTJumlahBibit;
private javax.swing.JTextField JtLuasKolam;
private javax.swing.JLabel Nametag;
private javax.swing.JLabel background;
private javax.swing.JButton btnRamal;
private javax.swing.JButton btnback;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JButton tombolexit;
private javax.swing.JButton tombolminimize;
// End of variables declaration//GEN-END:variables
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.connectors.kafka;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
import org.apache.flink.streaming.connectors.kafka.config.OffsetCommitMode;
import org.apache.flink.streaming.connectors.kafka.internals.AbstractFetcher;
import org.apache.flink.streaming.connectors.kafka.internals.AbstractPartitionDiscoverer;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaDeserializationSchemaWrapper;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaFetcher;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaPartitionDiscoverer;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicsDescriptor;
import org.apache.flink.util.PropertiesUtil;
import org.apache.flink.util.SerializedValue;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Pattern;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.PropertiesUtil.getBoolean;
import static org.apache.flink.util.PropertiesUtil.getLong;
/**
* The Flink Kafka Consumer is a streaming data source that pulls a parallel data stream from Apache
* Kafka. The consumer can run in multiple parallel instances, each of which will pull data from one
* or more Kafka partitions.
*
* <p>The Flink Kafka Consumer participates in checkpointing and guarantees that no data is lost
* during a failure, and that the computation processes elements "exactly once". (Note: These
* guarantees naturally assume that Kafka itself does not loose any data.)
*
* <p>Please note that Flink snapshots the offsets internally as part of its distributed
* checkpoints. The offsets committed to Kafka are only to bring the outside view of progress in
* sync with Flink's view of the progress. That way, monitoring and other jobs can get a view of how
* far the Flink Kafka consumer has consumed a topic.
*
* <p>Please refer to Kafka's documentation for the available configuration properties:
* http://kafka.apache.org/documentation.html#newconsumerconfigs
*/
@PublicEvolving
public class FlinkKafkaConsumer<T> extends FlinkKafkaConsumerBase<T> {

    private static final long serialVersionUID = 1L;

    /** Configuration key to change the polling timeout. * */
    public static final String KEY_POLL_TIMEOUT = "flink.poll-timeout";

    /**
     * From Kafka's Javadoc: The time, in milliseconds, spent waiting in poll if data is not
     * available. If 0, returns immediately with any records that are available now.
     */
    public static final long DEFAULT_POLL_TIMEOUT = 100L;

    // ------------------------------------------------------------------------

    /** User-supplied properties for Kafka. * */
    protected final Properties properties;

    /**
     * From Kafka's Javadoc: The time, in milliseconds, spent waiting in poll if data is not
     * available. If 0, returns immediately with any records that are available now
     */
    protected final long pollTimeout;

    // ------------------------------------------------------------------------

    /**
     * Creates a new Kafka streaming source consumer.
     *
     * @param topic The name of the topic that should be consumed.
     * @param valueDeserializer The de-/serializer used to convert between Kafka's byte messages and
     *     Flink's objects.
     * @param props The properties used to configure the Kafka consumer client and the fetcher.
     */
    public FlinkKafkaConsumer(
            String topic, DeserializationSchema<T> valueDeserializer, Properties props) {
        this(Collections.singletonList(topic), valueDeserializer, props);
    }

    /**
     * Creates a new Kafka streaming source consumer.
     *
     * <p>This constructor allows passing a {@see KafkaDeserializationSchema} for reading key/value
     * pairs, offsets, and topic names from Kafka.
     *
     * @param topic The name of the topic that should be consumed.
     * @param deserializer The keyed de-/serializer used to convert between Kafka's byte messages
     *     and Flink's objects.
     * @param props The properties used to configure the Kafka consumer client and the fetcher.
     */
    public FlinkKafkaConsumer(
            String topic, KafkaDeserializationSchema<T> deserializer, Properties props) {
        this(Collections.singletonList(topic), deserializer, props);
    }

    /**
     * Creates a new Kafka streaming source consumer.
     *
     * <p>This constructor allows passing multiple topics to the consumer.
     *
     * @param topics The Kafka topics to read from.
     * @param deserializer The de-/serializer used to convert between Kafka's byte messages and
     *     Flink's objects.
     * @param props The properties used to configure the Kafka consumer client and the fetcher.
     */
    public FlinkKafkaConsumer(
            List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
        this(topics, new KafkaDeserializationSchemaWrapper<>(deserializer), props);
    }

    /**
     * Creates a new Kafka streaming source consumer.
     *
     * <p>This constructor allows passing multiple topics and a key/value deserialization schema.
     *
     * @param topics The Kafka topics to read from.
     * @param deserializer The keyed de-/serializer used to convert between Kafka's byte messages
     *     and Flink's objects.
     * @param props The properties used to configure the Kafka consumer client and the fetcher.
     */
    public FlinkKafkaConsumer(
            List<String> topics, KafkaDeserializationSchema<T> deserializer, Properties props) {
        this(topics, null, deserializer, props);
    }

    /**
     * Creates a new Kafka streaming source consumer. Use this constructor to subscribe to multiple
     * topics based on a regular expression pattern.
     *
     * <p>If partition discovery is enabled (by setting a non-negative value for {@link
     * FlinkKafkaConsumer#KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS} in the properties), topics with
     * names matching the pattern will also be subscribed to as they are created on the fly.
     *
     * @param subscriptionPattern The regular expression for a pattern of topic names to subscribe
     *     to.
     * @param valueDeserializer The de-/serializer used to convert between Kafka's byte messages and
     *     Flink's objects.
     * @param props The properties used to configure the Kafka consumer client and the fetcher.
     */
    public FlinkKafkaConsumer(
            Pattern subscriptionPattern,
            DeserializationSchema<T> valueDeserializer,
            Properties props) {
        this(
                null,
                subscriptionPattern,
                new KafkaDeserializationSchemaWrapper<>(valueDeserializer),
                props);
    }

    /**
     * Creates a new Kafka streaming source consumer. Use this constructor to subscribe to multiple
     * topics based on a regular expression pattern.
     *
     * <p>If partition discovery is enabled (by setting a non-negative value for {@link
     * FlinkKafkaConsumer#KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS} in the properties), topics with
     * names matching the pattern will also be subscribed to as they are created on the fly.
     *
     * <p>This constructor allows passing a {@see KafkaDeserializationSchema} for reading key/value
     * pairs, offsets, and topic names from Kafka.
     *
     * @param subscriptionPattern The regular expression for a pattern of topic names to subscribe
     *     to.
     * @param deserializer The keyed de-/serializer used to convert between Kafka's byte messages
     *     and Flink's objects.
     * @param props The properties used to configure the Kafka consumer client and the fetcher.
     */
    public FlinkKafkaConsumer(
            Pattern subscriptionPattern,
            KafkaDeserializationSchema<T> deserializer,
            Properties props) {
        this(null, subscriptionPattern, deserializer, props);
    }

    /**
     * Common constructor that all public constructors delegate to. Exactly one of {@code topics}
     * and {@code subscriptionPattern} is non-null.
     *
     * @param topics fixed list of topics to subscribe to, or {@code null}
     * @param subscriptionPattern topic-name pattern to subscribe to, or {@code null}
     * @param deserializer the keyed de-/serializer for Kafka records
     * @param props the Kafka client / fetcher configuration; must not be {@code null}
     */
    private FlinkKafkaConsumer(
            List<String> topics,
            Pattern subscriptionPattern,
            KafkaDeserializationSchema<T> deserializer,
            Properties props) {
        super(
                topics,
                subscriptionPattern,
                deserializer,
                getLong(
                        checkNotNull(props, "props"),
                        KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS,
                        PARTITION_DISCOVERY_DISABLED),
                !getBoolean(props, KEY_DISABLE_METRICS, false));
        this.properties = props;
        // force the ByteArrayDeserializer; Flink's deserialization schema does the real decoding
        setDeserializer(this.properties);
        // configure the polling timeout
        try {
            if (properties.containsKey(KEY_POLL_TIMEOUT)) {
                this.pollTimeout = Long.parseLong(properties.getProperty(KEY_POLL_TIMEOUT));
            } else {
                this.pollTimeout = DEFAULT_POLL_TIMEOUT;
            }
        } catch (Exception e) {
            throw new IllegalArgumentException(
                    "Cannot parse poll timeout for '" + KEY_POLL_TIMEOUT + '\'', e);
        }
    }

    @Override
    protected AbstractFetcher<T, ?> createFetcher(
            SourceContext<T> sourceContext,
            Map<KafkaTopicPartition, Long> assignedPartitionsWithInitialOffsets,
            SerializedValue<WatermarkStrategy<T>> watermarkStrategy,
            StreamingRuntimeContext runtimeContext,
            OffsetCommitMode offsetCommitMode,
            MetricGroup consumerMetricGroup,
            boolean useMetrics)
            throws Exception {
        // make sure that auto commit is disabled when our offset commit mode is ON_CHECKPOINTS;
        // this overwrites whatever setting the user configured in the properties
        adjustAutoCommitConfig(properties, offsetCommitMode);
        return new KafkaFetcher<>(
                sourceContext,
                assignedPartitionsWithInitialOffsets,
                watermarkStrategy,
                runtimeContext.getProcessingTimeService(),
                runtimeContext.getExecutionConfig().getAutoWatermarkInterval(),
                runtimeContext.getUserCodeClassLoader(),
                runtimeContext.getTaskNameWithSubtasks(),
                deserializer,
                properties,
                pollTimeout,
                runtimeContext.getMetricGroup(),
                consumerMetricGroup,
                useMetrics);
    }

    @Override
    protected AbstractPartitionDiscoverer createPartitionDiscoverer(
            KafkaTopicsDescriptor topicsDescriptor,
            int indexOfThisSubtask,
            int numParallelSubtasks) {
        return new KafkaPartitionDiscoverer(
                topicsDescriptor, indexOfThisSubtask, numParallelSubtasks, properties);
    }

    @Override
    protected Map<KafkaTopicPartition, Long> fetchOffsetsWithTimestamp(
            Collection<KafkaTopicPartition> partitions, long timestamp) {
        Map<TopicPartition, Long> partitionOffsetsRequest = new HashMap<>(partitions.size());
        for (KafkaTopicPartition partition : partitions) {
            partitionOffsetsRequest.put(
                    new TopicPartition(partition.getTopic(), partition.getPartition()), timestamp);
        }
        final Map<KafkaTopicPartition, Long> result = new HashMap<>(partitions.size());
        // use a short-lived consumer to fetch the offsets;
        // this is ok because this is a one-time operation that happens only on startup
        // (diamond instead of the former raw-type `new KafkaConsumer(properties)`,
        // which produced an unchecked-conversion warning)
        try (KafkaConsumer<?, ?> consumer = new KafkaConsumer<>(properties)) {
            for (Map.Entry<TopicPartition, OffsetAndTimestamp> partitionToOffset :
                    consumer.offsetsForTimes(partitionOffsetsRequest).entrySet()) {
                // offsetsForTimes maps a partition to null when no offset exists at/after
                // the timestamp; propagate that null to the caller
                result.put(
                        new KafkaTopicPartition(
                                partitionToOffset.getKey().topic(),
                                partitionToOffset.getKey().partition()),
                        (partitionToOffset.getValue() == null)
                                ? null
                                : partitionToOffset.getValue().offset());
            }
        }
        return result;
    }

    @Override
    protected boolean getIsAutoCommitEnabled() {
        // Kafka defaults: enable.auto.commit=true, auto.commit.interval.ms=5000
        return getBoolean(properties, ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true)
                && PropertiesUtil.getLong(
                                properties, ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 5000)
                        > 0;
    }

    /**
     * Makes sure that the ByteArrayDeserializer is registered in the Kafka properties.
     *
     * @param props The Kafka properties to register the serializer in.
     */
    private static void setDeserializer(Properties props) {
        final String deSerName = ByteArrayDeserializer.class.getName();
        Object keyDeSer = props.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG);
        Object valDeSer = props.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG);
        // warn (but still override) if the user configured their own deserializers
        if (keyDeSer != null && !keyDeSer.equals(deSerName)) {
            LOG.warn(
                    "Ignoring configured key DeSerializer ({})",
                    ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG);
        }
        if (valDeSer != null && !valDeSer.equals(deSerName)) {
            LOG.warn(
                    "Ignoring configured value DeSerializer ({})",
                    ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG);
        }
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, deSerName);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, deSerName);
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.history.dmn;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertThat;
import java.util.Date;
import java.util.List;
import org.camunda.bpm.engine.ProcessEngineConfiguration;
import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.history.HistoricDecisionInstance;
import org.camunda.bpm.engine.history.HistoricDecisionInstanceQuery;
import org.camunda.bpm.engine.history.NativeHistoricDecisionInstanceQuery;
import org.camunda.bpm.engine.impl.test.PluggableProcessEngineTestCase;
import org.camunda.bpm.engine.impl.util.ClockUtil;
import org.camunda.bpm.engine.repository.DecisionRequirementsDefinition;
import org.camunda.bpm.engine.runtime.CaseInstance;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.test.Deployment;
import org.camunda.bpm.engine.test.RequiredHistoryLevel;
import org.camunda.bpm.engine.variable.VariableMap;
import org.camunda.bpm.engine.variable.Variables;
import org.joda.time.DateTime;
/**
* @author Philipp Ossler
* @author Ingo Richtsmeier
*/
@RequiredHistoryLevel(ProcessEngineConfiguration.HISTORY_FULL)
public class HistoricDecisionInstanceQueryTest extends PluggableProcessEngineTestCase {
// CMMN case containing a decision task that evaluates the tested decision.
protected static final String DECISION_CASE = "org/camunda/bpm/engine/test/history/HistoricDecisionInstanceTest.caseWithDecisionTask.cmmn";
// BPMN process containing a business rule task ("task") that evaluates the tested decision.
protected static final String DECISION_PROCESS = "org/camunda/bpm/engine/test/history/HistoricDecisionInstanceTest.processWithBusinessRuleTask.bpmn20.xml";
// DMN decision table producing exactly one output entry per evaluation.
protected static final String DECISION_SINGLE_OUTPUT_DMN = "org/camunda/bpm/engine/test/history/HistoricDecisionInstanceTest.decisionSingleOutput.dmn11.xml";
// DMN decision table without any input entries.
protected static final String DECISION_NO_INPUT_DMN = "org/camunda/bpm/engine/test/history/HistoricDecisionInstanceTest.noInput.dmn11.xml";
// Decision requirements graph (DRD) with multiple linked decisions ("dish-decision").
protected static final String DRG_DMN = "org/camunda/bpm/engine/test/dmn/deployment/drdDish.dmn11.xml";
// Key of the decision definition evaluated by DECISION_PROCESS / DECISION_CASE.
protected static final String DECISION_DEFINITION_KEY = "testDecision";
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryIncludeInputsForNonExistingDecision() {
    HistoricDecisionInstanceQuery inputsQuery =
        historyService.createHistoricDecisionInstanceQuery().includeInputs();
    // nothing has been evaluated yet, so the query must be empty
    assertThat(inputsQuery.singleResult(), is(nullValue()));
    startProcessInstanceAndEvaluateDecision();
    // an unknown decision instance id must still yield no result
    assertThat(inputsQuery.decisionInstanceId("nonExisting").singleResult(), is(nullValue()));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryIncludeOutputs() {
    startProcessInstanceAndEvaluateDecision();
    HistoricDecisionInstanceQuery historicQuery = historyService.createHistoricDecisionInstanceQuery();
    // outputs are fetched lazily: reading them without includeOutputs() must fail
    try {
        historicQuery.singleResult().getOutputs();
        fail("expected exception: output not fetched");
    } catch (ProcessEngineException e) {
        // expected: outputs were not fetched by the query
    }
    // with includeOutputs() the single output entry is available
    assertThat(historicQuery.includeOutputs().singleResult().getOutputs().size(), is(1));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryIncludeOutputsForNonExistingDecision() {
    HistoricDecisionInstanceQuery outputsQuery =
        historyService.createHistoricDecisionInstanceQuery().includeOutputs();
    // nothing has been evaluated yet, so the query must be empty
    assertThat(outputsQuery.singleResult(), is(nullValue()));
    startProcessInstanceAndEvaluateDecision();
    // an unknown decision instance id must still yield no result
    assertThat(outputsQuery.decisionInstanceId("nonExisting").singleResult(), is(nullValue()));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_NO_INPUT_DMN })
public void testQueryIncludeInputsNoInput() {
    startProcessInstanceAndEvaluateDecision();
    // a decision table without input entries yields an empty (not null) input list
    HistoricDecisionInstance instance =
        historyService.createHistoricDecisionInstanceQuery().includeInputs().singleResult();
    assertThat(instance.getInputs().size(), is(0));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_NO_INPUT_DMN })
public void testQueryIncludeOutputsNoInput() {
    startProcessInstanceAndEvaluateDecision();
    // the no-input decision also produces no output entries
    HistoricDecisionInstance instance =
        historyService.createHistoricDecisionInstanceQuery().includeOutputs().singleResult();
    assertThat(instance.getOutputs().size(), is(0));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryPaging() {
    // produce two historic decision instances
    startProcessInstanceAndEvaluateDecision();
    startProcessInstanceAndEvaluateDecision();
    HistoricDecisionInstanceQuery pagedQuery = historyService.createHistoricDecisionInstanceQuery();
    // first page of size 2 contains both, offset 1 with size 1 contains the remainder
    assertThat(pagedQuery.listPage(0, 2).size(), is(2));
    assertThat(pagedQuery.listPage(1, 1).size(), is(1));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQuerySortByEvaluationTime() {
    startProcessInstanceAndEvaluateDecision();
    // ensure the two evaluations get distinguishable timestamps
    waitASignificantAmountOfTime();
    startProcessInstanceAndEvaluateDecision();
    // ascending order: first entry was evaluated before the second
    List<HistoricDecisionInstance> ascending =
        historyService.createHistoricDecisionInstanceQuery().orderByEvaluationTime().asc().list();
    assertThat(ascending.get(0).getEvaluationTime().before(ascending.get(1).getEvaluationTime()), is(true));
    // descending order: first entry was evaluated after the second
    List<HistoricDecisionInstance> descending =
        historyService.createHistoricDecisionInstanceQuery().orderByEvaluationTime().desc().list();
    assertThat(descending.get(0).getEvaluationTime().after(descending.get(1).getEvaluationTime()), is(true));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByDecisionInstanceId() {
    ProcessInstance firstInstance = startProcessInstanceAndEvaluateDecision();
    ProcessInstance secondInstance = startProcessInstanceAndEvaluateDecision();
    // resolve the historic decision instance id belonging to each process instance
    String firstDecisionId = historyService.createHistoricDecisionInstanceQuery()
        .processInstanceId(firstInstance.getId()).singleResult().getId();
    String secondDecisionId = historyService.createHistoricDecisionInstanceQuery()
        .processInstanceId(secondInstance.getId()).singleResult().getId();
    HistoricDecisionInstanceQuery idQuery = historyService.createHistoricDecisionInstanceQuery();
    assertThat(idQuery.decisionInstanceId(firstDecisionId).count(), is(1L));
    assertThat(idQuery.decisionInstanceId(secondDecisionId).count(), is(1L));
    assertThat(idQuery.decisionInstanceId("unknown").count(), is(0L));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByDecisionInstanceIds() {
    ProcessInstance firstInstance = startProcessInstanceAndEvaluateDecision();
    ProcessInstance secondInstance = startProcessInstanceAndEvaluateDecision();
    // resolve the historic decision instance id belonging to each process instance
    String firstDecisionId = historyService.createHistoricDecisionInstanceQuery()
        .processInstanceId(firstInstance.getId()).singleResult().getId();
    String secondDecisionId = historyService.createHistoricDecisionInstanceQuery()
        .processInstanceId(secondInstance.getId()).singleResult().getId();
    HistoricDecisionInstanceQuery idsQuery = historyService.createHistoricDecisionInstanceQuery();
    // the "in" variant accepts one or more ids
    assertThat(idsQuery.decisionInstanceIdIn(firstDecisionId).count(), is(1L));
    assertThat(idsQuery.decisionInstanceIdIn(secondDecisionId).count(), is(1L));
    assertThat(idsQuery.decisionInstanceIdIn(firstDecisionId, secondDecisionId).count(), is(2L));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByDecisionDefinitionId() {
    String definitionId = repositoryService.createDecisionDefinitionQuery()
        .decisionDefinitionKey(DECISION_DEFINITION_KEY).singleResult().getId();
    startProcessInstanceAndEvaluateDecision();
    HistoricDecisionInstanceQuery definitionQuery = historyService.createHistoricDecisionInstanceQuery();
    assertThat(definitionQuery.decisionDefinitionId(definitionId).count(), is(1L));
    assertThat(definitionQuery.decisionDefinitionId("other id").count(), is(0L));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByDecisionDefinitionKey() {
    startProcessInstanceAndEvaluateDecision();
    HistoricDecisionInstanceQuery keyQuery = historyService.createHistoricDecisionInstanceQuery();
    // only the evaluated decision's key matches
    assertThat(keyQuery.decisionDefinitionKey(DECISION_DEFINITION_KEY).count(), is(1L));
    assertThat(keyQuery.decisionDefinitionKey("other key").count(), is(0L));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByDecisionDefinitionName() {
    startProcessInstanceAndEvaluateDecision();
    HistoricDecisionInstanceQuery nameQuery = historyService.createHistoricDecisionInstanceQuery();
    // the deployed decision table is named "sample decision"
    assertThat(nameQuery.decisionDefinitionName("sample decision").count(), is(1L));
    assertThat(nameQuery.decisionDefinitionName("other name").count(), is(0L));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByProcessDefinitionKey() {
    // only one process definition is deployed, so the unfiltered query is unambiguous
    String definitionKey = repositoryService.createProcessDefinitionQuery().singleResult().getKey();
    startProcessInstanceAndEvaluateDecision();
    HistoricDecisionInstanceQuery processKeyQuery = historyService.createHistoricDecisionInstanceQuery();
    assertThat(processKeyQuery.processDefinitionKey(definitionKey).count(), is(1L));
    assertThat(processKeyQuery.processDefinitionKey("other process").count(), is(0L));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByProcessDefinitionId() {
    // only one process definition is deployed, so the unfiltered query is unambiguous
    String definitionId = repositoryService.createProcessDefinitionQuery().singleResult().getId();
    startProcessInstanceAndEvaluateDecision();
    HistoricDecisionInstanceQuery processIdQuery = historyService.createHistoricDecisionInstanceQuery();
    assertThat(processIdQuery.processDefinitionId(definitionId).count(), is(1L));
    assertThat(processIdQuery.processDefinitionId("other process").count(), is(0L));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByProcessInstanceId() {
    startProcessInstanceAndEvaluateDecision();
    // only one process instance exists, so the unfiltered query is unambiguous
    String instanceId = runtimeService.createProcessInstanceQuery().singleResult().getId();
    HistoricDecisionInstanceQuery instanceQuery = historyService.createHistoricDecisionInstanceQuery();
    assertThat(instanceQuery.processInstanceId(instanceId).count(), is(1L));
    assertThat(instanceQuery.processInstanceId("other process").count(), is(0L));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByActivityId() {
    startProcessInstanceAndEvaluateDecision();
    HistoricDecisionInstanceQuery activityQuery = historyService.createHistoricDecisionInstanceQuery();
    // "task" is the business rule task that evaluated the decision
    assertThat(activityQuery.activityIdIn("task").count(), is(1L));
    assertThat(activityQuery.activityIdIn("other activity").count(), is(0L));
    // mixing a matching and a non-matching id still finds the match
    assertThat(activityQuery.activityIdIn("task", "other activity").count(), is(1L));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByActivityInstanceId() {
    startProcessInstanceAndEvaluateDecision();
    // look up the historic activity instance of the business rule task
    String activityInstanceId = historyService.createHistoricActivityInstanceQuery()
        .activityId("task").singleResult().getId();
    HistoricDecisionInstanceQuery activityInstanceQuery = historyService.createHistoricDecisionInstanceQuery();
    assertThat(activityInstanceQuery.activityInstanceIdIn(activityInstanceId).count(), is(1L));
    assertThat(activityInstanceQuery.activityInstanceIdIn("other activity").count(), is(0L));
    // mixing a matching and a non-matching id still finds the match
    assertThat(activityInstanceQuery.activityInstanceIdIn(activityInstanceId, "other activity").count(), is(1L));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByEvaluatedBefore() {
    Date beforeEvaluated = new Date(1441612000);
    Date evaluated = new Date(1441613000);
    Date afterEvaluated = new Date(1441614000);
    // pin the engine clock so the evaluation timestamp is deterministic
    ClockUtil.setCurrentTime(evaluated);
    startProcessInstanceAndEvaluateDecision();
    HistoricDecisionInstanceQuery timeQuery = historyService.createHistoricDecisionInstanceQuery();
    assertThat(timeQuery.evaluatedBefore(afterEvaluated).count(), is(1L));
    // the filter is inclusive: the exact evaluation time still matches
    assertThat(timeQuery.evaluatedBefore(evaluated).count(), is(1L));
    assertThat(timeQuery.evaluatedBefore(beforeEvaluated).count(), is(0L));
    ClockUtil.reset();
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByEvaluatedAfter() {
    Date beforeEvaluated = new Date(1441612000);
    Date evaluated = new Date(1441613000);
    Date afterEvaluated = new Date(1441614000);
    // pin the engine clock so the evaluation timestamp is deterministic
    ClockUtil.setCurrentTime(evaluated);
    startProcessInstanceAndEvaluateDecision();
    HistoricDecisionInstanceQuery timeQuery = historyService.createHistoricDecisionInstanceQuery();
    assertThat(timeQuery.evaluatedAfter(beforeEvaluated).count(), is(1L));
    // the filter is inclusive: the exact evaluation time still matches
    assertThat(timeQuery.evaluatedAfter(evaluated).count(), is(1L));
    assertThat(timeQuery.evaluatedAfter(afterEvaluated).count(), is(0L));
    ClockUtil.reset();
}
@Deployment(resources = { DECISION_CASE, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByCaseDefinitionKey() {
    createCaseInstanceAndEvaluateDecision();
    // the deployed CMMN case has definition key "case"
    HistoricDecisionInstanceQuery caseKeyQuery = historyService.createHistoricDecisionInstanceQuery();
    assertThat(caseKeyQuery.caseDefinitionKey("case").count(), is(1L));
}
public void testQueryByInvalidCaseDefinitionKey() {
    HistoricDecisionInstanceQuery caseKeyQuery = historyService.createHistoricDecisionInstanceQuery();
    // an unknown key matches nothing
    assertThat(caseKeyQuery.caseDefinitionKey("invalid").count(), is(0L));
    // a null key is rejected with an exception
    try {
        caseKeyQuery.caseDefinitionKey(null);
        fail("exception expected");
    } catch (ProcessEngineException e) {
        // expected: null is not a valid case definition key
    }
}
@Deployment(resources = { DECISION_CASE, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByCaseDefinitionId() {
    CaseInstance caseInstance = createCaseInstanceAndEvaluateDecision();
    // the historic decision instance is linked to the case's definition id
    HistoricDecisionInstanceQuery caseIdQuery = historyService.createHistoricDecisionInstanceQuery();
    assertThat(caseIdQuery.caseDefinitionId(caseInstance.getCaseDefinitionId()).count(), is(1L));
}
public void testQueryByInvalidCaseDefinitionId() {
  HistoricDecisionInstanceQuery query = historyService.createHistoricDecisionInstanceQuery();

  // An unknown id matches nothing.
  assertThat(query.caseDefinitionId("invalid").count(), is(0L));

  // A null id must be rejected by the query builder.
  try {
    query.caseDefinitionId(null);
    fail("exception expected");
  } catch (ProcessEngineException e) {
    // expected
  }
}
@Deployment(resources = { DECISION_CASE, DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByCaseInstanceId() {
  CaseInstance caseInstance = createCaseInstanceAndEvaluateDecision();

  // The historic decision instance carries the id of the triggering case instance.
  long count = historyService.createHistoricDecisionInstanceQuery()
      .caseInstanceId(caseInstance.getId())
      .count();
  assertThat(count, is(1L));
}
public void testQueryByInvalidCaseInstanceId() {
  HistoricDecisionInstanceQuery query = historyService.createHistoricDecisionInstanceQuery();

  // An unknown id matches nothing.
  assertThat(query.caseInstanceId("invalid").count(), is(0L));

  // A null id must be rejected by the query builder.
  try {
    query.caseInstanceId(null);
    fail("exception expected");
  } catch (ProcessEngineException e) {
    // expected
  }
}
@Deployment(resources = { DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByUserId() {
  // Evaluate while "demo" is the authenticated user; history should record that user.
  evaluateDecisionWithAuthenticatedUser("demo");

  long count = historyService.createHistoricDecisionInstanceQuery()
      .userId("demo")
      .count();
  assertThat(count, is(1L));
}
@Deployment(resources = { DECISION_SINGLE_OUTPUT_DMN })
public void testQueryByInvalidUserId() {
  evaluateDecisionWithAuthenticatedUser("demo");

  HistoricDecisionInstanceQuery query = historyService.createHistoricDecisionInstanceQuery();

  // A different user id matches nothing.
  assertThat(query.userId("dem1").count(), is(0L));

  // A null user id must be rejected by the query builder.
  try {
    query.userId(null);
    fail("exception expected");
  } catch (ProcessEngineException e) {
    // expected
  }
}
@Deployment(resources = { DRG_DMN })
public void testQueryByRootDecisionInstanceId() {
  // Evaluating the root decision of the DRG also evaluates its two required decisions,
  // producing three historic decision instances in total.
  decisionService.evaluateDecisionTableByKey("dish-decision")
      .variables(Variables.createVariables().putValue("temperature", 21).putValue("dayType", "Weekend"))
      .evaluate();

  HistoricDecisionInstanceQuery query = historyService.createHistoricDecisionInstanceQuery();
  assertThat(query.count(), is(3L));

  String rootInstanceId = query.decisionDefinitionKey("dish-decision").singleResult().getId();
  String requiredInstanceId1 = query.decisionDefinitionKey("season").singleResult().getId();
  String requiredInstanceId2 = query.decisionDefinitionKey("guestCount").singleResult().getId();

  // Only the root instance id groups all three instances; required ids match nothing.
  query = historyService.createHistoricDecisionInstanceQuery();
  assertThat(query.rootDecisionInstanceId(rootInstanceId).count(), is(3L));
  assertThat(query.rootDecisionInstanceId(requiredInstanceId1).count(), is(0L));
  assertThat(query.rootDecisionInstanceId(requiredInstanceId2).count(), is(0L));
}
@Deployment(resources = { DRG_DMN })
public void testQueryByRootDecisionInstancesOnly() {
  // One root evaluation produces three instances (root + two required decisions).
  decisionService.evaluateDecisionTableByKey("dish-decision")
      .variables(Variables.createVariables().putValue("temperature", 21).putValue("dayType", "Weekend"))
      .evaluate();

  HistoricDecisionInstanceQuery query = historyService.createHistoricDecisionInstanceQuery();
  assertThat(query.count(), is(3L));

  // Restricting to root instances leaves exactly the "dish-decision" instance.
  assertThat(query.rootDecisionInstancesOnly().count(), is(1L));
  assertThat(query.rootDecisionInstancesOnly().singleResult().getDecisionDefinitionKey(), is("dish-decision"));
}
@Deployment(resources = { DRG_DMN })
public void testQueryByDecisionRequirementsDefinitionId() {
  decisionService.evaluateDecisionTableByKey("dish-decision")
      .variables(Variables.createVariables().putValue("temperature", 21).putValue("dayType", "Weekend"))
      .evaluate();

  // All three instances belong to the single deployed requirements definition.
  DecisionRequirementsDefinition requirementsDefinition =
      repositoryService.createDecisionRequirementsDefinitionQuery().singleResult();

  HistoricDecisionInstanceQuery query = historyService.createHistoricDecisionInstanceQuery();
  assertThat(query.decisionRequirementsDefinitionId("notExisting").count(), is(0L));
  assertThat(query.decisionRequirementsDefinitionId(requirementsDefinition.getId()).count(), is(3L));
}
@Deployment(resources = { DRG_DMN })
public void testQueryByDecisionRequirementsDefinitionKey() {
  decisionService.evaluateDecisionTableByKey("dish-decision")
      .variables(Variables.createVariables().putValue("temperature", 21).putValue("dayType", "Weekend"))
      .evaluate();

  // All three instances share the requirements definition key "dish".
  HistoricDecisionInstanceQuery query = historyService.createHistoricDecisionInstanceQuery();
  assertThat(query.decisionRequirementsDefinitionKey("notExisting").count(), is(0L));
  assertThat(query.decisionRequirementsDefinitionKey("dish").count(), is(3L));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testNativeQuery() {
  startProcessInstanceAndEvaluateDecision();

  String tablePrefix = processEngineConfiguration.getDatabaseTablePrefix();

  // Plain native SQL against the historic decision instance table.
  NativeHistoricDecisionInstanceQuery nativeQuery = historyService
      .createNativeHistoricDecisionInstanceQuery()
      .sql("SELECT * FROM " + tablePrefix + "ACT_HI_DECINST");
  assertThat(nativeQuery.list().size(), is(1));

  // Native SQL with a named parameter bound per execution.
  NativeHistoricDecisionInstanceQuery parameterizedQuery = historyService
      .createNativeHistoricDecisionInstanceQuery()
      .sql("SELECT * FROM " + tablePrefix + "ACT_HI_DECINST H WHERE H.DEC_DEF_KEY_ = #{decisionDefinitionKey}");
  assertThat(parameterizedQuery.parameter("decisionDefinitionKey", DECISION_DEFINITION_KEY).list().size(), is(1));
  assertThat(parameterizedQuery.parameter("decisionDefinitionKey", "other decision").list().size(), is(0));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testNativeCountQuery() {
  startProcessInstanceAndEvaluateDecision();

  String tablePrefix = processEngineConfiguration.getDatabaseTablePrefix();

  // Native count(*) over the historic decision instance table.
  long count = historyService
      .createNativeHistoricDecisionInstanceQuery()
      .sql("SELECT count(*) FROM " + tablePrefix + "ACT_HI_DECINST")
      .count();
  assertThat(count, is(1L));
}
@Deployment(resources = { DECISION_PROCESS, DECISION_SINGLE_OUTPUT_DMN })
public void testNativeQueryPaging() {
  // Two evaluations so paging has more than one row to work with.
  startProcessInstanceAndEvaluateDecision();
  startProcessInstanceAndEvaluateDecision();

  String tablePrefix = processEngineConfiguration.getDatabaseTablePrefix();
  NativeHistoricDecisionInstanceQuery nativeQuery = historyService
      .createNativeHistoricDecisionInstanceQuery()
      .sql("SELECT * FROM " + tablePrefix + "ACT_HI_DECINST");

  assertThat(nativeQuery.listPage(0, 2).size(), is(2));
  assertThat(nativeQuery.listPage(1, 1).size(), is(1));
}
/**
 * Starts the "testProcess" process with the default test variables; the process
 * evaluates the deployed decision as part of its execution.
 *
 * @return the started process instance
 */
protected ProcessInstance startProcessInstanceAndEvaluateDecision() {
  return runtimeService.startProcessInstanceByKey("testProcess", getVariables());
}
/**
 * Creates an instance of the "case" case definition with the default test variables;
 * creating the case evaluates the deployed decision.
 *
 * @return the created case instance
 */
protected CaseInstance createCaseInstanceAndEvaluateDecision() {
  return caseService
      .withCaseDefinitionByKey("case")
      .setVariables(getVariables())
      .create();
}
/**
 * Evaluates the decision table directly through the decision service while
 * {@code userId} is set as the authenticated user, so history records that user.
 *
 * <p>NOTE(review): the authentication is not cleared afterwards — presumably the
 * test tearDown resets it; confirm so auth state does not leak between tests.
 *
 * @param userId the user id to authenticate before the evaluation
 */
protected void evaluateDecisionWithAuthenticatedUser(String userId) {
  identityService.setAuthenticatedUserId(userId);
  VariableMap variables = Variables.putValue("input1", "test");
  decisionService.evaluateDecisionTableByKey(DECISION_DEFINITION_KEY, variables);
}
/**
 * Builds the single input variable ("input1" = "test") consumed by the decision
 * under test.
 *
 * @return the variable map passed to process/case/decision evaluation
 */
protected VariableMap getVariables() {
  return Variables.createVariables().putValue("input1", "test");
}
/**
 * Use between two rule evaluations to ensure the expected order by evaluation time.
 * Advances the engine clock by ten seconds instead of actually sleeping.
 */
protected void waitASignificantAmountOfTime() {
  Date current = ClockUtil.getCurrentTime();
  ClockUtil.setCurrentTime(new DateTime(current).plusSeconds(10).toDate());
}
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.09.07 at 08:01:35 PM IST
//
package com.mozu.qbintegration.model.qbmodel.allgen;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <group ref="{}ListCore"/>
* <element name="Name" minOccurs="0">
* <simpleType>
* <restriction base="{}STRTYPE">
* <maxLength value="31"/>
* </restriction>
* </simpleType>
* </element>
* <element name="FullName" minOccurs="0">
* <simpleType>
* <restriction base="{}STRTYPE">
* <maxLength value="159"/>
* </restriction>
* </simpleType>
* </element>
* <element name="BarCodeValue" minOccurs="0">
* <simpleType>
* <restriction base="{}STRTYPE">
* <maxLength value="50"/>
* </restriction>
* </simpleType>
* </element>
* <element ref="{}IsActive" minOccurs="0"/>
* <element ref="{}ClassRef" minOccurs="0"/>
* <element ref="{}ParentRef" minOccurs="0"/>
* <element ref="{}Sublevel" minOccurs="0"/>
* <element ref="{}IsTaxIncluded" minOccurs="0"/>
* <element ref="{}SalesTaxCodeRef" minOccurs="0"/>
* <choice minOccurs="0">
* <element ref="{}SalesOrPurchase"/>
* <element ref="{}SalesAndPurchase"/>
* </choice>
* <element ref="{}SpecialItemType" minOccurs="0"/>
* <element ref="{}ExternalGUID" minOccurs="0"/>
* <element ref="{}DataExtRet" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE: generated JAXB binding class for the QuickBooks "ItemOtherChargeRet" element.
// Do not hand-edit behavior here — any change is lost when the schema is recompiled.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "listID",
    "timeCreated",
    "timeModified",
    "editSequence",
    "name",
    "fullName",
    "barCodeValue",
    "isActive",
    "classRef",
    "parentRef",
    "sublevel",
    "isTaxIncluded",
    "salesTaxCodeRef",
    "salesOrPurchase",
    "salesAndPurchase",
    "specialItemType",
    "externalGUID",
    "dataExtRet"
})
@XmlRootElement(name = "ItemOtherChargeRet")
public class ItemOtherChargeRet {

    // ListCore group: identity and audit fields shared by all QuickBooks list objects.
    @XmlElement(name = "ListID")
    protected String listID;
    @XmlElement(name = "TimeCreated")
    protected String timeCreated;
    @XmlElement(name = "TimeModified")
    protected String timeModified;
    @XmlElement(name = "EditSequence")
    protected String editSequence;
    // Item naming (schema caps: Name <= 31 chars, FullName <= 159, BarCodeValue <= 50).
    @XmlElement(name = "Name")
    protected String name;
    @XmlElement(name = "FullName")
    protected String fullName;
    @XmlElement(name = "BarCodeValue")
    protected String barCodeValue;
    @XmlElement(name = "IsActive")
    protected String isActive;
    @XmlElement(name = "ClassRef")
    protected ClassRef classRef;
    @XmlElement(name = "ParentRef")
    protected ParentRef parentRef;
    @XmlElement(name = "Sublevel")
    protected BigInteger sublevel;
    @XmlElement(name = "IsTaxIncluded")
    protected String isTaxIncluded;
    @XmlElement(name = "SalesTaxCodeRef")
    protected SalesTaxCodeRef salesTaxCodeRef;
    // Schema choice: at most one of SalesOrPurchase / SalesAndPurchase is populated.
    @XmlElement(name = "SalesOrPurchase")
    protected SalesOrPurchase salesOrPurchase;
    @XmlElement(name = "SalesAndPurchase")
    protected SalesAndPurchase salesAndPurchase;
    @XmlElement(name = "SpecialItemType")
    protected String specialItemType;
    @XmlElement(name = "ExternalGUID")
    protected String externalGUID;
    @XmlElement(name = "DataExtRet")
    protected List<DataExtRet> dataExtRet;

    /**
     * Gets the value of the listID property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getListID() {
        return listID;
    }

    /**
     * Sets the value of the listID property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setListID(String value) {
        this.listID = value;
    }

    /**
     * Gets the value of the timeCreated property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getTimeCreated() {
        return timeCreated;
    }

    /**
     * Sets the value of the timeCreated property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setTimeCreated(String value) {
        this.timeCreated = value;
    }

    /**
     * Gets the value of the timeModified property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getTimeModified() {
        return timeModified;
    }

    /**
     * Sets the value of the timeModified property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setTimeModified(String value) {
        this.timeModified = value;
    }

    /**
     * Gets the value of the editSequence property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getEditSequence() {
        return editSequence;
    }

    /**
     * Sets the value of the editSequence property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setEditSequence(String value) {
        this.editSequence = value;
    }

    /**
     * Gets the value of the name property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the name property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setName(String value) {
        this.name = value;
    }

    /**
     * Gets the value of the fullName property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getFullName() {
        return fullName;
    }

    /**
     * Sets the value of the fullName property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setFullName(String value) {
        this.fullName = value;
    }

    /**
     * Gets the value of the barCodeValue property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getBarCodeValue() {
        return barCodeValue;
    }

    /**
     * Sets the value of the barCodeValue property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setBarCodeValue(String value) {
        this.barCodeValue = value;
    }

    /**
     * Gets the value of the isActive property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getIsActive() {
        return isActive;
    }

    /**
     * Sets the value of the isActive property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setIsActive(String value) {
        this.isActive = value;
    }

    /**
     * Gets the value of the classRef property.
     *
     * @return
     *     possible object is
     *     {@link ClassRef }
     *
     */
    public ClassRef getClassRef() {
        return classRef;
    }

    /**
     * Sets the value of the classRef property.
     *
     * @param value
     *     allowed object is
     *     {@link ClassRef }
     *
     */
    public void setClassRef(ClassRef value) {
        this.classRef = value;
    }

    /**
     * Gets the value of the parentRef property.
     *
     * @return
     *     possible object is
     *     {@link ParentRef }
     *
     */
    public ParentRef getParentRef() {
        return parentRef;
    }

    /**
     * Sets the value of the parentRef property.
     *
     * @param value
     *     allowed object is
     *     {@link ParentRef }
     *
     */
    public void setParentRef(ParentRef value) {
        this.parentRef = value;
    }

    /**
     * Gets the value of the sublevel property.
     *
     * @return
     *     possible object is
     *     {@link BigInteger }
     *
     */
    public BigInteger getSublevel() {
        return sublevel;
    }

    /**
     * Sets the value of the sublevel property.
     *
     * @param value
     *     allowed object is
     *     {@link BigInteger }
     *
     */
    public void setSublevel(BigInteger value) {
        this.sublevel = value;
    }

    /**
     * Gets the value of the isTaxIncluded property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getIsTaxIncluded() {
        return isTaxIncluded;
    }

    /**
     * Sets the value of the isTaxIncluded property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setIsTaxIncluded(String value) {
        this.isTaxIncluded = value;
    }

    /**
     * Gets the value of the salesTaxCodeRef property.
     *
     * @return
     *     possible object is
     *     {@link SalesTaxCodeRef }
     *
     */
    public SalesTaxCodeRef getSalesTaxCodeRef() {
        return salesTaxCodeRef;
    }

    /**
     * Sets the value of the salesTaxCodeRef property.
     *
     * @param value
     *     allowed object is
     *     {@link SalesTaxCodeRef }
     *
     */
    public void setSalesTaxCodeRef(SalesTaxCodeRef value) {
        this.salesTaxCodeRef = value;
    }

    /**
     * Gets the value of the salesOrPurchase property.
     *
     * @return
     *     possible object is
     *     {@link SalesOrPurchase }
     *
     */
    public SalesOrPurchase getSalesOrPurchase() {
        return salesOrPurchase;
    }

    /**
     * Sets the value of the salesOrPurchase property.
     *
     * @param value
     *     allowed object is
     *     {@link SalesOrPurchase }
     *
     */
    public void setSalesOrPurchase(SalesOrPurchase value) {
        this.salesOrPurchase = value;
    }

    /**
     * Gets the value of the salesAndPurchase property.
     *
     * @return
     *     possible object is
     *     {@link SalesAndPurchase }
     *
     */
    public SalesAndPurchase getSalesAndPurchase() {
        return salesAndPurchase;
    }

    /**
     * Sets the value of the salesAndPurchase property.
     *
     * @param value
     *     allowed object is
     *     {@link SalesAndPurchase }
     *
     */
    public void setSalesAndPurchase(SalesAndPurchase value) {
        this.salesAndPurchase = value;
    }

    /**
     * Gets the value of the specialItemType property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getSpecialItemType() {
        return specialItemType;
    }

    /**
     * Sets the value of the specialItemType property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setSpecialItemType(String value) {
        this.specialItemType = value;
    }

    /**
     * Gets the value of the externalGUID property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getExternalGUID() {
        return externalGUID;
    }

    /**
     * Sets the value of the externalGUID property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setExternalGUID(String value) {
        this.externalGUID = value;
    }

    /**
     * Gets the value of the dataExtRet property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the dataExtRet property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getDataExtRet().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link DataExtRet }
     *
     *
     */
    public List<DataExtRet> getDataExtRet() {
        // Lazily initialized so JAXB unmarshalling and callers share one live list.
        if (dataExtRet == null) {
            dataExtRet = new ArrayList<DataExtRet>();
        }
        return this.dataExtRet;
    }

}
| |
package org.inaetics.dronessimulator.architecturemanager;
import org.inaetics.dronessimulator.common.architecture.SimulationAction;
import org.inaetics.dronessimulator.common.architecture.SimulationState;
import org.inaetics.dronessimulator.common.protocol.RequestArchitectureStateChangeMessage;
import org.inaetics.dronessimulator.discovery.api.Discoverer;
import org.inaetics.dronessimulator.discovery.api.DuplicateName;
import org.inaetics.dronessimulator.discovery.api.Instance;
import org.inaetics.dronessimulator.discovery.api.instances.ArchitectureInstance;
import org.inaetics.pubsub.api.pubsub.Subscriber;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
 * Architecture Manager service.
 *
 * <p>Manages architecture-wide concerns. Currently this only consists of the current
 * lifecycle state of the architecture. Uses Discovery to publish the current state and a
 * Subscriber to receive requested state updates.
 */
public class ArchitectureManager implements Subscriber {
    /** Logger for this class (moved to the top of the class per convention). */
    private static final org.apache.log4j.Logger log = org.apache.log4j.Logger.getLogger(ArchitectureManager.class);

    /** Reference to the discovery bundle used to publish state information (injected by the runtime). */
    private volatile Discoverer discoverer;

    /** The instance published in Discovery. */
    private Instance instance;

    /** The state the architecture was in before the last transition. */
    private SimulationState previousState;

    /** The last action taken by the architecture. */
    private SimulationAction previousAction;

    /** The current state of the architecture. */
    private SimulationState currentState;

    /**
     * Constructs a new Architecture Manager.
     *
     * <p>Sets the begin state of the architecture (previous: NOSTATE, action: INIT,
     * current: INIT) and creates the instance that is later registered with Discovery.
     */
    public ArchitectureManager() {
        previousState = SimulationState.NOSTATE;
        previousAction = SimulationAction.INIT;
        currentState = SimulationState.INIT;
        this.instance = new ArchitectureInstance(getCurrentProperties());
    }

    /**
     * Handles a message received through the subscriber. Only
     * {@link RequestArchitectureStateChangeMessage}s trigger a transition: if the requested
     * action is valid for the current state, the transition is recorded and the new
     * lifecycle is published on Discovery; otherwise an error is logged and the state is
     * left unchanged.
     *
     * @param o                  the received message
     * @param multipartCallbacks pub/sub callback handle (unused)
     */
    @Override
    public void receive(Object o, MultipartCallbacks multipartCallbacks) {
        // Fixed: was System.out.println; route through the logger so output honors log config.
        log.debug("Got message " + o);
        if (o instanceof RequestArchitectureStateChangeMessage) {
            RequestArchitectureStateChangeMessage msg = (RequestArchitectureStateChangeMessage) o;
            SimulationAction action = msg.getAction();
            SimulationState nextState = nextState(this.currentState, action);

            if (nextState != null) {
                // New state! Save and publish on Discovery.
                this.previousState = this.currentState;
                this.previousAction = action;
                this.currentState = nextState;
                log.info("New transition: (" + this.previousState + ", " + this.previousAction + ", " + this.currentState + ")");
                instance = safeUpdateProperties(instance, getCurrentProperties());
            } else {
                // Fixed grammar in the message ("did not led" -> "did not lead").
                log.error(String.format("Received an action which did not lead to a next state! Current state: %s. Action: %s", currentState, action));
            }
        }
    }

    /**
     * Starts the Architecture Manager service (the Subscriber is initialized by OSGi) and
     * registers the initial lifecycle state with Discovery.
     */
    public void start() {
        log.info("\n\nStarted Architecture Manager!\n\n");
        try {
            discoverer.register(instance);
        } catch (DuplicateName duplicateName) {
            // Fixed: was printStackTrace(); keep the service up but record why registration failed.
            log.error("Could not register architecture instance: name already registered", duplicateName);
        } catch (IOException e) {
            log.error("Could not register architecture instance with Discovery", e);
        }
    }

    /**
     * Updates the properties of the given instance in Discovery, tolerating I/O failures.
     *
     * @param instance   the registered instance to update
     * @param properties the new properties to publish
     * @return the updated instance, or the given instance unchanged when the update failed
     */
    private Instance safeUpdateProperties(final Instance instance, final Map<String, String> properties) {
        try {
            return discoverer.updateProperties(instance, properties);
        } catch (IOException e) {
            log.fatal(e);
        }
        return instance;
    }

    /**
     * Stops the Architecture Manager service and unregisters the current state in Discovery.
     */
    public void stop() {
        log.info("Stopping Architecture Manager...");
        try {
            discoverer.unregister(instance);
        } catch (IOException e) {
            log.error(e);
        }
        log.info("Stopped Architecture Manager!");
    }

    /**
     * Gets the current lifecycle state in a map which can be used by a Discovery Instance.
     *
     * @return a map containing the "current_life_cycle" property formatted as
     *         "previousState.previousAction.currentState"
     */
    public Map<String, String> getCurrentProperties() {
        Map<String, String> properties = new HashMap<>();
        properties.put("current_life_cycle", String.format("%s.%s.%s", previousState.toString(), previousAction.toString(), currentState.toString()));
        return properties;
    }

    /**
     * Determines the next state of the architecture based on the current state and the
     * taken action.
     *
     * @param currentState the current state of the architecture
     * @param action       the action to take for the architecture
     * @return the new state after the action is taken, or {@code null} when the action is
     *         not a valid transition from the current state
     */
    public static SimulationState nextState(SimulationState currentState, SimulationAction action) {
        switch (currentState) {
            case NOSTATE:
                return nextStateFromNoState(action);
            case INIT:
                return nextStateFromInit(action);
            case CONFIG:
                return nextStateFromConfig(action);
            case RUNNING:
                return nextStateFromRunning(action);
            case PAUSED:
                return nextStateFromPaused(action);
            case DONE:
                return nextStateFromDone(action);
            default:
                return null;
        }
    }

    /** NOSTATE only accepts INIT (-&gt; INIT). */
    private static SimulationState nextStateFromNoState(SimulationAction action) {
        return action == SimulationAction.INIT ? SimulationState.INIT : null;
    }

    /** INIT only accepts CONFIG (-&gt; CONFIG). */
    private static SimulationState nextStateFromInit(SimulationAction action) {
        return action == SimulationAction.CONFIG ? SimulationState.CONFIG : null;
    }

    /** CONFIG accepts START (-&gt; RUNNING) and STOP (-&gt; INIT). */
    private static SimulationState nextStateFromConfig(SimulationAction action) {
        switch (action) {
            case START:
                return SimulationState.RUNNING;
            case STOP:
                return SimulationState.INIT;
            default:
                return null;
        }
    }

    /** RUNNING accepts STOP (-&gt; INIT), PAUSE (-&gt; PAUSED) and GAMEOVER (-&gt; DONE). */
    private static SimulationState nextStateFromRunning(SimulationAction action) {
        switch (action) {
            case STOP:
                return SimulationState.INIT;
            case PAUSE:
                return SimulationState.PAUSED;
            case GAMEOVER:
                return SimulationState.DONE;
            default:
                return null;
        }
    }

    /** PAUSED accepts RESUME (-&gt; RUNNING) and STOP (-&gt; INIT). */
    private static SimulationState nextStateFromPaused(SimulationAction action) {
        switch (action) {
            case RESUME:
                return SimulationState.RUNNING;
            case STOP:
                return SimulationState.INIT;
            default:
                return null;
        }
    }

    /** DONE only accepts STOP (-&gt; INIT). */
    private static SimulationState nextStateFromDone(SimulationAction action) {
        return action == SimulationAction.STOP ? SimulationState.INIT : null;
    }
}
| |
/*
* Copyright (c) 2015 Lunci Hua
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
package org.lunci.dumbthing.activity;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Configuration;
import android.media.Ringtone;
import android.media.RingtoneManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceActivity;
import android.preference.PreferenceCategory;
import android.preference.PreferenceFragment;
import android.preference.PreferenceManager;
import android.preference.RingtonePreference;
import android.support.v7.internal.widget.TintCheckBox;
import android.support.v7.internal.widget.TintCheckedTextView;
import android.support.v7.internal.widget.TintEditText;
import android.support.v7.internal.widget.TintRadioButton;
import android.support.v7.internal.widget.TintSpinner;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.View;
import org.lunci.dumbthing.R;
import java.util.List;
/**
* A {@link PreferenceActivity} that presents a set of application settings. On
* handset devices, settings are presented as a single list. On tablets,
* settings are split by category, with category headers shown to the left of
* the list of settings.
* <p/>
* See <a href="http://developer.android.com/design/patterns/settings.html">
* Android Design: Settings</a> for design guidelines and the <a
* href="http://developer.android.com/guide/topics/ui/settings.html">Settings
* API Guide</a> for more information on developing a Settings UI.
*/
public class SettingsActivity extends PreferenceActivity {
/**
 * Determines whether to always show the simplified settings UI, where
 * settings are presented in a single list. When false, settings are shown
 * as a master/detail two-pane view on tablets. When true, a single pane is
 * shown on tablets.
 */
// Currently forced to true, so the two-pane path below is effectively disabled.
private static final boolean ALWAYS_SIMPLE_PREFS = true;
@Override
protected void onPostCreate(Bundle savedInstanceState) {
    super.onPostCreate(savedInstanceState);
    // Build the single-pane preference screen once the activity is fully created.
    setupSimplePreferencesScreen();
}
/**
 * Shows the simplified settings UI if the device configuration dictates that a
 * simplified, single-pane UI should be shown.
 */
private void setupSimplePreferencesScreen() {
    if (isSimplePreferences(this)) {
        // In the simplified UI, fragments are not used at all; the older
        // PreferenceActivity APIs are used instead.

        // Add 'general' preferences.
        addPreferencesFromResource(R.xml.pref_general);

        // The 'notifications' and 'data and sync' sections, as well as the
        // summary-to-value bindings, are currently disabled:
        // PreferenceCategory fakeHeader = new PreferenceCategory(this);
        // fakeHeader.setTitle(R.string.pref_header_notifications);
        // getPreferenceScreen().addPreference(fakeHeader);
        // addPreferencesFromResource(R.xml.pref_notification);
        // fakeHeader = new PreferenceCategory(this);
        // fakeHeader.setTitle(R.string.pref_header_data_sync);
        // getPreferenceScreen().addPreference(fakeHeader);
        // addPreferencesFromResource(R.xml.pref_data_sync);
        // bindPreferenceSummaryToValue(findPreference("example_text"));
        // bindPreferenceSummaryToValue(findPreference("example_list"));
        // bindPreferenceSummaryToValue(findPreference("notifications_new_message_ringtone"));
        // bindPreferenceSummaryToValue(findPreference("sync_frequency"));
    }
}
@Override
public View onCreateView(String name, Context context, AttributeSet attrs) {
    // Let the framework attempt to create the view first.
    final View frameworkView = super.onCreateView(name, context, attrs);
    if (frameworkView != null) {
        return frameworkView;
    }

    // Pre-Lollipop only: substitute tint-aware widgets for the framework versions.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
        switch (name) {
            case "CheckBox":
                return new TintCheckBox(this, attrs);
            case "CheckedTextView":
                return new TintCheckedTextView(this, attrs);
            case "EditText":
                return new TintEditText(this, attrs);
            case "RadioButton":
                return new TintRadioButton(this, attrs);
            case "Spinner":
                return new TintSpinner(this, attrs);
        }
    }
    // Not a widget we replace; let the default inflation continue.
    return null;
}
/**
 * {@inheritDoc}
 */
@Override
public boolean onIsMultiPane() {
    // Two panes only on extra-large screens when the simplified UI is not in force.
    final boolean multiPane = isXLargeTablet(this) && !isSimplePreferences(this);
    return multiPane;
}
/**
 * Helper method to determine if the device has an extra-large screen. For
 * example, 10" tablets are extra-large.
 */
private static boolean isXLargeTablet(Context context) {
    final int screenSize = context.getResources().getConfiguration().screenLayout
            & Configuration.SCREENLAYOUT_SIZE_MASK;
    return screenSize >= Configuration.SCREENLAYOUT_SIZE_XLARGE;
}
/**
 * Determines whether the simplified settings UI should be shown. This is
 * true if this is forced via {@link #ALWAYS_SIMPLE_PREFS}, or the device
 * doesn't have newer APIs like {@link PreferenceFragment}, or the device
 * doesn't have an extra-large screen. In these cases, a single-pane
 * "simplified" settings UI should be shown.
 */
private static boolean isSimplePreferences(Context context) {
    if (ALWAYS_SIMPLE_PREFS) {
        return true;
    }
    // Pre-Honeycomb lacks PreferenceFragment; smaller screens also get one pane.
    return Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB
            || !isXLargeTablet(context);
}
/**
 * {@inheritDoc}
 */
@Override
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public void onBuildHeaders(List<Header> target) {
    // Single-pane UI: no category headers are shown.
    if (isSimplePreferences(this)) {
        return;
    }
    loadHeadersFromResource(R.xml.pref_headers, target);
}
/**
* A preference value change listener that updates the preference's summary
* to reflect its new value.
*/
// private static Preference.OnPreferenceChangeListener sBindPreferenceSummaryToValueListener = new Preference.OnPreferenceChangeListener() {
// @Override
// public boolean onPreferenceChange(Preference preference, Object value) {
// String stringValue = value.toString();
//
// if (preference instanceof ListPreference) {
// // For list preferences, look up the correct display value in
// // the preference's 'entries' list.
// ListPreference listPreference = (ListPreference) preference;
// int index = listPreference.findIndexOfValue(stringValue);
//
// // Set the summary to reflect the new value.
// preference.setSummary(
// index >= 0
// ? listPreference.getEntries()[index]
// : null);
//
// } else if (preference instanceof RingtonePreference) {
// // For ringtone preferences, look up the correct display value
// // using RingtoneManager.
// if (TextUtils.isEmpty(stringValue)) {
// // Empty values correspond to 'silent' (no ringtone).
// preference.setSummary(R.string.pref_ringtone_silent);
//
// } else {
// Ringtone ringtone = RingtoneManager.getRingtone(
// preference.getContext(), Uri.parse(stringValue));
//
// if (ringtone == null) {
// // Clear the summary if there was a lookup error.
// preference.setSummary(null);
// } else {
// // Set the summary to reflect the new ringtone display
// // name.
// String name = ringtone.getTitle(preference.getContext());
// preference.setSummary(name);
// }
// }
//
// } else {
// // For all other preferences, set the summary to the value's
// // simple string representation.
// preference.setSummary(stringValue);
// }
// return true;
// }
// };
/**
* Binds a preference's summary to its value. More specifically, when the
* preference's value is changed, its summary (line of text below the
* preference title) is updated to reflect the value. The summary is also
* immediately updated upon calling this method. The exact display format is
* dependent on the type of preference.
*
* @see #sBindPreferenceSummaryToValueListener
*/
// private static void bindPreferenceSummaryToValue(Preference preference) {
// // Set the listener to watch for value changes.
// preference.setOnPreferenceChangeListener(sBindPreferenceSummaryToValueListener);
//
// // Trigger the listener immediately with the preference's
// // current value.
// sBindPreferenceSummaryToValueListener.onPreferenceChange(preference,
// PreferenceManager
// .getDefaultSharedPreferences(preference.getContext())
// .getString(preference.getKey(), ""));
// }
/**
 * This fragment shows general preferences only. It is used when the
 * activity is showing a two-pane settings UI.
 */
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public static class GeneralPreferenceFragment extends PreferenceFragment {
    /**
     * Loads the general preferences from {@code R.xml.pref_general}.
     * Summary-binding is currently disabled (see the commented-out
     * {@code bindPreferenceSummaryToValue} helper elsewhere in this file).
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.pref_general);
        // Bind the summaries of EditText/List/Dialog/Ringtone preferences
        // to their values. When their values change, their summaries are
        // updated to reflect the new value, per the Android Design
        // guidelines.
        // bindPreferenceSummaryToValue(findPreference("example_text"));
        // bindPreferenceSummaryToValue(findPreference("example_list"));
    }
}
/**
* This fragment shows notification preferences only. It is used when the
* activity is showing a two-pane settings UI.
*/
// @TargetApi(Build.VERSION_CODES.HONEYCOMB)
// public static class NotificationPreferenceFragment extends PreferenceFragment {
// @Override
// public void onCreate(Bundle savedInstanceState) {
// super.onCreate(savedInstanceState);
// addPreferencesFromResource(R.xml.pref_notification);
//
// // Bind the summaries of EditText/List/Dialog/Ringtone preferences
// // to their values. When their values change, their summaries are
// // updated to reflect the new value, per the Android Design
// // guidelines.
// bindPreferenceSummaryToValue(findPreference("notifications_new_message_ringtone"));
// }
// }
/**
* This fragment shows data and sync preferences only. It is used when the
* activity is showing a two-pane settings UI.
*/
// @TargetApi(Build.VERSION_CODES.HONEYCOMB)
// public static class DataSyncPreferenceFragment extends PreferenceFragment {
// @Override
// public void onCreate(Bundle savedInstanceState) {
// super.onCreate(savedInstanceState);
// addPreferencesFromResource(R.xml.pref_data_sync);
//
// // Bind the summaries of EditText/List/Dialog/Ringtone preferences
// // to their values. When their values change, their summaries are
// // updated to reflect the new value, per the Android Design
// // guidelines.
// bindPreferenceSummaryToValue(findPreference("sync_frequency"));
// }
// }
}
| |
package mil.army.usace.ehlschlaeger.digitalpopulations.censusgen;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.bind.JAXBException;
import mil.army.usace.ehlschlaeger.digitalpopulations.ConstrainedRealizer;
import mil.army.usace.ehlschlaeger.digitalpopulations.PumsHousehold;
import mil.army.usace.ehlschlaeger.digitalpopulations.PumsHouseholdRealization;
import mil.army.usace.ehlschlaeger.digitalpopulations.PumsPopulation;
import mil.army.usace.ehlschlaeger.digitalpopulations.PumsQuery;
import mil.army.usace.ehlschlaeger.digitalpopulations.Realizer;
import mil.army.usace.ehlschlaeger.digitalpopulations.censusgen.filerelationship.PumsTrait;
import mil.army.usace.ehlschlaeger.digitalpopulations.censusgen.filerelationship.Trait;
import mil.army.usace.ehlschlaeger.digitalpopulations.censusgen.fittingcriteria.ClusterSpec;
import mil.army.usace.ehlschlaeger.digitalpopulations.censusgen.fittingcriteria.FittingCriteria;
import mil.army.usace.ehlschlaeger.digitalpopulations.censusgen.fittingcriteria.MatchSpec;
import mil.army.usace.ehlschlaeger.digitalpopulations.censusgen.fittingcriteria.TraitRefElement;
import mil.army.usace.ehlschlaeger.digitalpopulations.csv2kml.ProgressToy;
import mil.army.usace.ehlschlaeger.digitalpopulations.io.HohRznWriter;
import mil.army.usace.ehlschlaeger.digitalpopulations.io.PumsLoader;
import mil.army.usace.ehlschlaeger.rgik.core.CSVTableNoSwing;
import mil.army.usace.ehlschlaeger.rgik.core.DataException;
import mil.army.usace.ehlschlaeger.rgik.core.GISClass;
import mil.army.usace.ehlschlaeger.rgik.core.GISData;
import mil.army.usace.ehlschlaeger.rgik.core.GISLattice;
import mil.army.usace.ehlschlaeger.rgik.core.GISPointQuadTree;
import mil.army.usace.ehlschlaeger.rgik.core.PointSemiVariogram;
import mil.army.usace.ehlschlaeger.rgik.core.RGIS;
import mil.army.usace.ehlschlaeger.rgik.io.StringOutputStream;
import mil.army.usace.ehlschlaeger.rgik.statistics.PointConstraint;
import mil.army.usace.ehlschlaeger.rgik.statistics.PointSpatialStatistic;
import mil.army.usace.ehlschlaeger.rgik.util.LogUtil;
import mil.army.usace.ehlschlaeger.rgik.util.MyRandom;
import mil.army.usace.ehlschlaeger.rgik.util.ObjectUtil;
import mil.army.usace.ehlschlaeger.rgik.util.TimeTracker;
import org.xml.sax.SAXException;
/**
* Select precise easting/northing values for households based on clustering
* criteria. Attempts to mimic reality where people tend to live closer to
* people with similar attributes.
* <P>
* Is phase 4 of censusgen.
* <p>
* Copyright <a href="http://faculty.wiu.edu/CR-Ehlschlaeger2/">Charles R.
* Ehlschlaeger</a>, work: 309-298-1841, fax: 309-298-3003, This software is
* freely usable for research and educational purposes. Contact C. R.
* Ehlschlaeger for permission for other purposes. Use of this software requires
* appropriate citation in all published and unpublished documentation.
*
* @author William R. Zwicky
*/
public class Phase_LocatePrecisely {
    /** Name of params key that switches on pre-cluster output. */
    public static final String PARAM_SAVEBOTHENDS = "phase4_save_both_ends";
    /** File name tag we use on result files saved before we begin. */
    public static final String PRECLUSTER_LABEL = "precluster";
    /** File name tag we use on result files saved after we're done. */
    public static final String POSTCLUSTER_LABEL = "postcluster";
    /** Shared logger for this package. */
    protected static Logger log = Logger.getLogger(Phase_LocatePrecisely.class.getPackage().getName());
    /** We always use this many lags. */
    protected static final int DEFAULT_NUM_LAGS = 5;
    // INPUT DATA //
    /** Run-time configuration; replaced via setParams(). */
    protected Params params = new Params();
    /** Generates easting/northing for archtypes; may be injected via setRealizer(). */
    protected Realizer realizer;
    /** Number of the realization being generated; embedded in output file names. */
    protected int realizationNum;
    /** Household archtypes to place (null when built from realizations). */
    protected List<PumsHousehold> households;
    /** Map which specifies the region (tract) covering each cell. */
    protected GISClass regionMap;
    /** Geographic extent of regionMap; used to size the quad-tree index. */
    protected GISData bounds;
    /** Relative population density per cell; null means uniform density. */
    protected GISLattice popDensityMap;
    /** Source of randomness; replaceable via setRandomSource(). */
    protected Random random = new Random();
    /** Column schema of the household table, captured from the first household seen. */
    protected CSVTableNoSwing householdSchema;
    /** Column schema of the population table, captured from the first member seen. */
    protected CSVTableNoSwing populationSchema;
    // INTERMEDIATE DATA //
    /** Index of realizations by location. */
    protected GISPointQuadTree<PumsHouseholdRealization> rznIndex;
    /** Index of realizations by tract number. */
    protected RegionIndex regionIndex;
    /** Placement criteria, keyed by trait, in order of importance. */
    protected LinkedHashMap<Trait,TraitRefElement> locationSpecs;
    /** Hard constraints every placement must satisfy. */
    protected List<? extends PointConstraint> constraints;
    /** Starting PSVs, for comparison to final PSVs. */
    ArrayList<PointSpatialStatistic> origPss;
    /** Quality statistics for rznIndex. */
    ArrayList<PointSpatialStatistic> stats;
    /** User's declaration of "good enough". */
    ArrayList<PointSpatialStatistic> goals;
    /** Maps internal region indices back to original region codes — TODO confirm against ConstrainedRealizer. */
    HashMap<Integer, Integer> idReverseMap;
    // OUTPUT DATA //
    // -> none; results are written to disk
/**
 * Build standard instance. You will generally want to call setParams() and
 * setRandomSource(), then call go() to run and generate output files.
 *
 * @param realizationNum
 *            number of realization currently being generated. Copied into
 *            output file names.
 * @param households
 *            list of households that need to be placed
 * @param regionMap
 *            map which specifies the region that covers each cell
 * @param popDensityMap
 *            raster map of relative population density for each cell. Cells
 *            with value '0' will receive no households; value '2' will
 *            receive double the households as value '1'. If null, all cells
 *            receive equal population.
 * @param constraints
 *            hard constraints every placement must satisfy
 * @param criteria
 *            criteria for placing households within tracts, in the order of
 *            importance
 * @param idReverseMap
 *            maps internal region indices back to original region codes
 */
public Phase_LocatePrecisely(
        int realizationNum,
        PumsHousehold[] households,
        GISClass regionMap,
        GISLattice popDensityMap,
        List<? extends PointConstraint> constraints,
        LinkedHashMap<Trait,TraitRefElement> criteria,
        HashMap<Integer, Integer> idReverseMap) {
    this.realizationNum = realizationNum;
    this.households = Arrays.asList(households);
    this.regionMap = regionMap;
    this.bounds = new GISData(regionMap);
    this.popDensityMap = popDensityMap;
    this.constraints = constraints;
    this.locationSpecs = criteria;
    this.idReverseMap = idReverseMap;
    // Capture the archtype schemas from the first households that carry
    // them; stop scanning once both schemas are known.
    for (PumsHousehold archtype : households) {
        if (archtype == null) {
            continue;
        }
        if (householdSchema == null) {
            householdSchema = archtype.getSchema();
        }
        PumsPopulation[] members = archtype.getMembersOfHousehold();
        if (members != null && members.length > 0) {
            if (populationSchema == null) {
                populationSchema = members[0].getSchema();
            }
        }
        if (householdSchema != null && populationSchema != null) {
            break;
        }
    }
}
/**
 * Build an instance for data that is partially processed. Intended for use
 * by main(), which expects data that has been run through the prior phases
 * of Digital Populations.
 * <p>
 * Note: unlike the archtype constructor, this one does not set the
 * 'households' or 'popDensityMap' fields — presumably because go() sees a
 * non-null rznIndex and skips realizeHouseholds() entirely; verify before
 * relying on those fields here.
 *
 * @param realizationNum
 *            number of realization currently being generated. Copied into
 *            output file names.
 * @param households
 *            list of households that need to be placed, and their initial
 *            location. Households will stay in their tract, but their
 *            precise location may move to satisfy 'criteria'.
 * @param regionMap
 *            map which specifies the region that covers each cell
 * @param constraints
 *            hard constraints every placement must satisfy
 * @param criteria
 *            criteria for placing households within tracts, in the order of
 *            importance
 * @param idReverseMap
 *            maps internal region indices back to original region codes
 */
public Phase_LocatePrecisely(
        int realizationNum,
        List<PumsHouseholdRealization> households,
        GISClass regionMap,
        List<? extends PointConstraint> constraints,
        LinkedHashMap<Trait, TraitRefElement> criteria,
        HashMap<Integer, Integer> idReverseMap) {
    this.realizationNum = realizationNum;
    this.regionMap = regionMap;
    this.bounds = new GISData(regionMap);
    this.constraints = constraints;
    this.locationSpecs = criteria;
    this.idReverseMap = idReverseMap;
    // Capture the attribute schemas.
    for(PumsHouseholdRealization house : households) {
        if(house != null) {
            // Grab first households schema
            if(householdSchema == null)
                householdSchema = house.getParentHousehold().getSchema();
            PumsPopulation[] peops = house.getParentHousehold().getMembersOfHousehold();
            if(peops != null && peops.length > 0) {
                // Grab first population schema
                if(populationSchema == null)
                    populationSchema = peops[0].getSchema();
            }
            // Stop scanning once both schemas are known.
            if(householdSchema != null && populationSchema != null)
                break;
        }
    }
    // Add realizations to indices. These pre-built indices make go() skip
    // the initial-placement step.
    this.rznIndex = new GISPointQuadTree<PumsHouseholdRealization>(bounds, 10);
    this.regionIndex = new RegionIndex();
    for(PumsHouseholdRealization house : households) {
        rznIndex.addPoint(house);
        regionIndex.add(regionMap.getCellValue(house.getEasting(), house.getNorthing()), house);
    }
}
/**
* @return collection of realized and adjusted households. Not valid until
* go() has been called.
*/
// public Iterable<PumsHouseholdRealization> getResults() {
// return rznIndex;
//}
/**
 * Change our source of random numbers. Useful for reproducible runs.
 *
 * @param source
 *            new random number generator
 */
public void setRandomSource(Random source) {
    this.random = source;
}
/**
 * Install run-time configuration. Phase 4 requires every household and
 * population field to appear in its output files, so both write-all flags
 * are forced on (with a warning) if the caller left them disabled.
 *
 * @param params current set of run-time parameters
 */
public void setParams(Params params) {
    this.params = params;
    if(!params.getWriteAllHohFields()) {
        // Fixed typo in user-facing message: "Paramter" -> "Parameter".
        log.warning("Parameter '"+Params.WRITE_ALL_HOH_FIELDS+"' must be enabled for phase 4. Enabling now.");
        params.setWriteAllHohFields(true);
    }
    if(!params.getWriteAllPopFields()) {
        log.warning("Parameter '"+Params.WRITE_ALL_POP_FIELDS+"' must be enabled for phase 4. Enabling now.");
        params.setWriteAllPopFields(true);
    }
}
/**
 * Install a custom realizer. In this class, this is only used to generate
 * easting/northing values when writing output files. If none is provided
 * here, realizeHouseholds() will construct a ConstrainedRealizer from our
 * inputs on first use.
 *
 * @param rzr
 *            new realizer
 */
public void setRealizer(Realizer rzr) {
    this.realizer = rzr;
}
/**
 * Perform the process as currently configured: realize an initial layout
 * (unless one was supplied via the realization constructor), save it,
 * build the goal statistics, run the clustering optimizer, then save the
 * final arrangement.
 *
 * @throws IOException on any error writing output files
 */
public void go() throws IOException {
    // true to save incoming hoh as well as final hoh arrangement for
    // comparison. false to purge older when saving final.
    boolean saveBothEnds = params.getBoolean(PARAM_SAVEBOTHENDS, true);
    // Create an initial layout if needed. Both indices are non-null when
    // the realization-list constructor was used, in which case the input
    // already contains locations.
    if(rznIndex == null || regionIndex == null) {
        LogUtil.progress(log, "ConflatePumsQueryWithTracts: realizing households");
        realizeHouseholds();
        TimeTracker.finished("Realizing households");
        LogUtil.cr(log);
        LogUtil.progress(log, "ConflatePumsQueryWithTracts: saving intermediate household locations");
        if(saveBothEnds) {
            // Label incoming hoh; we'll preserve this.
            writeFileSet(PRECLUSTER_LABEL, rznIndex, null, true);
        }
        else {
            // Use generic label; we'll delete when saving final.
            writeFileSet("intermediate", rznIndex, null, true);
        }
        TimeTracker.finished("Writing files");
    }
    LogUtil.cr(log);
    LogUtil.progress(log, "Building goal statistics.");
    prepareStats();
    LogUtil.cr(log);
    LogUtil.progress(log, "Optimizing clustering of households within regions.");
    doCluster();
    TimeTracker.finished("Phase 4");
    // Save final file(s).
    LogUtil.cr(log);
    LogUtil.progress(log, "ConflatePumsQueryWithTracts: saving final household locations ...");
    if(saveBothEnds) {
        // Save results, preserve precluster.
        writeFileSet(POSTCLUSTER_LABEL, rznIndex, null, false);
    }
    else {
        // Save final results.
        // NOTE(review): this call is commented out, so with saveBothEnds
        // disabled the final arrangement is never written — confirm this
        // is intentional and not a leftover from debugging.
        // writeFileSet(null, rznIndex, null, true);
    }
    TimeTracker.finished("Writing files");
}
/**
 * Construct initial arrangement of households. This is like
 * {@link Phase_InitialPlacement}, only it assigns precise easting/northing
 * values within regions. Fills rznIndex and regionIndex, lazily building a
 * ConstrainedRealizer if none was injected via setRealizer().
 */
protected void realizeHouseholds() {
    rznIndex = new GISPointQuadTree<PumsHouseholdRealization>(bounds, 10);
    regionIndex = new RegionIndex();
    if (realizer == null) {
        realizer = new ConstrainedRealizer(
            regionMap,
            popDensityMap,
            constraints,
            idReverseMap);
        realizer.setRandomSource(random);
    }
    // Place every archtype, indexing each realization by both its precise
    // location and its containing region.
    for (Iterator<PumsHouseholdRealization> placed = realizer.iterate(households.iterator());
            placed.hasNext();) {
        PumsHouseholdRealization home = placed.next();
        int region = regionMap.getCellValue(home.getEasting(), home.getNorthing());
        rznIndex.addPoint(home);
        regionIndex.add(region, home);
    }
}
/**
 * Build the statistic objects required by doCluster(): the starting
 * semi-variograms (origPss), working copies that doCluster() will mutate
 * (stats), and reduced "good enough" targets (goals).
 */
protected void prepareStats() {
    long numLags = params.getLong("phase4_num_lags", DEFAULT_NUM_LAGS);
    // Prepare stats containers.
    origPss = new ArrayList<PointSpatialStatistic>();
    stats = new ArrayList<PointSpatialStatistic>();
    goals = new ArrayList<PointSpatialStatistic>();
    // Build all our PSV and goals. Iterate entries directly rather than
    // keySet()+get() to avoid a redundant hash lookup per trait.
    for (Map.Entry<Trait, TraitRefElement> entry : locationSpecs.entrySet()) {
        // 'trait' says how to compute stats; 'ref' says how to use trait.
        Trait trait = entry.getKey();
        TraitRefElement ref = entry.getValue();
        if(ref instanceof ClusterSpec) {
            // Generate clustering stats from a trait.
            log.info(" Building "+ref);
            ClusterSpec spec = (ClusterSpec)ref;
            PumsTraitGetter valueGetter = new PumsTraitGetter((PumsTrait) trait, householdSchema, populationSchema);
            PointSemiVariogram psv = PointSemiVariogram.createStat(
                spec.distance/numLags, spec.distance,
                rznIndex,
                valueGetter);
            origPss.add(psv);
            stats.add(psv.createCopy());
            goals.add(reduce(psv, spec.reduction));
        }
        else if(ref.getClass() == MatchSpec.class){
            // MatchSpec is only used for attribute maps, which are
            // considered constraints. Since we were passed a list of
            // constraints to the constructor, we don't need to process
            // these elements.
        }
        else {
            throw new DataException("Unknown position-rules element: "+ref);
        }
    }
}
/**
 * Main method of the algorithm: hill-climbing over household positions.
 * Repeatedly picks a random region, swaps the locations of two random
 * households in it, and keeps the swap only when it moves the spatial
 * statistics closer to the goals. Terminates on too many consecutive
 * failed swaps or on the configured time limit. Periodically saves
 * intermediate results.
 *
 * @throws IOException
 *             on any error logging status
 */
protected void doCluster() throws IOException {
    assert regionIndex.getNumRegions() > 0;
    // Collect sorted list of index codes.
    List<Integer> regionIDs = new ArrayList<Integer>(regionIndex.getIDs());
    Collections.sort(regionIDs);
    // Calculate quality of current arrangement as the RMS distance of all
    // statistics from their goals.
    double oldSpread = 0;
    for(int i=0; i<goals.size(); i++) {
        double s = stats.get(i).spread(goals.get(i));
        oldSpread += s*s;
    }
    oldSpread = Math.sqrt(oldSpread);
    // Log initial statistics.
    LogUtil.cr(log);
    LogUtil.result(log, "INITIAL COVARIANCE OF LOCATIONS:");
    LogUtil.cr(log);
    for(int i=0; i<stats.size(); i++) {
        PointSpatialStatistic pss = stats.get(i);
        StringOutputStream sos = new StringOutputStream();
        sos.format("clus[%d]: ", i);
        pss.print(sos);
        LogUtil.result(log, sos.toString());
        LogUtil.cr(log);
    }
    LogUtil.cr(log);
    LogUtil.result(log, "GOAL COVARIANCE:");
    LogUtil.cr(log);
    for(int i=0; i<goals.size(); i++) {
        PointSpatialStatistic pss = goals.get(i);
        StringOutputStream sos = new StringOutputStream();
        sos.format("clus[%d]: ", i);
        pss.print(sos);
        LogUtil.result(log, sos.toString());
        LogUtil.cr(log);
    }
    // Determine amount of time we can spend. tMainAbort < 0 means no limit.
    long tMainStart = System.nanoTime();
    long tMainAbort = -1;
    if(params.getPhase4TimeLimit() > 0)
        tMainAbort = tMainStart + (long)(params.getPhase4TimeLimit() * 60 * 1e9);
    // Determine max number of consecutive failures allowed.
    long maxFails = rznIndex.getNumberPointsIncludingSubNodes();
    // Determine when to save intermediate results. (Use same timer as for phase 3.)
    long tNextSave = tMainStart + (long)(params.getPhase3SaveIntermediate() * 60 * 1e9);
    // while not good enough:
    //   pick region at random
    //   pick two points at random
    //   swap them, update pss
    //   if pss reduced, keep
    //   else swap back
    printRunHeader(goals);
    printRunProgress(oldSpread, 0, 0, 0);
    ProgressToy prog = new ProgressToy(log, 60, 0, "Moved");
    long attempts=0, fails = 0;
    long moves=0, movesAtLastSave = 0;
    main_loop: for(;;) {
        // Pick region at random.
        int tractID = MyRandom.pick(random, regionIDs);
        List<PumsHouseholdRealization> tractRzns = regionIndex.getRzns(tractID);
        // From region, pick two households at random.
        // NOTE(review): the two println null-checks below look like leftover
        // debugging; they print to stdout and then fall through to the NPE
        // anyway — consider removing or converting to proper error handling.
        if(random == null)
            System.out.println("random");
        if(tractRzns==null)
            System.out.println("tractRzns");
        int p1 = random.nextInt(tractRzns.size());
        PumsHouseholdRealization h1 = tractRzns.get(p1);
        int p2 = random.nextInt(tractRzns.size());
        PumsHouseholdRealization h2 = tractRzns.get(p2);
        // Verify that new locations satisfy all point constraints. If ANY
        // constraint rejects either house, we cannot swap.
        boolean forbid = false;
        // - allows() requires hoh to have a location, so we swap here, then swap back when done
        //Yizhao
        //swapLocation(h1, h2);
        this.rznIndex.swapPoints(h1, h2);
        for(PointConstraint c : constraints) {
            if(c.allows(h1) == PointConstraint.FORBID || c.allows(h2) == PointConstraint.FORBID) {
                forbid = true;
                break;
            }
        }
        //Yizhao
        //swapLocation(h1, h2);
        this.rznIndex.swapPoints(h1, h2);
        // - check result only after hoh are swapped back
        if(forbid) {
            fails += 1;
            continue;
        }
        // Swap locations of households, and update statistics. Statistics
        // are updated incrementally: remove both points at the old
        // locations, swap, then re-add them at the new locations.
        ArrayList<PointSpatialStatistic> swapd = new ArrayList<PointSpatialStatistic>();
        for(PointSpatialStatistic pss : stats) {
            PointSpatialStatistic neu = (PointSpatialStatistic) pss.createCopy();
            neu.modifySS4RemovedPt(h1, 0);
            neu.modifySS4RemovedPt(h2, 0);
            swapd.add(neu);
        }
        //Edited by Yizhao Gao
        //swapLocation(h1, h2);
        this.rznIndex.swapPoints(h1, h2);
        for(PointSpatialStatistic neu : swapd) {
            neu.modifySS4NewPt(h1, 0);
            neu.modifySS4NewPt(h2, 0);
        }
        // Calculate quality of new arrangement (RMS spread, as above).
        double neuSpread = 0;
        for(int i=0; i<goals.size(); i++) {
            double s = swapd.get(i).spread(goals.get(i));
            neuSpread += s*s;
        }
        neuSpread = Math.sqrt(neuSpread);
        // If quality improved, keep.
        if(neuSpread < oldSpread) {
            stats = swapd;
            oldSpread = neuSpread;
            fails = 0;
            moves += 1;
        }
        // else swap back
        else {
            //Edited by Yizhao Gao
            //swapLocation(h1, h2);
            this.rznIndex.swapPoints(h1, h2);
            fails += 1;
        }
        attempts += 1;
        long tNow = System.nanoTime();
        double totalSeconds = (tNow-tMainStart) / 1e9;
        boolean printed = false;
        if(prog.updateProgress(moves)) {
            printRunProgress(oldSpread, totalSeconds, attempts, moves);
            // Prevent this line from being duplicated below.
            printed = true;
        }
        // Too many failures, assume no improvements are available.
        if(fails > maxFails) {
            if(!printed)
                printRunProgress(oldSpread, totalSeconds, attempts, moves);
            LogUtil.progress(log, "Giving up: Too many failures.");
            break main_loop;
        }
        // Abort run after time limit if requested.
        if(tMainAbort > 0 && tNow > tMainAbort) {
            if(!printed)
                printRunProgress(oldSpread, totalSeconds, attempts, moves);
            LogUtil.progress(log, "Giving up: Time limit has been reached.");
            break main_loop;
        }
        // Save intermediate results periodically.
        if(tNow > tNextSave) {
            // Save only if something has changed.
            if(moves != movesAtLastSave) {
                movesAtLastSave = moves;
                LogUtil.progress(log, "Long run, saving intermediate data set.");
                try {
                    // If saveBothEnds, then we're going to leave
                    // "intermediate" lying around too. We're not smart
                    // enough to delete "intermediate" but preserve
                    // "precluster".
                    boolean saveBothEnds = params.getBoolean(PARAM_SAVEBOTHENDS, true);
                    writeFileSet("intermediate", rznIndex, null, !saveBothEnds);
                } catch (IOException e) {
                    log.log(Level.WARNING, "Unable to save intermediate data, continuing anyway.", e);
                }
                LogUtil.cr(log);
                LogUtil.result(log, "INTERMEDIATE COVARIANCE OF LOCATIONS:");
                LogUtil.cr(log);
                for(int i=0; i<stats.size(); i++) {
                    PointSpatialStatistic pss = stats.get(i);
                    PointSpatialStatistic orig = origPss.get(i);
                    PointSpatialStatistic goal= goals.get(i);
                    StringOutputStream sos = new StringOutputStream();
                    sos.format("clus[%d]: ", i);
                    if(pss instanceof PointSemiVariogram)
                        ((PointSemiVariogram) pss).printComparison(sos, (PointSemiVariogram)orig, (PointSemiVariogram)goal);
                    else
                        pss.print(sos);
                    LogUtil.result(log, sos.toString());
                    LogUtil.cr(log);
                }
            }
            // Save again precisely one increment from now. Ignore
            // however late we are performing this save, and also
            // ignore however long this save took.
            tNextSave = System.nanoTime() + (long)(params.getPhase3SaveIntermediate() * 60 * 1e9);
        }
    }
    // Log final statistics.
    LogUtil.cr(log);
    LogUtil.result(log, "FINAL QUALITY OF LOCATIONS:");
    LogUtil.cr(log);
    for(int i=0; i<stats.size(); i++) {
        PointSpatialStatistic pss = stats.get(i);
        PointSpatialStatistic orig = origPss.get(i);
        PointSpatialStatistic goal= goals.get(i);
        StringOutputStream sos = new StringOutputStream();
        sos.format("clus[%d]: ", i);
        if(pss instanceof PointSemiVariogram)
            ((PointSemiVariogram) pss).printComparison(sos, (PointSemiVariogram)orig, (PointSemiVariogram)goal);
        else
            pss.print(sos);
        LogUtil.result(log, sos.toString());
        LogUtil.cr(log);
    }
}
/**
 * Print to the log a header line for our progress reports, followed by a
 * dashed rule of the same length.
 *
 * @param goals
 *            goal statistics; only the count is used, to emit one
 *            "clus[i]" column per statistic
 */
protected void printRunHeader(ArrayList<PointSpatialStatistic> goals) {
    // StringBuilder: single-threaded use, no need for StringBuffer's locking.
    StringBuilder buf = new StringBuilder();
    buf.append(String.format("%8s %11s %6s ", "Moves", "Attempts", "Minute"));
    for (int i = 0; i < goals.size(); i++)
        buf.append(String.format("%10s ", "clus[" + i + "]"));
    buf.append(String.format("%10s", "= Spread "));
    String st = buf.toString();
    LogUtil.cr(log);
    LogUtil.progress(log, st);
    // "." is a regex wildcard here, so every character becomes a dash,
    // producing an underline exactly as long as the header.
    st = st.replaceAll(".", "-");
    LogUtil.progress(log, st);
}
/**
 * Print to the log our current progress: move/attempt counts, elapsed
 * minutes, the per-statistic spread from goal, and the combined spread.
 *
 * @param spread
 *            combined (RMS) distance of all statistics from their goals
 * @param totalSeconds
 *            elapsed run time, in seconds
 * @param attempts
 *            number of swaps tried so far
 * @param moves
 *            number of swaps kept so far
 */
protected void printRunProgress(double spread, double totalSeconds,
        long attempts, long moves) {
    // StringBuilder: single-threaded use, no need for StringBuffer's locking.
    StringBuilder buf = new StringBuilder();
    buf.append(String.format("%8d %11d %6.1f ", moves, attempts, totalSeconds/60.0));
    for(int i=0; i<goals.size(); i++) {
        double s = stats.get(i).spread(goals.get(i));
        buf.append(String.format("%10.8f ", s));
    }
    buf.append(String.format("%10.8f", spread));
    LogUtil.progress(log, buf.toString());
}
/**
 * Write all required output files.
 *
 * @param nameNote
 *            which version of file, as a note to the user. Will be embedded
 *            in file names. Suggestions: "preliminary", "intermediate",
 *            "phase3", etc. Use null if no note is desired (i.e. is final
 *            version of file.)
 * @param houses
 *            list of households and attached members (location and
 *            metadata) to write
 * @param filter
 *            selects a subset of households to write. null will write all
 *            given households. When non-null, a second "(filtered)" file
 *            set is written in addition to the full set.
 * @param flushOlder
 *            'true' to delete older versions of these files, or 'false' to
 *            leave them. This should normally be 'true'; only use 'false'
 *            for testing and debugging.
 *
 * @throws IOException
 *             on any error creating files. If there's a problem renaming
 *             files, it will only appear in the log.
 */
protected void writeFileSet(
        String nameNote,
        GISPointQuadTree<PumsHouseholdRealization> houses,
        PumsQuery filter, boolean flushOlder)
        throws IOException {
    HohRznWriter writer = new HohRznWriter(RGIS.getOutputFolder());
    // Only emit a population file when we actually saw a population schema.
    boolean writePop = (populationSchema != null);
    if(filter != null) {
        String newNote = "(filtered)" + ObjectUtil.nz(nameNote);
        writer.writeFileSet(
            realizationNum, newNote,
            filter.iterateRzn(houses.iterator()),
            flushOlder, writePop,
            params.getWriteAllHohFields(), params.getWriteAllPopFields(),
            null, null);
    }
    // Now we can write the full set of households.
    writer.writeFileSet(
        realizationNum, nameNote,
        houses.iterator(),
        flushOlder, writePop,
        params.getWriteAllHohFields(), params.getWriteAllPopFields(),
        null, null);
}
/**
 * <P>
 * Build a goal PSV that's reduced from an existing PSV. Inertia values are
 * scaled progressively: the first lag is reduced by nearly the full given
 * amount, while lags near the maximum distance are barely reduced at all.
 * Since each lag covers a range of distances, it is reduced by the average
 * value appropriate for its range.
 * <P>
 * For example, if a reduction of 20% is requested on a PSV with 5 lags, the
 * actual reductions will be:
 *
 * <PRE>
 *   lag[0]  18%
 *   lag[1]  14%
 *   lag[2]  10%
 *   lag[3]   6%
 *   lag[4]   2%
 * </PRE>
 *
 * lag[0] needs to be reduced by 20% at distance zero, and 16% at its outer
 * limit. Since a lag only contains a single set of statistics, it is simply
 * reduced by the average amount of 18%.
 *
 * @param psv
 *            PSV to reduce
 * @param reduction
 *            percentage reduction to apply at psv.getMaximumDistance()
 *
 * @return new PSV with modified inertia values
 */
protected static PointSemiVariogram reduce(PointSemiVariogram psv, double reduction) {
    PointSemiVariogram goal = PointSemiVariogram.create(psv.getLagDistance(), psv.getMaximumDistance(), null);
    // Convert percentage to a fraction, then step it down linearly per lag.
    double fraction = reduction / 100.0;
    double step = fraction / psv.getNumberLags();
    // Mid-point of the first lag's reduction range.
    double first = fraction - step/2;
    for (int lag = 0; lag < psv.getNumberLags(); lag++) {
        double cut = first - (lag*step);
        goal.setInertiaValue(lag, (1-cut) * psv.getInertiaValue(lag));
    }
    return goal;
}
/**
* Swap the geographic locations (easting and northing) of two households.
*
* @param h1
* @param h2
*/
/*
protected static void swapLocation(PumsHouseholdRealization h1, PumsHouseholdRealization h2) {
double e1 = h1.getEasting();
double n1 = h1.getNorthing();
h1.setEasting(h2.getEasting());
h1.setNorthing(h2.getNorthing());
h2.setEasting(e1);
h2.setNorthing(n1);
}
*/
/**
 * Run this phase on the saved results of the prior phase (class
 * {@link Phase_OptimizeRegions}). WARNING: Do not use our
 * "rzn###-households.csv" file naming scheme; these may be deleted when the
 * new results are saved.
 * <p>
 * NOTE(review): this is a developer test harness — every input path below
 * is hard-coded to one developer's Windows machine, and any command-line
 * argument is rejected.
 *
 * @param args must be empty; any argument aborts with an error
 *
 * @throws JAXBException
 * @throws IOException
 * @throws SAXException
 */
public static void main(String[] args) throws JAXBException, IOException, SAXException {
    // args: -c config -r random --xCol n --yCol n --uidCol n --hohPopCol n fcFile hohFile popFile
    //       -o output-base
    if(args.length > 0)
        throw new DataException("This is just a test app; all args are hard-coded.");
    //TODO: command-line args
    //  Should try to parse hohFile for rzn number so output file has correct name.
    //  Also need to tweak name so we don't overwrite input.
    // We only do one rzn at a time, so rznNum can be zero.
    int rznNum = 0;
    File projDir = new File("C:/Users/Bill/Documents/Projects/DigitalPopulations/workspace/Honduras_021111");
    File hohFile = new File(projDir, String.format("run/phase4-rzn%03d-households(precluster).csv", rznNum));
    File popFile = new File(projDir, String.format("run/phase4-rzn%03d-population(precluster).csv", rznNum));
    File fcFile = new File(projDir, "data/goal_relationship_map.dprxml");
    FittingCriteria fc = FittingCriteria.loadFile(fcFile, null);
    DataPreparer gen = new DataPreparer(fc);
    // DP can load household archtypes, but we need realizations.
    PumsLoader pl = new PumsLoader();
    ArrayList<PumsHouseholdRealization> hohs = pl.loadPumsHouseholdRzns(hohFile, rznNum, "x", "y", "uid");
    List<PumsPopulation> pops = pl.loadPumsPopulation(popFile, "household");
    pl.populateHouseholdRzns(hohs, pops, null);
    Params params = new Params();
    params.set(PARAM_SAVEBOTHENDS, true); // force true; always preserve user's files
    params.setWriteAllHohFields(true); // disable these if we load user's params file
    params.setWriteAllPopFields(true);
    Phase_LocatePrecisely phase = new Phase_LocatePrecisely(
        rznNum,
        hohs,
        gen.getPrimaryRegion().map,
        gen.makeConstraints(),
        fc.traitCluster,
        gen.getPrimaryRegion().idReverseMap);
    phase.setParams(params);
    // phase.setRandomSource(random);
    phase.go();
}
}
| |
/**
*
*/
package gov.nih.nci.cagrid.portal.aggr.metachange;
import gov.nih.nci.cagrid.portal.dao.ConceptHierarchyNodeDao;
import gov.nih.nci.cagrid.portal.domain.ConceptHierarchy;
import gov.nih.nci.cagrid.portal.domain.ConceptHierarchyNode;
import gov.nih.nci.system.applicationservice.EVSApplicationService;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.LexGrid.LexBIG.DataModel.Collections.AssociatedConceptList;
import org.LexGrid.LexBIG.DataModel.Collections.ResolvedConceptReferenceList;
import org.LexGrid.LexBIG.DataModel.Core.AssociatedConcept;
import org.LexGrid.LexBIG.DataModel.Core.Association;
import org.LexGrid.LexBIG.DataModel.Core.CodingSchemeVersionOrTag;
import org.LexGrid.LexBIG.DataModel.Core.ResolvedConceptReference;
import org.LexGrid.LexBIG.Extensions.Generic.LexBIGServiceConvenienceMethods;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeSet;
import org.LexGrid.concepts.Definition;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.orm.hibernate3.HibernateTemplate;
/**
* @author <a href="mailto:joshua.phillips@semanticbits.com">Joshua Phillips</a>
*
*/
public class LexBIGConceptHandler implements ConceptHandler {
    private static final Log logger = LogFactory
            .getLog(LexBIGConceptHandler.class);
    // Hibernate session access, used to flush after node creation.
    private HibernateTemplate hibernateTemplate;
    // LexBIG/EVS service used to walk the coding scheme's concept graph.
    private EVSApplicationService evsApplicationService;
    // Version (or tag) of the coding scheme to query.
    private CodingSchemeVersionOrTag version;
    // URI identifying the coding scheme; doubles as the hierarchy URI.
    private String codingSchemeUri;
    // DAO for persisting and looking up hierarchy nodes.
    private ConceptHierarchyNodeDao conceptHierarchyNodeDao;
    /**
     * Default constructor; collaborators are injected via setters.
     */
    public LexBIGConceptHandler() {
    }
/*
 * (non-Javadoc)
 *
 * @see gov.nih.nci.cagrid.portal.aggr.metachange.ConceptHandler#handleConcept(java.lang.String)
 */
public ConceptHierarchyNode handleConcept(String code) {
    try {
        // Resolve the code against the (lazily-created) hierarchy.
        return handleCode(getHierarchy(), code);
    } catch (Exception ex) {
        throw new RuntimeException("Error handling concept: "
                + ex.getMessage(), ex);
    }
}
/**
 * Walks the path from the ontology root down to {@code code}, creating any
 * hierarchy nodes that are not yet persisted, and returns the node for
 * {@code code} itself (the leaf). Each newly created child is linked to its
 * parent and recorded as a descendant of every ancestor on the path.
 *
 * @param hierarchy hierarchy to populate
 * @param code concept code to resolve
 * @return the persisted leaf node, or null if no path was found
 * @throws Exception on any LexBIG or persistence error
 */
private ConceptHierarchyNode handleCode(ConceptHierarchy hierarchy,
        String code) throws Exception {
    ConceptHierarchyNode leaf = null;
    ResolvedConceptReference[] pathFromRoot = getPathFromRoot(code);
    List<ConceptHierarchyNode> pathFromRootNodes = new ArrayList<ConceptHierarchyNode>();
    for (int idx = 0; idx < pathFromRoot.length; idx++) {
        ResolvedConceptReference ac = pathFromRoot[idx];
        logger.debug("ac = " + ac.getEntityDescription().getContent());
        // Look up the node among the ancestors collected so far.
        ConceptHierarchyNode node = getConceptHierarchyNodeDao()
                .getByConceptInHierarchy(hierarchy, pathFromRootNodes,
                        ac.getConceptCode());
        if (node == null) {
            logger.debug("creating new node");
            // createNode() persists the new node before returning it.
            node = createNode(hierarchy, idx, ac);
            if (idx > 0) {
                // Then we're on some child
                // Add current node as child to parent
                ConceptHierarchyNode parentNode = pathFromRootNodes
                        .get(idx - 1);
                logger.debug("adding node as child to "
                        + parentNode.getId());
                node.setParent(parentNode);
                // Add current node as descendant to each ancestor
                for (ConceptHierarchyNode ancestorNode : pathFromRootNodes) {
                    logger.debug("adding node as descendant to "
                            + ancestorNode.getId());
                    node.getAncestors().add(ancestorNode);
                }
                // Re-save to persist the parent/ancestor links.
                getConceptHierarchyNodeDao().save(node);
            }
            getHibernateTemplate().flush();
        }
        pathFromRootNodes.add(node);
    }
    if(pathFromRootNodes.size() > 0){
        // pathFromRootNodes and pathFromRoot have equal length here, so
        // this picks the last node built — i.e. the node for 'code'.
        leaf = pathFromRootNodes.get(pathFromRoot.length - 1);
    }
    return leaf;
}
private ConceptHierarchyNode createNode(ConceptHierarchy hierarchy,
int level, ResolvedConceptReference ac) {
ConceptHierarchyNode node = new ConceptHierarchyNode();
node.setHierarchy(hierarchy);
node.setLevel(level);
node.setName(ac.getEntityDescription().getContent());
node.setCode(ac.getConceptCode());
Definition[] defs = ac.getReferencedEntry().getDefinition();
if (defs.length > 0) {
node.setDescription(defs[0].getText().getContent());
}
getConceptHierarchyNodeDao().save(node);
return node;
}
private ConceptHierarchy getHierarchy() throws Exception {
ConceptHierarchy h = getConceptHierarchyNodeDao().getHierarchyByUri(
getCodingSchemeUri());
if (h == null) {
h = new ConceptHierarchy();
h.setUri(getCodingSchemeUri());
getHibernateTemplate().save(h);
}
return h;
}
private ResolvedConceptReference[] getPathFromRoot(String code)
throws Exception {
ArrayList<ResolvedConceptReference> pathList = new ArrayList<ResolvedConceptReference>();
String childCode = code;
String parentCode = null;
LexBIGServiceConvenienceMethods lbscm = (LexBIGServiceConvenienceMethods) getEvsApplicationService()
.getGenericExtension("LexBIGServiceConvenienceMethods");
do {
AssociatedConcept ac = null;
parentCode = null;
Association assoc = lbscm.getParentsOf(childCode, null,
"hasSubtype", getCodingSchemeUri(), getVersion(), true);
if (assoc != null) {
AssociatedConceptList l = assoc.getAssociatedConcepts();
if (l != null) {
AssociatedConcept[] acl = l.getAssociatedConcept();
if (acl.length > 1) {
logger.warn(acl.length + " parent concepts for "
+ childCode + ". Taking first.");
for (AssociatedConcept c : acl) {
logger.warn("PARENT: " + c.getConceptCode() + ":"
+ c.getEntityDescription().getContent());
}
acl = new AssociatedConcept[] { acl[0] };
}
if (acl.length == 1) {
ac = acl[0];
parentCode = acl[0].getConceptCode();
childCode = parentCode;
}
}
}
if (parentCode != null) {
pathList.add(0, ac);
}
} while (parentCode != null);
// Get the actual leaf concept
CodedNodeSet cns = lbscm.createCodeNodeSet(new String[] { code },
getCodingSchemeUri(), getVersion());
ResolvedConceptReferenceList rcrl = cns.resolveToList(null, null, null,
1);
ResolvedConceptReference[] rcr = rcrl.getResolvedConceptReference();
if (rcr.length == 0) {
logger.debug("Couldn't resolve concept code: " + code);
} else {
pathList.add(rcr[0]);
}
return (ResolvedConceptReference[]) pathList
.toArray(new ResolvedConceptReference[pathList.size()]);
}
public void processNewConcepts() {
try {
ConceptHierarchy hierarchy = getHierarchy();
String getNewConceptCodes = "select distinct(conceptCode) as code "
+ "from SemanticMetadata " + "where conceptCode not in ("
+ "select distinct(code) from ConceptHierarchyNode)";
List conceptCodes = getHibernateTemplate().find(getNewConceptCodes);
for (Iterator i = conceptCodes.iterator(); i.hasNext();) {
String code = (String) i.next();
handleCode(hierarchy, code);
}
} catch (Exception ex) {
String msg = "Error processing new concepts: " + ex.getMessage();
logger.error(msg, ex);
throw new RuntimeException(msg, ex);
}
}
public HibernateTemplate getHibernateTemplate() {
return hibernateTemplate;
}
public void setHibernateTemplate(HibernateTemplate hibernateTemplate) {
this.hibernateTemplate = hibernateTemplate;
}
public EVSApplicationService getEvsApplicationService() {
return evsApplicationService;
}
public void setEvsApplicationService(
EVSApplicationService evsApplicationService) {
this.evsApplicationService = evsApplicationService;
}
public CodingSchemeVersionOrTag getVersion() {
return version;
}
public void setVersion(CodingSchemeVersionOrTag version) {
this.version = version;
}
public String getCodingSchemeUri() {
return codingSchemeUri;
}
public void setCodingSchemeUri(String codingSchemeUri) {
this.codingSchemeUri = codingSchemeUri;
}
public ConceptHierarchyNodeDao getConceptHierarchyNodeDao() {
return conceptHierarchyNodeDao;
}
public void setConceptHierarchyNodeDao(
ConceptHierarchyNodeDao conceptHierarchyNodeDao) {
this.conceptHierarchyNodeDao = conceptHierarchyNodeDao;
}
}
| |
// Copyright 2009 Konrad Twardowski
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.makagiga.commons.validator;
import java.awt.Container;
import java.awt.Rectangle;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.io.Serializable;
import javax.accessibility.AccessibleContext;
import javax.swing.InputVerifier;
import javax.swing.JComponent;
import javax.swing.JLayeredPane;
import javax.swing.JScrollPane;
import javax.swing.SwingUtilities;
import javax.swing.text.JTextComponent;
import org.makagiga.commons.MLogger;
import org.makagiga.commons.TK;
import org.makagiga.commons.UI;
import org.makagiga.commons.WTFError;
import org.makagiga.commons.annotation.InvokedFromConstructor;
import org.makagiga.commons.autocompletion.AutoCompletion;
import org.makagiga.commons.swing.MDialog;
import org.makagiga.commons.swing.MScrollPane;
import org.makagiga.commons.swing.MText;
/**
* @since 3.4, 3.8 (extends InputVerifier)
*/
public abstract class Validator<V> extends InputVerifier implements Serializable {

    // public

    /** Kind of message shown for a failed validation. */
    public enum MessageType { ERROR, INFO, WARNING }

    // private

    // When false, validate() short-circuits and reports success.
    private boolean enabled = true;

    // The component being validated; may be null until setComponent is called.
    private JComponent component;

    // Message type for the current validation run; reset to ERROR before each
    // isValid() call, subclasses may override it via setMessageType.
    private MessageType messageType = MessageType.ERROR;

    // Shared focus listener: revalidates on focus gain, hides the validator
    // message on focus loss.
    private static final StaticHandler staticHandler = new StaticHandler();

    // public

    /** Creates a validator with no component attached. */
    public Validator() { }

    /** Creates a validator attached to the given component. */
    public Validator(final JComponent component) {
        setComponent(component);
    }

    /** Returns the validated component, or null if none is set. */
    public JComponent getComponent() { return component; }

    /**
     * Attaches this validator to {@code component}. Any previously attached
     * component is detached first ({@link #uninstall()} is invoked); passing
     * null simply detaches.
     */
    @InvokedFromConstructor
    public void setComponent(final JComponent component) {
        if (this.component != null) {
            this.component.removeFocusListener(staticHandler);
            uninstall();
        }
        this.component = component;
        if (this.component != null) {
            this.component.addFocusListener(staticHandler);
            install();
        }
    }

    /**
     * @since 3.6
     */
    public boolean isEnabled() { return enabled; }

    /**
     * @since 3.6
     */
    public void setEnabled(final boolean value) { enabled = value; }

    /**
     * Runs {@link #isValid()} and synchronizes the owning dialog's validator
     * message and OK button with the result. An exception thrown by
     * {@code isValid()} marks the value invalid; its message becomes the text
     * shown to the user.
     *
     * @return true when the value is valid, or when validation does not apply
     *         (validator disabled, or the component has no owning MDialog)
     */
    public final boolean validate() {
        if (!enabled)
            return true;
        MDialog dialog = getDialog();
        if (dialog == null)
            return true;
        boolean updateMessage = false;
        boolean valid;
        ValidatorMessage message = dialog.getValidatorMessage();
        try {
            // Default severity for this run; isValid() may change it.
            setMessageType(MessageType.ERROR);
            valid = isValid();
            if (!valid && (message != null)) {
                // isValid() returned false without an exception: clear any
                // stale message content.
                updateMessage = true;
                message.validatedBy = this;
                message.setIcon(null);
                message.setText(null);
                message.setToolTipText(null);
            }
        }
        catch (Exception exception) {
            // Validation failure with a user-visible explanation.
            String text = exception.getMessage();
            if (message != null) {
                updateMessage = true;
                message.validatedBy = this;
                message.setToolTipText(null);
                if (component != null) {
                    // Use the accessible name (if any) as the tool tip so the
                    // user knows which field the message refers to.
                    AccessibleContext accessibleContext = component.getAccessibleContext();
                    if (accessibleContext != null)
                        message.setToolTipText(accessibleContext.getAccessibleName());
                }
                switch (messageType) {
                    case ERROR:
                        message.setErrorMessage(text);
                        break;
                    case INFO:
                        message.setInfoMessage(text);
                        break;
                    case WARNING:
                        message.setWarningMessage(text);
                        break;
                    default:
                        throw new WTFError(messageType);
                }
            }
            valid = false;
        }
        //MLogger.debug("validator", "Valid = %s", valid);
        if (message != null) {
            Container messageParent = message.getParent();
            if (valid) {
                if (
                    message.isVisible() &&
                    // do not hide messages displayed by other validators
                    ((message.validatedBy == this) || TK.isEmpty(message.getText()))
                ) {
                    if (
                        !(messageParent instanceof JLayeredPane) &&
                        (messageParent != null)
                    ) {
                        // Message is laid out inline: shrink the dialog by the
                        // height the message occupied.
                        int h = message.getPreferredSize().height;
                        dialog.setHeight(dialog.getHeight() - h);
                    }
                    message.setVisible(false);
                    // reset auto completion window properties
                    JTextComponent textComponent = AutoCompletion.getCurrentTextComponent();
                    if ((textComponent != null) && (textComponent == component)) {
                        AutoCompletion ac = MText.getAutoCompletion(textComponent);
                        if (ac != null)
                            ac.verticalPosition.reset();
                    }
                }
            }
            else {
                if (
                    updateMessage &&
                    // do not show empty message
                    !TK.isEmpty(message.getText())
                ) {
                    message.setVisible(true);
                    // show auto completion window above text field; do not hover message
                    JTextComponent textComponent = AutoCompletion.getCurrentTextComponent();
                    if ((textComponent != null) && (textComponent == component)) {
                        AutoCompletion ac = MText.getAutoCompletion(textComponent);
                        AutoCompletion.hidePopupWindow();
                        if (ac != null)
                            ac.verticalPosition.set(UI.VerticalPosition.TOP);
                    }
                    if (messageParent instanceof JLayeredPane) {
                        // Floating message: position it just below the
                        // component (or its enclosing scroll pane).
                        if (component != null) {
                            JScrollPane scrollPane = MScrollPane.getScrollPane(component);
                            Rectangle r = SwingUtilities.convertRectangle(
                                (scrollPane != null) ? scrollPane.getParent() : component.getParent(),
                                (scrollPane != null) ? scrollPane.getBounds() : component.getBounds(),
                                dialog.getContentPane()
                            );
                            int y = r.y + component.getHeight() - (int)(ValidatorMessage.TAIL_SIZE / 1.5f);
                            message.setLocation(r.x + 5, y);
                            message.setSize(message.getPreferredSize());
                        }
                        else {
                            message.setLocation(0, 0);
                            message.setSize(message.getPreferredSize());
                        }
                    }
                    else if (messageParent != null) {
                        // Inline message: grow the dialog to make room for it.
                        int h = message.getPreferredSize().height;
                        dialog.setHeight(dialog.getHeight() + h);
                    }
                }
            }
        }
        // MessageType.INFO and MessageType.WARNING types are non-fatal
        // and user can accept dialog window.
        if (dialog.getOKButton() != null)
            dialog.getOKButton().setEnabled(valid || (messageType != MessageType.ERROR));
        return valid;
    }

    // InputVerifier

    /** Delegates Swing focus-transfer verification to {@link #validate()}. */
    @Override
    public boolean verify(final JComponent input) {
        return validate();
    }

    // protected

    /**
     * Returns the MDialog owning the validated component, or null when there
     * is no component or the component is not inside an MDialog.
     */
    protected MDialog getDialog() {
        if (component == null) {
            MLogger.debug("validator", "Null component");
            return null;
        }
        return MDialog.of(component);
    }

    /** Returns the value being validated, as defined by the subclass. */
    protected abstract V getValue();

    /** Hook invoked after this validator is attached to a component. */
    protected abstract void install();

    /** Hook invoked before this validator is detached from its component. */
    protected abstract void uninstall();

    /**
     * Performs the actual validation.
     *
     * @return true when the value is valid
     * @throws Exception when invalid; the message is shown to the user
     */
    protected abstract boolean isValid() throws Exception;

    /**
     * Re-runs all validators registered on the owning dialog; returns true
     * when there is no dialog or no validator support.
     */
    protected final boolean revalidate() {
        MDialog dialog = getDialog();
        if (dialog == null)
            return true;
        ValidatorSupport validatorSupport = dialog.getValidatorSupport();
        if (validatorSupport != null)
            return validatorSupport.validate(dialog);
        return true;
    }

    /** Sets the severity used for the message of the current validation run. */
    protected final void setMessageType(final MessageType value) { messageType = value; }

    // private classes

    private static final class StaticHandler implements FocusListener {

        // public

        @Override
        public void focusGained(final FocusEvent e) {
            MDialog dialog = MDialog.of(e.getComponent());
            // NOTE(review): revalidate() guards getValidatorSupport() against
            // null, but this call does not — confirm MDialog always provides
            // a ValidatorSupport.
            if (dialog != null)
                dialog.getValidatorSupport().validate(dialog);
        }

        @Override
        public void focusLost(final FocusEvent e) {
            // Hide the message while the field is not focused.
            MDialog dialog = MDialog.of(e.getComponent());
            if (dialog != null) {
                ValidatorMessage message = dialog.getValidatorMessage();
                if (message != null)
                    message.setVisible(false);
            }
        }
    }
}
| |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.android;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Streams;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ResourceSet;
import com.google.devtools.build.lib.analysis.AnalysisUtils;
import com.google.devtools.build.lib.analysis.FileProvider;
import com.google.devtools.build.lib.analysis.OutputGroupInfo;
import com.google.devtools.build.lib.analysis.PrerequisiteArtifacts;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.Runfiles;
import com.google.devtools.build.lib.analysis.RunfilesProvider;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.analysis.test.InstrumentedFilesCollector.InstrumentationSpec;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.IterablesChain;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.packages.AttributeMap;
import com.google.devtools.build.lib.packages.BuildType;
import com.google.devtools.build.lib.packages.BuiltinProvider;
import com.google.devtools.build.lib.packages.Info;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException;
import com.google.devtools.build.lib.packages.SymbolGenerator;
import com.google.devtools.build.lib.packages.TriState;
import com.google.devtools.build.lib.packages.Type;
import com.google.devtools.build.lib.rules.android.ZipFilterBuilder.CheckHashMismatchMode;
import com.google.devtools.build.lib.rules.android.databinding.DataBindingContext;
import com.google.devtools.build.lib.rules.cpp.CcInfo;
import com.google.devtools.build.lib.rules.cpp.CcLinkingContext;
import com.google.devtools.build.lib.rules.cpp.CcLinkingContext.LinkOptions;
import com.google.devtools.build.lib.rules.java.BootClassPathInfo;
import com.google.devtools.build.lib.rules.java.ClasspathConfiguredFragment;
import com.google.devtools.build.lib.rules.java.JavaCcLinkParamsProvider;
import com.google.devtools.build.lib.rules.java.JavaCommon;
import com.google.devtools.build.lib.rules.java.JavaCompilationArgsProvider;
import com.google.devtools.build.lib.rules.java.JavaCompilationArgsProvider.ClasspathType;
import com.google.devtools.build.lib.rules.java.JavaCompilationArtifacts;
import com.google.devtools.build.lib.rules.java.JavaCompilationHelper;
import com.google.devtools.build.lib.rules.java.JavaCompileOutputs;
import com.google.devtools.build.lib.rules.java.JavaInfo;
import com.google.devtools.build.lib.rules.java.JavaPluginInfoProvider;
import com.google.devtools.build.lib.rules.java.JavaRuleOutputJarsProvider;
import com.google.devtools.build.lib.rules.java.JavaRuleOutputJarsProvider.OutputJar;
import com.google.devtools.build.lib.rules.java.JavaSemantics;
import com.google.devtools.build.lib.rules.java.JavaSourceJarsProvider;
import com.google.devtools.build.lib.rules.java.JavaTargetAttributes;
import com.google.devtools.build.lib.rules.java.JavaUtil;
import com.google.devtools.build.lib.rules.java.proto.GeneratedExtensionRegistryProvider;
import com.google.devtools.build.lib.util.FileType;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.List;
/**
* A helper class for android rules.
*
* <p>Helps create the java compilation as well as handling the exporting of the java compilation
* artifacts to the other rules.
*/
public class AndroidCommon {
  /** Attribute set traversed when collecting instrumented files for coverage. */
  public static final InstrumentationSpec ANDROID_COLLECTION_SPEC =
      JavaCommon.JAVA_COLLECTION_SPEC.withDependencyAttributes(
          "deps", "data", "exports", "instruments", "runtime_deps", "binary_under_test");

  // Attributes followed by getTransitivePrerequisites (when the rule declares them).
  private static final ImmutableSet<String> TRANSITIVE_ATTRIBUTES =
      ImmutableSet.of("deps", "exports");

  // Thread count for multithreaded dexing; see createDexAction for rationale.
  private static final int DEX_THREADS = 5;
  private static final ResourceSet DEX_RESOURCE_SET =
      ResourceSet.createWithRamCpu(/* memoryMb= */ 4096.0, /* cpuUsage= */ DEX_THREADS);
public static final <T extends Info> Iterable<T> getTransitivePrerequisites(
RuleContext ruleContext, BuiltinProvider<T> key) {
IterablesChain.Builder<T> builder = IterablesChain.builder();
AttributeMap attributes = ruleContext.attributes();
for (String attr : TRANSITIVE_ATTRIBUTES) {
if (attributes.has(attr, BuildType.LABEL_LIST)) {
builder.add(ruleContext.getPrerequisites(attr, key));
}
}
return builder.build();
}
  private final RuleContext ruleContext;
  private final JavaCommon javaCommon;
  // Whether consuming rules should treat this rule as a compile-time-only dep.
  private final boolean asNeverLink;

  // The fields below are populated during init()/initJava().
  private NestedSet<Artifact> filesToBuild;
  private NestedSet<Artifact> transitiveNeverlinkLibraries =
      NestedSetBuilder.emptySet(Order.STABLE_ORDER);
  private JavaCompilationArgsProvider javaCompilationArgs = JavaCompilationArgsProvider.EMPTY;
  // Jars produced by this rule that are needed at runtime (e.g. the resource
  // class jar and the rule's own class jar); built in init().
  private NestedSet<Artifact> jarsProducedForRuntime;
  private Artifact classJar;
  private JavaCompileOutputs<Artifact> outputs;
  private Artifact iJar;
  private Artifact srcJar;
  // Source jar packed from the generated resource sources; see packResourceSourceJar.
  private Artifact resourceSourceJar;
  private GeneratedExtensionRegistryProvider generatedExtensionRegistryProvider;
  private final JavaSourceJarsProvider.Builder javaSourceJarsProviderBuilder =
      JavaSourceJarsProvider.builder();
  private final JavaRuleOutputJarsProvider.Builder javaRuleOutputJarsProviderBuilder =
      JavaRuleOutputJarsProvider.builder();
  private AndroidIdlHelper idlHelper;
  /**
   * Creates a new AndroidCommon, deriving the neverlink flag from the rule
   * context of the given JavaCommon.
   */
  public AndroidCommon(JavaCommon javaCommon) {
    this(javaCommon, JavaCommon.isNeverLink(javaCommon.getRuleContext()));
  }

  /**
   * Creates a new AndroidCommon.
   *
   * @param common the JavaCommon instance
   * @param asNeverLink Boolean to indicate if this rule should be treated as a compile time dep by
   *     consuming rules.
   */
  public AndroidCommon(JavaCommon common, boolean asNeverLink) {
    this.ruleContext = common.getRuleContext();
    this.asNeverLink = asNeverLink;
    this.javaCommon = common;
  }
  /**
   * Collects the transitive neverlink dependencies.
   *
   * @param ruleContext the context of the rule neverlink deps are to be computed for
   * @param deps the targets to be treated as dependencies
   * @param runtimeJars the runtime jars produced by the rule (non-transitive)
   * @return a nested set of the neverlink deps.
   */
  public static NestedSet<Artifact> collectTransitiveNeverlinkLibraries(
      RuleContext ruleContext,
      Iterable<? extends TransitiveInfoCollection> deps,
      NestedSet<Artifact> runtimeJars) {
    NestedSetBuilder<Artifact> neverlinkedRuntimeJars = NestedSetBuilder.naiveLinkOrder();
    // Always propagate neverlink libraries declared by dependencies.
    for (AndroidNeverLinkLibrariesProvider provider :
        AnalysisUtils.getProviders(deps, AndroidNeverLinkLibrariesProvider.class)) {
      neverlinkedRuntimeJars.addTransitive(provider.getTransitiveNeverLinkLibraries());
    }
    if (JavaCommon.isNeverLink(ruleContext)) {
      // This rule itself is neverlink: its own runtime jars and those of its
      // deps become neverlink libraries for consumers.
      neverlinkedRuntimeJars.addTransitive(runtimeJars);
      for (JavaCompilationArgsProvider provider :
          JavaInfo.getProvidersFromListOfTargets(JavaCompilationArgsProvider.class, deps)) {
        neverlinkedRuntimeJars.addTransitive(provider.getRuntimeJars());
      }
    }
    return neverlinkedRuntimeJars.build();
  }
  /**
   * Creates an action that converts {@code jarToDex} to a dex file. The output will be stored in
   * the {@link com.google.devtools.build.lib.actions.Artifact} {@code dxJar}.
   *
   * @param ruleContext the rule registering the action
   * @param jarToDex input jar
   * @param classesDex output dex artifact
   * @param dexOptions extra flags appended to the dx command line
   * @param multidex whether to pass {@code --multi-dex}
   * @param mainDexList optional main-dex list file; only used when multidex
   */
  public static void createDexAction(
      RuleContext ruleContext,
      Artifact jarToDex,
      Artifact classesDex,
      List<String> dexOptions,
      boolean multidex,
      Artifact mainDexList) {
    CustomCommandLine.Builder commandLine = CustomCommandLine.builder();
    commandLine.add("--dex");
    // Multithreaded dex does not work when using --multi-dex.
    if (!multidex) {
      // Multithreaded dex tends to run faster, but only up to about 5 threads (at which point the
      // law of diminishing returns kicks in). This was determined experimentally, with 5-thread dex
      // performing about 25% faster than 1-thread dex.
      commandLine.add("--num-threads=" + DEX_THREADS);
    }
    commandLine.addAll(dexOptions);
    if (multidex) {
      commandLine.add("--multi-dex");
      if (mainDexList != null) {
        commandLine.addPrefixedExecPath("--main-dex-list=", mainDexList);
      }
    }
    commandLine.addPrefixedExecPath("--output=", classesDex);
    commandLine.addExecPath(jarToDex);
    SpawnAction.Builder builder =
        new SpawnAction.Builder()
            .useDefaultShellEnvironment()
            .setExecutable(AndroidSdkProvider.fromRuleContext(ruleContext).getDx())
            .addInput(jarToDex)
            .addOutput(classesDex)
            .setProgressMessage("Converting %s to dex format", jarToDex.getExecPathString())
            .setMnemonic("AndroidDexer")
            .addCommandLine(commandLine.build())
            // TODO(ulfjack): Use 1 CPU if multidex is true?
            .setResources(DEX_RESOURCE_SET);
    if (mainDexList != null) {
      builder.addInput(mainDexList);
    }
    ruleContext.registerAction(builder.build(ruleContext));
  }
  /**
   * Builds the {@link AndroidIdeInfoProvider} exposing IDL, resource, manifest
   * and APK information about this rule to IDEs.
   *
   * @param zipAlignedApk the final APK, or null when the rule produces none
   * @param apksUnderTest APKs instrumented by this (test) rule
   */
  public static AndroidIdeInfoProvider createAndroidIdeInfoProvider(
      RuleContext ruleContext,
      AndroidIdlHelper idlHelper,
      OutputJar resourceJar,
      Artifact aar,
      ResourceApk resourceApk,
      Artifact zipAlignedApk,
      Iterable<Artifact> apksUnderTest,
      NativeLibs nativeLibs) {
    AndroidIdeInfoProvider.Builder ideInfoProviderBuilder =
        new AndroidIdeInfoProvider.Builder()
            .setIdlClassJar(idlHelper.getIdlClassJar())
            .setIdlSourceJar(idlHelper.getIdlSourceJar())
            .setResourceJar(resourceJar)
            .setAar(aar)
            .setNativeLibs(nativeLibs.getMap())
            .addIdlImportRoot(idlHelper.getIdlImportRoot())
            .addIdlSrcs(idlHelper.getIdlSources())
            .addIdlGeneratedJavaFiles(idlHelper.getIdlGeneratedJavaSources())
            .addAllApksUnderTest(apksUnderTest);
    if (zipAlignedApk != null) {
      ideInfoProviderBuilder.setApk(zipAlignedApk);
    }
    // If the rule defines resources, put those in the IDE info.
    if (AndroidResources.definesAndroidResources(ruleContext.attributes())) {
      ideInfoProviderBuilder
          .setDefinesAndroidResources(true)
          // Sets the possibly merged manifest and the raw manifest.
          .setGeneratedManifest(resourceApk.getManifest())
          .setManifest(ruleContext.getPrerequisiteArtifact("manifest"))
          .setJavaPackage(getJavaPackage(ruleContext))
          .setResourceApk(resourceApk.getArtifact());
    }
    return ideInfoProviderBuilder.build();
  }
/**
* Gets the Java package for the current target.
*
* @deprecated If no custom_package is specified, this method will derive the Java package from
* the package path, even if that path is not a valid Java path. Use {@code
* AndroidManifest#getAndroidPackage(RuleContext)}} instead.
*/
@Deprecated
public static String getJavaPackage(RuleContext ruleContext) {
AttributeMap attributes = ruleContext.attributes();
if (attributes.isAttributeValueExplicitlySpecified("custom_package")) {
return attributes.get("custom_package", Type.STRING);
}
return getDefaultJavaPackage(ruleContext.getRule());
}
private static String getDefaultJavaPackage(Rule rule) {
PathFragment nameFragment = rule.getPackage().getNameFragment();
String packageName = JavaUtil.getJavaFullClassname(nameFragment);
if (packageName != null) {
return packageName;
} else {
// This is a workaround for libraries that don't follow the standard Bazel package format
return nameFragment.getPathString().replace('/', '.');
}
}
static PathFragment getSourceDirectoryRelativePathFromResource(Artifact resource) {
PathFragment resourceDir = AndroidResources.findResourceDir(resource);
if (resourceDir == null) {
return null;
}
return trimTo(resource.getRootRelativePath(), resourceDir);
}
  /**
   * Finds the rightmost occurrence of the needle and returns subfragment of the haystack from left
   * to the end of the occurrence inclusive of the needle.
   *
   * <pre>
   * `Example:
   *   Given the haystack:
   *     res/research/handwriting/res/values/strings.xml
   *   And the needle:
   *     res
   *   Returns:
   *     res/research/handwriting/res
   * </pre>
   *
   * <p>Implemented as a Knuth-Morris-Pratt search over path segments,
   * modified to keep the rightmost match.
   *
   * @param haystack path searched for the needle
   * @param needle path whose segment sequence is matched; empty matches
   *     everything (the haystack is returned unchanged)
   * @throws IllegalArgumentException when the needle does not occur in the
   *     haystack
   */
  static PathFragment trimTo(PathFragment haystack, PathFragment needle) {
    if (needle.equals(PathFragment.EMPTY_FRAGMENT)) {
      return haystack;
    }
    List<String> needleSegments = needle.getSegments();
    // Compute the overlap offset for duplicated parts of the needle.
    int[] overlap = new int[needleSegments.size() + 1];
    // Start overlap at -1, as it will cancel out the increment in the search.
    // See http://en.wikipedia.org/wiki/Knuth%E2%80%93Morris%E2%80%93Pratt_algorithm for the
    // details.
    overlap[0] = -1;
    for (int i = 0, j = -1; i < needleSegments.size(); j++, i++, overlap[i] = j) {
      while (j >= 0 && !needleSegments.get(i).equals(needleSegments.get(j))) {
        // Walk the overlap until the bound is found.
        j = overlap[j];
      }
    }
    // TODO(corysmith): reverse the search algorithm.
    // Keep the index of the found so that the rightmost index is taken.
    List<String> haystackSegments = haystack.getSegments();
    int found = -1;
    for (int i = 0, j = 0; i < haystackSegments.size(); i++) {
      while (j >= 0 && !haystackSegments.get(i).equals(needleSegments.get(j))) {
        // Not matching, walk the needle index to attempt another match.
        j = overlap[j];
      }
      j++;
      // Needle index is exhausted, so the needle must match.
      if (j == needleSegments.size()) {
        // Record the found index + 1 to be inclusive of the end index.
        found = i + 1;
        // Subtract one from the needle index to restart the search process
        j = j - 1;
      }
    }
    if (found != -1) {
      // Return the subsection of the haystack.
      return haystack.subFragment(0, found);
    }
    throw new IllegalArgumentException(String.format("%s was not found in %s", needle, haystack));
  }
public static NestedSetBuilder<Artifact> collectTransitiveNativeLibs(RuleContext ruleContext) {
NestedSetBuilder<Artifact> transitiveNativeLibs = NestedSetBuilder.naiveLinkOrder();
Iterable<AndroidNativeLibsInfo> infos =
getTransitivePrerequisites(ruleContext, AndroidNativeLibsInfo.PROVIDER);
for (AndroidNativeLibsInfo nativeLibsZipsInfo : infos) {
transitiveNativeLibs.addTransitive(nativeLibsZipsInfo.getNativeLibs());
}
return transitiveNativeLibs;
}
static boolean getExportsManifest(RuleContext ruleContext) {
// AndroidLibraryBaseRule has exports_manifest but AndroidBinaryBaseRule does not.
// ResourceContainers are built for both, so we must check if exports_manifest is present.
if (!ruleContext.attributes().has("exports_manifest", BuildType.TRISTATE)) {
return false;
}
TriState attributeValue = ruleContext.attributes().get("exports_manifest", BuildType.TRISTATE);
// If the rule does not have the Android configuration fragment, we default to false.
boolean exportsManifestDefault =
ruleContext.isLegalFragment(AndroidConfiguration.class)
&& ruleContext.getFragment(AndroidConfiguration.class).getExportsManifestDefault();
return attributeValue == TriState.YES
|| (attributeValue == TriState.AUTO && exportsManifestDefault);
}
/** Returns the artifact for the debug key for signing the APK. */
static ImmutableList<Artifact> getApkDebugSigningKeys(RuleContext ruleContext) {
ImmutableList<Artifact> keys =
ruleContext.getPrerequisiteArtifacts("debug_signing_keys").list();
if (!keys.isEmpty()) {
return keys;
}
return ImmutableList.of(ruleContext.getHostPrerequisiteArtifact("debug_key"));
}
  /**
   * Wires the pre-compiled resource class jar into this rule's compilation:
   * adds it to the main compilation classpath, to the classpath exported to
   * consumers, and to the rule's output files, and packs the resource source
   * jar.
   *
   * @param resourceJavaClassJar the compiled resource (R) class jar
   * @param resourceJavaSrcJar the generated resource Java sources jar
   */
  private void compileResources(
      JavaSemantics javaSemantics,
      Artifact resourceJavaClassJar,
      Artifact resourceJavaSrcJar,
      JavaCompilationArtifacts.Builder artifactsBuilder,
      JavaTargetAttributes.Builder attributes,
      NestedSetBuilder<Artifact> filesBuilder)
      throws InterruptedException, RuleErrorException {
    packResourceSourceJar(javaSemantics, resourceJavaSrcJar);
    // Add the compiled resource jar to the classpath of the main compilation.
    attributes.addDirectJars(NestedSetBuilder.create(Order.STABLE_ORDER, resourceJavaClassJar));
    // Add the compiled resource jar to the classpath of consuming targets.
    // We don't actually use the ijar. That is almost the same as the resource class jar
    // except for <clinit>, but it takes time to build and waiting for that to build would
    // just delay building the rest of the library.
    artifactsBuilder.addCompileTimeJarAsFullJar(resourceJavaClassJar);
    // Add the compiled resource jar as a declared output of the rule.
    filesBuilder.add(resourceSourceJar);
    filesBuilder.add(resourceJavaClassJar);
  }
  /**
   * Creates the action packing the generated resource Java sources into the
   * rule's implicit resources source jar output ({@link #resourceSourceJar}).
   */
  private void packResourceSourceJar(JavaSemantics javaSemantics, Artifact resourcesJavaSrcJar)
      throws InterruptedException {
    resourceSourceJar =
        ruleContext.getImplicitOutputArtifact(AndroidRuleClasses.ANDROID_RESOURCES_SOURCE_JAR);
    JavaTargetAttributes.Builder javacAttributes =
        new JavaTargetAttributes.Builder(javaSemantics).addSourceJar(resourcesJavaSrcJar);
    JavaCompilationHelper javacHelper =
        new JavaCompilationHelper(ruleContext, javaSemantics, getJavacOpts(), javacAttributes);
    javacHelper.createSourceJarAction(resourceSourceJar, null);
  }
  /**
   * Sets up the Java compilation for this Android rule: boot classpath,
   * javacopts (including data binding), resource compilation, optional
   * coverage support, and the main compile actions via initJava.
   *
   * @param isBinary whether the rule is an android_binary (affects data
   *     binding behavior)
   * @param excludedRuntimeArtifacts artifacts to exclude from the runtime
   *     classpath, or null
   * @return the finalized compilation attributes, or null when errors were
   *     already reported on the rule context
   */
  public JavaTargetAttributes init(
      JavaSemantics javaSemantics,
      AndroidSemantics androidSemantics,
      ResourceApk resourceApk,
      boolean addCoverageSupport,
      boolean collectJavaCompilationArgs,
      boolean isBinary,
      NestedSet<Artifact> excludedRuntimeArtifacts,
      boolean generateExtensionRegistry)
      throws InterruptedException, RuleErrorException {
    classJar = ruleContext.getImplicitOutputArtifact(AndroidRuleClasses.ANDROID_LIBRARY_CLASS_JAR);
    idlHelper = new AndroidIdlHelper(ruleContext, classJar);
    // Prefer the SDK-provided system boot classpath; otherwise build one from
    // the android jar (plus desugar extras when Java 8 desugaring is on).
    BootClassPathInfo bootClassPathInfo;
    AndroidSdkProvider androidSdkProvider = AndroidSdkProvider.fromRuleContext(ruleContext);
    if (androidSdkProvider.getSystem() != null) {
      bootClassPathInfo = androidSdkProvider.getSystem();
    } else {
      NestedSetBuilder<Artifact> bootclasspath = NestedSetBuilder.<Artifact>stableOrder();
      if (getAndroidConfig(ruleContext).desugarJava8()) {
        bootclasspath.addTransitive(
            PrerequisiteArtifacts.nestedSet(ruleContext, "$desugar_java8_extra_bootclasspath"));
      }
      bootclasspath.add(androidSdkProvider.getAndroidJar());
      bootClassPathInfo = BootClassPathInfo.create(bootclasspath.build());
    }
    // Android-compatible javacopts, possibly extended by data binding.
    ImmutableList.Builder<String> javacopts = ImmutableList.builder();
    javacopts.addAll(androidSemantics.getCompatibleJavacOptions(ruleContext));
    resourceApk
        .asDataBindingContext()
        .supplyJavaCoptsUsing(ruleContext, isBinary, javacopts::addAll);
    JavaTargetAttributes.Builder attributesBuilder =
        javaCommon
            .initCommon(idlHelper.getIdlGeneratedJavaSources(), javacopts.build())
            .setBootClassPath(bootClassPathInfo);
    // Data binding may contribute an annotation processor and extra outputs.
    resourceApk
        .asDataBindingContext()
        .supplyAnnotationProcessor(
            ruleContext,
            (plugin, additionalOutputs) -> {
              attributesBuilder.addPlugin(plugin);
              attributesBuilder.addAdditionalOutputs(additionalOutputs);
            });
    if (excludedRuntimeArtifacts != null) {
      attributesBuilder.addExcludedArtifacts(excludedRuntimeArtifacts);
    }
    JavaCompilationArtifacts.Builder artifactsBuilder = new JavaCompilationArtifacts.Builder();
    NestedSetBuilder<Artifact> jarsProducedForRuntime = NestedSetBuilder.<Artifact>stableOrder();
    NestedSetBuilder<Artifact> filesBuilder = NestedSetBuilder.<Artifact>stableOrder();
    Artifact resourceJavaSrcJar = resourceApk.getResourceJavaSrcJar();
    if (resourceJavaSrcJar != null) {
      filesBuilder.add(resourceJavaSrcJar);
      compileResources(
          javaSemantics,
          resourceApk.getResourceJavaClassJar(),
          resourceJavaSrcJar,
          artifactsBuilder,
          attributesBuilder,
          filesBuilder);
      // Combined resource constants needs to come even before our own classes that may contain
      // local resource constants.
      artifactsBuilder.addRuntimeJar(resourceApk.getResourceJavaClassJar());
      jarsProducedForRuntime.add(resourceApk.getResourceJavaClassJar());
    }
    // Databinding metadata that the databinding annotation processor reads.
    ImmutableList<Artifact> additionalJavaInputsFromDatabinding =
        resourceApk.asDataBindingContext().processDeps(ruleContext, isBinary);
    JavaCompilationHelper helper =
        initAttributes(attributesBuilder, javaSemantics, additionalJavaInputsFromDatabinding);
    if (ruleContext.hasErrors()) {
      return null;
    }
    if (addCoverageSupport) {
      androidSemantics.addCoverageSupport(
          ruleContext, this, javaSemantics, true, attributesBuilder, artifactsBuilder);
      if (ruleContext.hasErrors()) {
        return null;
      }
    }
    JavaTargetAttributes attributes = attributesBuilder.build();
    initJava(
        javaSemantics,
        helper,
        attributes,
        artifactsBuilder,
        collectJavaCompilationArgs,
        filesBuilder,
        generateExtensionRegistry);
    if (ruleContext.hasErrors()) {
      return null;
    }
    if (generatedExtensionRegistryProvider != null) {
      jarsProducedForRuntime.add(generatedExtensionRegistryProvider.getClassJar());
    }
    // The rule's own class jar is always part of the runtime jars.
    this.jarsProducedForRuntime = jarsProducedForRuntime.add(classJar).build();
    return attributes;
  }
private JavaCompilationHelper initAttributes(
JavaTargetAttributes.Builder attributes,
JavaSemantics semantics,
ImmutableList<Artifact> additionalArtifacts) {
JavaCompilationHelper helper =
new JavaCompilationHelper(
ruleContext,
semantics,
javaCommon.getJavacOpts(),
attributes,
additionalArtifacts,
/*disableStrictDeps=*/ false);
helper.addLibrariesToAttributes(javaCommon.targetsTreatedAsDeps(ClasspathType.COMPILE_ONLY));
attributes.setTargetLabel(ruleContext.getLabel());
ruleContext.checkSrcsSamePackage(true);
return helper;
}
  /**
   * Runs Java compilation for this Android rule and records the resulting artifacts on this
   * object's fields ({@code filesToBuild}, {@code outputs}, {@code srcJar}, {@code iJar},
   * {@code transitiveNeverlinkLibraries}, ...).
   *
   * <p>NOTE(review): this method mutates many instance fields in a specific order; callers rely
   * on {@code filesToBuild} being set even on the early-error path.
   *
   * @param javaSemantics semantics used for the generated extension registry
   * @param helper compilation helper created by {@code initAttributes}
   * @param attributes finalized compilation attributes
   * @param javaArtifactsBuilder collects compile/runtime jars produced here
   * @param collectJavaCompilationArgs whether to also compute {@code javaCompilationArgs}
   * @param filesBuilder collects the rule's default output files
   * @param generateExtensionRegistry whether to emit a proto extension registry
   */
  private void initJava(
      JavaSemantics javaSemantics,
      JavaCompilationHelper helper,
      JavaTargetAttributes attributes,
      JavaCompilationArtifacts.Builder javaArtifactsBuilder,
      boolean collectJavaCompilationArgs,
      NestedSetBuilder<Artifact> filesBuilder,
      boolean generateExtensionRegistry)
      throws InterruptedException {
    if (ruleContext.hasErrors()) {
      // Avoid leaving filesToBuild set to null, otherwise we'll get a NullPointerException masking
      // the real error.
      filesToBuild = filesBuilder.build();
      return;
    }
    Artifact jar = null;
    if (attributes.hasSources() || attributes.hasResources()) {
      // We only want to add a jar to the classpath of a dependent rule if it has content.
      javaArtifactsBuilder.addRuntimeJar(classJar);
      jar = classJar;
    }
    // The class jar is always a declared output, even when empty.
    filesBuilder.add(classJar);
    outputs = helper.createOutputs(classJar);
    javaArtifactsBuilder.setCompileTimeDependencies(outputs.depsProto());
    srcJar = ruleContext.getImplicitOutputArtifact(AndroidRuleClasses.ANDROID_LIBRARY_SOURCE_JAR);
    javaSourceJarsProviderBuilder
        .addSourceJar(srcJar)
        .addAllTransitiveSourceJars(javaCommon.collectTransitiveSourceJars(srcJar));
    helper.createSourceJarAction(srcJar, outputs.genSource());
    helper.createCompileAction(outputs);
    if (generateExtensionRegistry) {
      generatedExtensionRegistryProvider =
          javaSemantics.createGeneratedExtensionRegistry(
              ruleContext,
              javaCommon,
              filesBuilder,
              javaArtifactsBuilder,
              javaRuleOutputJarsProviderBuilder,
              javaSourceJarsProviderBuilder);
    }
    filesToBuild = filesBuilder.build();
    // Only rules with sources get an ijar (header jar) for compile-time deps.
    if ((attributes.hasSources()) && jar != null) {
      iJar = helper.createCompileTimeJarAction(jar, javaArtifactsBuilder);
    }
    JavaCompilationArtifacts javaArtifacts = javaArtifactsBuilder.build();
    javaCommon.setJavaCompilationArtifacts(javaArtifacts);
    javaCommon.setClassPathFragment(
        new ClasspathConfiguredFragment(
            javaCommon.getJavaCompilationArtifacts(),
            attributes,
            asNeverLink,
            helper.getBootclasspathOrDefault()));
    transitiveNeverlinkLibraries =
        collectTransitiveNeverlinkLibraries(
            ruleContext,
            javaCommon.getDependencies(),
            NestedSetBuilder.<Artifact>naiveLinkOrder()
                .addAll(javaCommon.getJavaCompilationArtifacts().getRuntimeJars())
                .build());
    if (collectJavaCompilationArgs) {
      boolean hasSources = attributes.hasSources();
      this.javaCompilationArgs = collectJavaCompilationArgs(asNeverLink, hasSources);
    }
  }
  /**
   * Attaches this target's Java/Android providers and output groups to the given configured-target
   * builder, and returns that builder.
   *
   * <p>Must be called after compilation has been set up: it reads {@code outputs}, {@code classJar},
   * {@code iJar}, {@code srcJar}, {@code filesToBuild} and {@code javaCompilationArgs}.
   *
   * @param builder the configured-target builder to populate
   * @param aar the AAR artifact, forwarded to the IDE info provider
   * @param resourceApk resource processing results for this rule
   * @param zipAlignedApk the final APK (binary rules), forwarded to the IDE info provider
   * @param apksUnderTest APKs under test, forwarded to the IDE info provider
   * @param nativeLibs native libraries bundled with this target
   * @param isNeverlink whether this target is neverlink
   * @param isLibrary true for android_library-style rules, false for binaries
   */
  public RuleConfiguredTargetBuilder addTransitiveInfoProviders(
      RuleConfiguredTargetBuilder builder,
      Artifact aar,
      ResourceApk resourceApk,
      Artifact zipAlignedApk,
      Iterable<Artifact> apksUnderTest,
      NativeLibs nativeLibs,
      boolean isNeverlink,
      boolean isLibrary) {
    idlHelper.addTransitiveInfoProviders(builder, classJar, outputs.manifestProto());
    if (generatedExtensionRegistryProvider != null) {
      builder.addNativeDeclaredProvider(generatedExtensionRegistryProvider);
    }
    // Expose the resource class jar as its own output jar entry when present.
    OutputJar resourceJar = null;
    if (resourceApk.getResourceJavaClassJar() != null && resourceSourceJar != null) {
      resourceJar =
          new OutputJar(
              resourceApk.getResourceJavaClassJar(),
              null /* ijar */,
              outputs.manifestProto(),
              ImmutableList.of(resourceSourceJar));
      javaRuleOutputJarsProviderBuilder.addOutputJar(resourceJar);
    }
    JavaRuleOutputJarsProvider ruleOutputJarsProvider =
        javaRuleOutputJarsProviderBuilder
            .addOutputJar(classJar, iJar, outputs.manifestProto(), ImmutableList.of(srcJar))
            .setJdeps(outputs.depsProto())
            .setNativeHeaders(outputs.nativeHeader())
            .build();
    JavaSourceJarsProvider sourceJarsProvider = javaSourceJarsProviderBuilder.build();
    JavaCompilationArgsProvider compilationArgsProvider = javaCompilationArgs;
    JavaInfo.Builder javaInfoBuilder = JavaInfo.Builder.create();
    javaCommon.addTransitiveInfoProviders(
        builder, javaInfoBuilder, filesToBuild, classJar, ANDROID_COLLECTION_SPEC);
    javaCommon.addGenJarsProvider(
        builder, javaInfoBuilder, outputs.genClass(), outputs.genSource());
    resourceApk.asDataBindingContext().addProvider(builder, ruleContext);
    JavaInfo javaInfo =
        javaInfoBuilder
            .addProvider(JavaCompilationArgsProvider.class, compilationArgsProvider)
            .addProvider(JavaRuleOutputJarsProvider.class, ruleOutputJarsProvider)
            .addProvider(JavaSourceJarsProvider.class, sourceJarsProvider)
            .addProvider(JavaPluginInfoProvider.class, JavaCommon.getTransitivePlugins(ruleContext))
            .setRuntimeJars(javaCommon.getJavaCompilationArtifacts().getRuntimeJars())
            .setJavaConstraints(ImmutableList.of("android"))
            .setNeverlink(isNeverlink)
            .build();
    // Do not convert the ResourceApk into builtin providers when it is created from
    // Starlark via AndroidApplicationResourceInfo, because native dependency providers are not
    // created in the Starlark pipeline.
    if (resourceApk.isFromAndroidApplicationResourceInfo()
        || (ruleContext
                .getFragment(AndroidConfiguration.class)
                .omitResourcesInfoProviderFromAndroidBinary()
            && !isLibrary)) {
      // Binary rule; allow extracting merged manifest from Starlark via
      // ctx.attr.android_binary.android.merged_manifest, but not much more.
      builder.addStarlarkTransitiveInfo(
          AndroidStarlarkApiProvider.NAME, new AndroidStarlarkApiProvider(/*resourceInfo=*/ null));
    } else {
      resourceApk.addToConfiguredTargetBuilder(
          builder, ruleContext.getLabel(), /* includeStarlarkApiProvider = */ true, isLibrary);
    }
    return builder
        .setFilesToBuild(filesToBuild)
        .addNativeDeclaredProvider(javaInfo)
        .addProvider(RunfilesProvider.class, RunfilesProvider.simple(getRunfiles()))
        .addNativeDeclaredProvider(
            createAndroidIdeInfoProvider(
                ruleContext,
                idlHelper,
                resourceJar,
                aar,
                resourceApk,
                zipAlignedApk,
                apksUnderTest,
                nativeLibs))
        .addOutputGroup(
            OutputGroupInfo.HIDDEN_TOP_LEVEL, collectHiddenTopLevelArtifacts(ruleContext))
        .addOutputGroup(
            JavaSemantics.SOURCE_JARS_OUTPUT_GROUP, sourceJarsProvider.getTransitiveSourceJars());
  }
private Runfiles getRunfiles() {
// TODO(bazel-team): why return any Runfiles in the neverlink case?
if (asNeverLink) {
return new Runfiles.Builder(
ruleContext.getWorkspaceName(),
ruleContext.getConfiguration().legacyExternalRunfiles())
.addRunfiles(ruleContext, RunfilesProvider.DEFAULT_RUNFILES)
.build();
}
return JavaCommon.getRunfiles(
ruleContext,
javaCommon.getJavaSemantics(),
javaCommon.getJavaCompilationArtifacts(),
asNeverLink);
}
/**
* Collects Java compilation arguments for this target.
*
* @param isNeverLink Whether the target has the 'neverlink' attr.
* @param hasSrcs If false, deps are exported (deprecated behaviour)
*/
private JavaCompilationArgsProvider collectJavaCompilationArgs(
boolean isNeverLink, boolean hasSrcs) {
boolean exportDeps =
!hasSrcs
&& ruleContext
.getFragment(AndroidConfiguration.class)
.allowSrcsLessAndroidLibraryDeps(ruleContext);
return javaCommon.collectJavaCompilationArgs(isNeverLink, exportDeps);
}
  /** Returns the javac options this rule is compiled with. */
  public ImmutableList<String> getJavacOpts() {
    return javaCommon.getJavacOpts();
  }
  /** Returns the complete runtime classpath needed by this rule, including dependencies. */
  public ImmutableList<Artifact> getRuntimeJars() {
    return javaCommon.getJavaCompilationArtifacts().getRuntimeJars();
  }
  /**
   * Returns jars produced by this rule itself that may go into the runtime classpath. By contrast,
   * {@link #getRuntimeJars()} returns the complete runtime classpath needed by this rule,
   * including dependencies.
   */
  public NestedSet<Artifact> getJarsProducedForRuntime() {
    return jarsProducedForRuntime;
  }
  /** Returns the class jar produced by this rule's compilation. */
  public Artifact getClassJar() {
    return classJar;
  }
  /** Returns the neverlink runtime libraries collected transitively from this rule's deps. */
  public NestedSet<Artifact> getTransitiveNeverLinkLibraries() {
    return transitiveNeverlinkLibraries;
  }
  /** Returns whether this target is treated as neverlink. */
  public boolean isNeverLink() {
    return asNeverLink;
  }
  /** Returns native linking info merged from this rule's compile-and-runtime classpath deps. */
  CcInfo getCcInfo() {
    return getCcInfo(
        javaCommon.targetsTreatedAsDeps(ClasspathType.BOTH),
        ImmutableList.of(),
        ruleContext.getLabel(),
        ruleContext.getSymbolGenerator());
  }
static CcInfo getCcInfo(
final Iterable<? extends TransitiveInfoCollection> deps,
final ImmutableList<String> linkOpts,
Label label,
SymbolGenerator<?> symbolGenerator) {
CcLinkingContext ccLinkingContext =
CcLinkingContext.builder()
.setOwner(label)
.addUserLinkFlags(ImmutableList.of(LinkOptions.of(linkOpts, symbolGenerator)))
.build();
CcInfo linkoptsCcInfo = CcInfo.builder().setCcLinkingContext(ccLinkingContext).build();
ImmutableList<CcInfo> ccInfos =
ImmutableList.<CcInfo>builder()
.add(linkoptsCcInfo)
.addAll(
Streams.stream(AnalysisUtils.getProviders(deps, JavaCcLinkParamsProvider.PROVIDER))
.map(JavaCcLinkParamsProvider::getCcInfo)
.collect(ImmutableList.toImmutableList()))
.addAll(
Streams.stream(
AnalysisUtils.getProviders(deps, AndroidCcLinkParamsProvider.PROVIDER))
.map(AndroidCcLinkParamsProvider::getLinkParams)
.collect(ImmutableList.toImmutableList()))
.addAll(AnalysisUtils.getProviders(deps, CcInfo.PROVIDER))
.build();
return CcInfo.merge(ccInfos);
}
  /** Returns the {@link AndroidConfiguration} fragment of the given rule context. */
  public static AndroidConfiguration getAndroidConfig(RuleContext context) {
    return context.getConfiguration().getFragment(AndroidConfiguration.class);
  }
private NestedSet<Artifact> collectHiddenTopLevelArtifacts(RuleContext ruleContext) {
NestedSetBuilder<Artifact> builder = NestedSetBuilder.stableOrder();
for (OutputGroupInfo provider :
getTransitivePrerequisites(ruleContext, OutputGroupInfo.STARLARK_CONSTRUCTOR)) {
builder.addTransitive(provider.getOutputGroup(OutputGroupInfo.HIDDEN_TOP_LEVEL));
}
return builder.build();
}
/**
* Returns a {@link JavaCommon} instance with Android data binding support.
*
* <p>Binaries need both compile-time and runtime support, while libraries only need compile-time
* support.
*
* <p>No rule needs <i>any</i> support if data binding is disabled.
*/
static JavaCommon createJavaCommonWithAndroidDataBinding(
RuleContext ruleContext,
JavaSemantics semantics,
DataBindingContext dataBindingContext,
boolean isLibrary) {
ImmutableList<Artifact> ruleSources = ruleContext.getPrerequisiteArtifacts("srcs").list();
ImmutableList<Artifact> dataBindingSources =
dataBindingContext.getAnnotationSourceFiles(ruleContext);
ImmutableList<Artifact> srcs = ImmutableList.<Artifact>builder()
.addAll(ruleSources)
.addAll(dataBindingSources)
.build();
ImmutableList<TransitiveInfoCollection> compileDeps;
ImmutableList<TransitiveInfoCollection> runtimeDeps;
ImmutableList<TransitiveInfoCollection> bothDeps;
if (isLibrary) {
compileDeps = JavaCommon.defaultDeps(ruleContext, semantics, ClasspathType.COMPILE_ONLY);
compileDeps = AndroidIdlHelper.maybeAddSupportLibs(ruleContext, compileDeps);
runtimeDeps = JavaCommon.defaultDeps(ruleContext, semantics, ClasspathType.RUNTIME_ONLY);
bothDeps = JavaCommon.defaultDeps(ruleContext, semantics, ClasspathType.BOTH);
} else {
// Binary:
compileDeps = ImmutableList.copyOf(ruleContext.getPrerequisites("deps"));
runtimeDeps = compileDeps;
bothDeps = compileDeps;
}
return new JavaCommon(ruleContext, semantics, srcs, compileDeps, runtimeDeps, bothDeps);
}
/**
* Gets the transitive support APKs required by this rule through the {@code support_apks}
* attribute.
*/
static NestedSet<Artifact> getSupportApks(RuleContext ruleContext) {
NestedSetBuilder<Artifact> supportApks = NestedSetBuilder.stableOrder();
for (TransitiveInfoCollection dep : ruleContext.getPrerequisites("support_apks")) {
ApkInfo apkProvider = dep.get(ApkInfo.PROVIDER);
FileProvider fileProvider = dep.getProvider(FileProvider.class);
// If ApkInfo is present, do not check FileProvider for .apk files. For example,
// android_binary creates a FileProvider containing both the signed and unsigned APKs.
if (apkProvider != null) {
supportApks.add(apkProvider.getApk());
} else if (fileProvider != null) {
// The rule definition should enforce that only .apk files are allowed, however, it can't
// hurt to double check.
supportApks.addAll(
FileType.filter(fileProvider.getFilesToBuild().toList(), AndroidRuleClasses.APK));
}
}
return supportApks.build();
}
/**
* Used for instrumentation tests. Filter out classes from the instrumentation JAR that are also
* present in the target JAR. During an instrumentation test, ART will load jars from both APKs
* into the same classloader. If the same class exists in both jars, there will be runtime
* crashes.
*
* <p>R.class files that share the same package are also filtered out to prevent
* surprising/incorrect references to resource IDs.
*/
public static void createZipFilterAction(
RuleContext ruleContext,
Artifact in,
Artifact filter,
Artifact out,
CheckHashMismatchMode checkHashMismatch,
boolean removeAllRClasses) {
ZipFilterBuilder builder =
new ZipFilterBuilder(ruleContext)
.setInputZip(in)
.addFilterZips(ImmutableList.of(filter))
.setOutputZip(out)
.addFileTypeToFilter(".class")
.setCheckHashMismatchMode(checkHashMismatch)
// These files are generated by databinding in both the target and the instrumentation
// app with different contents. We want to keep the one from the target app.
.addExplicitFilter("/BR\\.class$")
.addExplicitFilter("/databinding/[^/]+Binding\\.class$");
if (removeAllRClasses) {
builder.addExplicitFilter("R\\.class").addExplicitFilter("R\\$.*\\.class");
}
builder.build();
}
}
| |
/*
* Copyright (c) 2014 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.docker.client;
import com.google.common.collect.Queues;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.nio.channels.Channels;
import java.nio.channels.SocketChannel;
import java.util.Queue;
import jnr.unixsocket.UnixSocketAddress;
import jnr.unixsocket.UnixSocketChannel;
/**
 * Provides a socket that wraps a jnr.unixsocket.UnixSocketChannel and delays setting options
 * until the socket is connected. This is necessary because the Apache HTTP client attempts to
 * set options prior to connecting the socket, which doesn't work for Unix sockets since options
 * are being set on the underlying file descriptor. Until the socket is connected, the file
 * descriptor doesn't exist.
 *
 * <p>This class also no-ops any calls to setReuseAddress, which is called by the Apache client
 * but isn't supported by AFUnixSocket.
 */
public class ApacheUnixSocket extends Socket {

  private final UnixSocketChannel inner;
  private SocketAddress addr;
  // Linger timeout in seconds; approximated in close(). See setSoLinger.
  private int lingerTime;
  // Option setters requested before the socket was connected; applied (and drained) on connect.
  private final Queue<SocketOptionSetter> optionsToSet = Queues.newArrayDeque();

  public ApacheUnixSocket() throws IOException {
    this.inner = UnixSocketChannel.open();
    this.addr = null;
  }

  @Override
  public void connect(final SocketAddress endpoint) throws IOException {
    // NOTE(review): a non-Unix endpoint is silently ignored; the Apache client is expected to
    // always pass a UnixSocketAddress here.
    if (endpoint instanceof UnixSocketAddress) {
      addr = endpoint;
      inner.connect((UnixSocketAddress) endpoint);
      setAllSocketOptions();
    }
  }

  @Override
  public void connect(final SocketAddress endpoint, final int timeout) throws IOException {
    // The timeout is ignored: the wrapped channel's connect has no timeout variant.
    if (endpoint instanceof UnixSocketAddress) {
      addr = endpoint;
      inner.connect((UnixSocketAddress) endpoint);
      setAllSocketOptions();
    }
  }

  @Override
  public void bind(final SocketAddress bindpoint) throws IOException {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public InetAddress getInetAddress() {
    // There is no real peer address for a Unix socket; report localhost once connected.
    if (inner.isConnected()) {
      try {
        return InetAddress.getByName("localhost");
      } catch (UnknownHostException e) {
        return null;
      }
    }
    return null;
  }

  @Override
  public InetAddress getLocalAddress() {
    try {
      return InetAddress.getByAddress(new byte [] {0, 0, 0, 0}); // not bound
    } catch (UnknownHostException e) {
      return null;
    }
  }

  @Override
  public int getPort() {
    return -1; // meaningless for UNIX sockets
  }

  @Override
  public int getLocalPort() {
    return -1; // not bound
  }

  @Override
  public SocketAddress getRemoteSocketAddress() {
    return addr;
  }

  @Override
  public SocketAddress getLocalSocketAddress() {
    return null; // not bound
  }

  @Override
  public SocketChannel getChannel() {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public InputStream getInputStream() throws IOException {
    return Channels.newInputStream(inner);
  }

  @Override
  public OutputStream getOutputStream() throws IOException {
    return Channels.newOutputStream(inner);
  }

  /**
   * Runs the given option setter immediately if the socket is connected; otherwise queues it to
   * run on connect (the underlying file descriptor does not exist yet — see class javadoc).
   */
  private void setSocketOption(final SocketOptionSetter s) throws SocketException {
    if (inner.isConnected()) {
      s.run();
    } else {
      if (!optionsToSet.offer(s)) {
        throw new SocketException("Failed to queue option");
      }
    }
  }

  /**
   * Applies and drains all queued socket options. Bug fix: the original iterated without
   * removing, so a second connect() would re-apply stale setters.
   */
  private void setAllSocketOptions() throws SocketException {
    SocketOptionSetter s;
    while ((s = optionsToSet.poll()) != null) {
      s.run();
    }
  }

  @Override
  public void setTcpNoDelay(final boolean on) throws SocketException {
    // No-op: TCP_NODELAY has no meaning for Unix sockets.
  }

  @Override
  public boolean getTcpNoDelay() throws SocketException {
    return false;
  }

  @Override
  public void setSoLinger(final boolean on, final int linger) throws SocketException {
    // Only remember the timeout; close() approximates the linger behaviour.
    if (on) {
      lingerTime = linger;
    }
  }

  @Override
  public int getSoLinger() throws SocketException {
    return lingerTime;
  }

  @Override
  public void sendUrgentData(final int data) throws IOException {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public void setOOBInline(final boolean on) throws SocketException {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public boolean getOOBInline() throws SocketException {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public synchronized void setSoTimeout(final int timeout) throws SocketException {
    setSocketOption(new SocketOptionSetter() {
      @Override
      public void run() throws SocketException {
        inner.setSoTimeout(timeout);
      }
    });
  }

  @Override
  public synchronized int getSoTimeout() throws SocketException {
    return inner.getSoTimeout();
  }

  @Override
  public synchronized void setSendBufferSize(final int size) throws SocketException {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public synchronized int getSendBufferSize() throws SocketException {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public synchronized void setReceiveBufferSize(final int size) throws SocketException {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public synchronized int getReceiveBufferSize() throws SocketException {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public void setKeepAlive(final boolean on) throws SocketException {
    setSocketOption(new SocketOptionSetter() {
      @Override
      public void run() throws SocketException {
        inner.setKeepAlive(on);
      }
    });
  }

  @Override
  public boolean getKeepAlive() throws SocketException {
    return inner.getKeepAlive();
  }

  @Override
  public void setTrafficClass(final int tc) throws SocketException {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public int getTrafficClass() throws SocketException {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public void setReuseAddress(final boolean on) throws SocketException {
    // not supported: Apache client tries to set it, but we want to just ignore it
  }

  @Override
  public boolean getReuseAddress() throws SocketException {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public synchronized void close() throws IOException {
    // Approximate SO_LINGER: wait up to lingerTime seconds before tearing the socket down.
    if (lingerTime > 0) {
      try {
        wait(lingerTime * 1000L);
      } catch (InterruptedException e) {
        // Bug fix: restore the caller's interrupt status instead of silently swallowing it.
        Thread.currentThread().interrupt();
      }
    }
    shutdownInput();
    shutdownOutput();
    inner.close();
  }

  @Override
  public void shutdownInput() throws IOException {
    inner.shutdownInput();
  }

  @Override
  public void shutdownOutput() throws IOException {
    inner.shutdownOutput();
  }

  @Override
  public String toString() {
    if (addr != null) {
      return ((UnixSocketAddress) addr).toString();
    }
    return inner.toString();
  }

  @Override
  public boolean isConnected() {
    return inner.isConnected();
  }

  @Override
  public boolean isBound() {
    return false;
  }

  @Override
  public boolean isClosed() {
    return !inner.isOpen();
  }

  @Override
  public boolean isInputShutdown() {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public boolean isOutputShutdown() {
    throw new UnsupportedOperationException("Unimplemented");
  }

  @Override
  public void setPerformancePreferences(final int connectionTime, final int latency,
                                        final int bandwidth) {
    throw new UnsupportedOperationException("Unimplemented");
  }

  /** Deferred socket-option mutation; queued until the socket is connected. */
  interface SocketOptionSetter {
    void run() throws SocketException;
  }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ccr;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.MockLogAppender;
import org.elasticsearch.xpack.CcrSingleNodeTestCase;
import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator;
import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata;
import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction;
import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction;
import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
public class CcrLicenseIT extends CcrSingleNodeTestCase {
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singletonList(NonCompliantLicenseLocalStateCcr.class);
}
@Override
protected Settings nodeSettings() {
return Settings.EMPTY;
}
public void testThatFollowingIndexIsUnavailableWithNonCompliantLicense() throws InterruptedException {
final ResumeFollowAction.Request followRequest = getResumeFollowRequest("follower");
final CountDownLatch latch = new CountDownLatch(1);
client().execute(
ResumeFollowAction.INSTANCE,
followRequest,
new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(final AcknowledgedResponse response) {
latch.countDown();
fail();
}
@Override
public void onFailure(final Exception e) {
assertNonCompliantLicense(e);
latch.countDown();
}
});
latch.await();
}
public void testThatCreateAndFollowingIndexIsUnavailableWithNonCompliantLicense() throws InterruptedException {
final PutFollowAction.Request createAndFollowRequest = getPutFollowRequest("leader", "follower");
final CountDownLatch latch = new CountDownLatch(1);
client().execute(
PutFollowAction.INSTANCE,
createAndFollowRequest,
new ActionListener<PutFollowAction.Response>() {
@Override
public void onResponse(final PutFollowAction.Response response) {
latch.countDown();
fail();
}
@Override
public void onFailure(final Exception e) {
assertNonCompliantLicense(e);
latch.countDown();
}
});
latch.await();
}
public void testThatFollowStatsAreUnavailableWithNonCompliantLicense() throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(1);
client().execute(
FollowStatsAction.INSTANCE,
new FollowStatsAction.StatsRequest(),
new ActionListener<FollowStatsAction.StatsResponses>() {
@Override
public void onResponse(final FollowStatsAction.StatsResponses statsResponses) {
latch.countDown();
fail();
}
@Override
public void onFailure(final Exception e) {
assertNonCompliantLicense(e);
latch.countDown();
}
});
latch.await();
}
public void testThatPutAutoFollowPatternsIsUnavailableWithNonCompliantLicense() throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(1);
final PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request();
request.setName("name");
request.setRemoteCluster("leader");
request.setLeaderIndexPatterns(Collections.singletonList("*"));
client().execute(
PutAutoFollowPatternAction.INSTANCE,
request,
new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(final AcknowledgedResponse response) {
latch.countDown();
fail();
}
@Override
public void onFailure(final Exception e) {
assertNonCompliantLicense(e);
latch.countDown();
}
});
latch.await();
}
public void testAutoFollowCoordinatorLogsSkippingAutoFollowCoordinationWithNonCompliantLicense() throws Exception {
final Logger logger = LogManager.getLogger(AutoFollowCoordinator.class);
final MockLogAppender appender = new MockLogAppender();
appender.start();
appender.addExpectation(
new MockLogAppender.ExceptionSeenEventExpectation(
getTestName(),
logger.getName(),
Level.WARN,
"skipping auto-follower coordination",
ElasticsearchSecurityException.class,
"current license is non-compliant for [ccr]"));
try {
// Need to add mock log appender before submitting CS update, otherwise we miss the expected log:
// (Auto followers for new remote clusters are bootstrapped when a new cluster state is published)
Loggers.addAppender(logger, appender);
// Update the cluster state so that we have auto follow patterns and verify that we log a warning
// in case of incompatible license:
CountDownLatch latch = new CountDownLatch(1);
ClusterService clusterService = getInstanceFromNode(ClusterService.class);
clusterService.submitStateUpdateTask("test-add-auto-follow-pattern", new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
AutoFollowPattern autoFollowPattern = new AutoFollowPattern("test_alias", Collections.singletonList("logs-*"),
null, true, null, null, null, null, null, null, null, null, null, null);
AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(
Collections.singletonMap("test_alias", autoFollowPattern),
Collections.emptyMap(),
Collections.emptyMap());
ClusterState.Builder newState = ClusterState.builder(currentState);
newState.metaData(MetaData.builder(currentState.getMetaData())
.putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata)
.build());
return newState.build();
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
latch.countDown();
}
@Override
public void onFailure(String source, Exception e) {
latch.countDown();
fail("unexpected error [" + e.getMessage() + "]");
}
});
latch.await();
appender.assertAllExpectationsMatched();
} finally {
Loggers.removeAppender(logger, appender);
appender.stop();
}
}
private void assertNonCompliantLicense(final Exception e) {
assertThat(e, instanceOf(ElasticsearchSecurityException.class));
assertThat(e.getMessage(), equalTo("current license is non-compliant for [ccr]"));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.integration.jms;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSContext;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Queue;
import javax.jms.Session;
import javax.jms.TextMessage;
import org.apache.activemq.artemis.api.config.ActiveMQDefaultConfiguration;
import org.apache.activemq.artemis.api.core.QueueConfiguration;
import org.apache.activemq.artemis.api.core.RoutingType;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.core.postoffice.Binding;
import org.apache.activemq.artemis.core.postoffice.QueueBinding;
import org.apache.activemq.artemis.core.postoffice.impl.DivertBinding;
import org.apache.activemq.artemis.core.postoffice.impl.LocalQueueBinding;
import org.apache.activemq.artemis.core.security.Role;
import org.apache.activemq.artemis.core.server.cluster.impl.MessageLoadBalancingType;
import org.apache.activemq.artemis.core.server.cluster.impl.RemoteQueueBindingImpl;
import org.apache.activemq.artemis.core.server.embedded.EmbeddedActiveMQ;
import org.apache.activemq.artemis.core.server.impl.AddressInfo;
import org.apache.activemq.artemis.core.settings.impl.AddressFullMessagePolicy;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.jms.client.ActiveMQConnectionFactory;
import org.apache.activemq.artemis.jms.client.ActiveMQDestination;
import org.apache.activemq.artemis.tests.unit.core.postoffice.impl.FakeQueue;
import org.apache.activemq.artemis.tests.util.Wait;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.apache.activemq.artemis.utils.ReusableLatch;
import org.junit.Assert;
import org.junit.Test;
public class RedeployTest extends ActiveMQTestBase {
@Test
   /*
    * This tests that the broker doesn't fall over when it tries to delete any auto-created addresses/queues in a clustered environment.
    * If the undeploy fails, then bridges etc. can stop working; we need to make sure that if undeploy fails on anything, the broker is still live.
    */
public void testRedeployAutoCreateAddress() throws Exception {
Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
URL url1 = RedeployTest.class.getClassLoader().getResource("reload-test-autocreateaddress.xml");
URL url2 = RedeployTest.class.getClassLoader().getResource("reload-test-autocreateaddress-reload.xml");
Files.copy(url1.openStream(), brokerXML);
EmbeddedActiveMQ embeddedActiveMQ = new EmbeddedActiveMQ();
embeddedActiveMQ.setConfigResourcePath(brokerXML.toUri().toString());
embeddedActiveMQ.start();
ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory();
try (Connection connection = factory.createConnection()) {
Session session = connection.createSession();
Queue queue = session.createQueue("autoQueue");
MessageProducer producer = session.createProducer(queue);
producer.send(session.createTextMessage("text"));
connection.start();
MessageConsumer consumer = session.createConsumer(session.createQueue("autoQueue"));
Assert.assertNotNull("Address wasn't autocreated accordingly", consumer.receive(5000));
}
Assert.assertNotNull(getQueue(embeddedActiveMQ, "autoQueue"));
// this simulates a remote queue or other type being added that wouldnt get deleted, its not valid to have this happen but it can happen when addresses and queues are auto created in a clustered env
embeddedActiveMQ.getActiveMQServer().getPostOffice().addBinding(new RemoteQueueBindingImpl(5L,
new SimpleString("autoQueue"),
new SimpleString("uniqueName"),
new SimpleString("routingName"),
6L,
null,
new FakeQueue(new SimpleString("foo"), 6L),
new SimpleString("bridge"),
1,
MessageLoadBalancingType.OFF));
final ReusableLatch latch = new ReusableLatch(1);
Runnable tick = latch::countDown;
embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
try {
latch.await(10, TimeUnit.SECONDS);
Files.copy(url2.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
brokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
latch.setCount(1);
embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
latch.await(10, TimeUnit.SECONDS);
Assert.assertTrue(tryConsume());
Assert.assertNotNull(getQueue(embeddedActiveMQ, "autoQueue"));
factory = new ActiveMQConnectionFactory();
try (Connection connection = factory.createConnection()) {
Session session = connection.createSession();
Queue queue = session.createQueue("autoQueue");
MessageProducer producer = session.createProducer(queue);
producer.send(session.createTextMessage("text"));
connection.start();
MessageConsumer consumer = session.createConsumer(session.createQueue("autoQueue"));
Assert.assertNotNull("autoQueue redeployed accordingly", consumer.receive(5000));
}
} finally {
embeddedActiveMQ.stop();
}
}
@Test
public void testRedeploy() throws Exception {
   // Stage the initial JMS configuration as broker.xml in the test directory.
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL url1 = RedeployTest.class.getClassLoader().getResource("reload-test-jms.xml");
   URL url2 = RedeployTest.class.getClassLoader().getResource("reload-test-updated-jms.xml");
   Files.copy(url1.openStream(), brokerXML);
   EmbeddedActiveMQ embeddedActiveMQ = new EmbeddedActiveMQ();
   embeddedActiveMQ.setConfigResourcePath(brokerXML.toUri().toString());
   embeddedActiveMQ.start();
   // The reload manager invokes the tick on each configuration scan; the latch lets us wait for a scan.
   final ReusableLatch latch = new ReusableLatch(1);
   Runnable tick = latch::countDown;
   embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
   try {
      latch.await(10, TimeUnit.SECONDS);
      // Original config: DLQ/ExpiryQueue for the "jms" match, and "NewQueue" does not exist yet.
      Assert.assertEquals("DLQ", embeddedActiveMQ.getActiveMQServer().getAddressSettingsRepository().getMatch("jms").getDeadLetterAddress().toString());
      Assert.assertEquals("ExpiryQueue", embeddedActiveMQ.getActiveMQServer().getAddressSettingsRepository().getMatch("jms").getExpiryAddress().toString());
      Assert.assertFalse(tryConsume());
      // Swap in the updated configuration and bump the timestamp so the scanner detects the change.
      Files.copy(url2.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
      brokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
      latch.setCount(1);
      embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
      latch.await(10, TimeUnit.SECONDS);
      // Updated config: "NewQueue" now exists and serves as both dead-letter and expiry address.
      Assert.assertTrue(tryConsume());
      Assert.assertEquals("NewQueue", embeddedActiveMQ.getActiveMQServer().getAddressSettingsRepository().getMatch("jms").getDeadLetterAddress().toString());
      Assert.assertEquals("NewQueue", embeddedActiveMQ.getActiveMQServer().getAddressSettingsRepository().getMatch("jms").getExpiryAddress().toString());
      ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory();
      try (Connection connection = factory.createConnection()) {
         Session session = connection.createSession();
         Queue queue = session.createQueue("DivertQueue");
         MessageProducer producer = session.createProducer(queue);
         producer.send(session.createTextMessage("text"));
         connection.start();
         // The updated config diverts DivertQueue to NewQueue; a message sent to the former
         // must arrive on the latter.
         MessageConsumer consumer = session.createConsumer(session.createQueue("NewQueue"));
         Assert.assertNotNull("Divert wasn't redeployed accordingly", consumer.receive(5000));
      }
   } finally {
      embeddedActiveMQ.stop();
   }
}
@Test
public void testRedeploySecuritySettings() throws Exception {
   // Stage the original security configuration as the broker's broker.xml.
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL originalConfig = RedeployTest.class.getClassLoader().getResource("reload-security-settings.xml");
   URL updatedConfig = RedeployTest.class.getClassLoader().getResource("reload-security-settings-updated.xml");
   Files.copy(originalConfig.openStream(), brokerXML);
   EmbeddedActiveMQ broker = new EmbeddedActiveMQ();
   broker.setConfigResourcePath(brokerXML.toUri().toString());
   broker.start();
   // The reload manager invokes the tick on every configuration scan; the latch lets
   // the test block until a scan has completed.
   final ReusableLatch reloadLatch = new ReusableLatch(1);
   Runnable tick = reloadLatch::countDown;
   broker.getActiveMQServer().getReloadManager().setTick(tick);
   try {
      reloadLatch.await(10, TimeUnit.SECONDS);
      // The original configuration grants role "a" on the "foo" match.
      assertTrue(broker.getActiveMQServer().getSecurityRepository().getMatch("foo").stream()
                    .anyMatch(role -> role.getName().equals("a")));
      // Swap in the updated configuration and nudge the timestamp forward so the
      // file-change scanner is guaranteed to notice the change.
      Files.copy(updatedConfig.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
      brokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
      reloadLatch.setCount(1);
      broker.getActiveMQServer().getReloadManager().setTick(tick);
      reloadLatch.await(10, TimeUnit.SECONDS);
      // After the reload, the "foo" match must contain role "b" from the updated file.
      assertTrue(broker.getActiveMQServer().getSecurityRepository().getMatch("foo").stream()
                    .anyMatch(role -> role.getName().equals("b")));
   } finally {
      broker.stop();
   }
}
@Test
public void testRedeploySecuritySettingsWithManagementChange() throws Exception {
   // Stage the original security configuration as the broker's broker.xml.
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL url1 = RedeployTest.class.getClassLoader().getResource("reload-security-settings.xml");
   URL url2 = RedeployTest.class.getClassLoader().getResource("reload-security-settings-updated.xml");
   Files.copy(url1.openStream(), brokerXML);
   EmbeddedActiveMQ embeddedActiveMQ = new EmbeddedActiveMQ();
   embeddedActiveMQ.setConfigResourcePath(brokerXML.toUri().toString());
   embeddedActiveMQ.start();
   // The reload manager invokes the tick on each configuration scan; the latch lets us wait for a scan.
   final ReusableLatch latch = new ReusableLatch(1);
   Runnable tick = latch::countDown;
   embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
   try {
      latch.await(10, TimeUnit.SECONDS);
      // Original config: the "foo" match contains role "a".
      Set<Role> roles = embeddedActiveMQ.getActiveMQServer().getSecurityRepository().getMatch("foo");
      boolean found = false;
      for (Role role : roles) {
         if (role.getName().equals("a")) {
            found = true;
         }
      }
      assertTrue(found);
      // Add a "bar" match with role "c" at runtime through the management API; it must be visible
      // immediately and must survive the subsequent file-based redeploy below.
      embeddedActiveMQ.getActiveMQServer().getActiveMQServerControl().addSecuritySettings("bar", "c", "c", "c", "c", "c", "c", "c", "c", "c", "c");
      roles = embeddedActiveMQ.getActiveMQServer().getSecurityRepository().getMatch("bar");
      // Fix: reset the flag before scanning the new match; it was still true from the "foo"/"a"
      // check above, which made the following assertion vacuous.
      found = false;
      for (Role role : roles) {
         if (role.getName().equals("c")) {
            found = true;
         }
      }
      assertTrue(found);
      // Swap in the updated configuration and bump the timestamp so the scanner detects the change.
      Files.copy(url2.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
      brokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
      latch.setCount(1);
      embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
      latch.await(10, TimeUnit.SECONDS);
      // Updated config: the "foo" match now contains role "b".
      roles = embeddedActiveMQ.getActiveMQServer().getSecurityRepository().getMatch("foo");
      found = false;
      for (Role role : roles) {
         if (role.getName().equals("b")) {
            found = true;
         }
      }
      assertTrue(found);
      // The management-added "bar" match must not have been wiped out by the redeploy.
      roles = embeddedActiveMQ.getActiveMQServer().getSecurityRepository().getMatch("bar");
      found = false;
      for (Role role : roles) {
         if (role.getName().equals("c")) {
            found = true;
         }
      }
      assertTrue(found);
   } finally {
      embeddedActiveMQ.stop();
   }
}
@Test
public void testRedeployAddressSettingsWithManagementChange() throws Exception {
   // Stage the original address-settings configuration as the broker's broker.xml.
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL url1 = RedeployTest.class.getClassLoader().getResource("reload-address-settings.xml");
   URL url2 = RedeployTest.class.getClassLoader().getResource("reload-address-settings-updated.xml");
   Files.copy(url1.openStream(), brokerXML);
   EmbeddedActiveMQ embeddedActiveMQ = new EmbeddedActiveMQ();
   embeddedActiveMQ.setConfigResourcePath(brokerXML.toUri().toString());
   embeddedActiveMQ.start();
   // The reload manager invokes the tick on each configuration scan; the latch lets us wait for a scan.
   final ReusableLatch latch = new ReusableLatch(1);
   Runnable tick = latch::countDown;
   embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
   try {
      latch.await(10, TimeUnit.SECONDS);
      // Original config: dead-letter address "a" for the "foo" match.
      AddressSettings addressSettings = embeddedActiveMQ.getActiveMQServer().getAddressSettingsRepository().getMatch("foo");
      assertEquals("a", addressSettings.getDeadLetterAddress().toString());
      // Add a "bar" match at runtime via the management API with dead-letter address "c".
      // NOTE(review): the remaining positional arguments appear to be neutral defaults — verify
      // against the addAddressSettings overload in ActiveMQServerControl.
      embeddedActiveMQ.getActiveMQServer().getActiveMQServerControl().addAddressSettings("bar", "c", null, 0, false, 0, 0, 0, 0, 0, 0, 0, 0, false, null, 0, 0, null, false, false, false, false);
      addressSettings = embeddedActiveMQ.getActiveMQServer().getAddressSettingsRepository().getMatch("bar");
      assertEquals("c", addressSettings.getDeadLetterAddress().toString());
      // Swap in the updated configuration and bump the timestamp so the scanner detects the change.
      Files.copy(url2.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
      brokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
      latch.setCount(1);
      embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
      latch.await(10, TimeUnit.SECONDS);
      // Updated config: "foo" now points at "b", while the management-added "bar" match survives.
      addressSettings = embeddedActiveMQ.getActiveMQServer().getAddressSettingsRepository().getMatch("foo");
      assertEquals("b", addressSettings.getDeadLetterAddress().toString());
      addressSettings = embeddedActiveMQ.getActiveMQServer().getAddressSettingsRepository().getMatch("bar");
      assertEquals("c", addressSettings.getDeadLetterAddress().toString());
   } finally {
      embeddedActiveMQ.stop();
   }
}
@Test
public void testRedeployDivertsWithManagementChange() throws Exception {
   // Stage the original divert configuration as the broker's broker.xml.
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL url1 = RedeployTest.class.getClassLoader().getResource("reload-diverts.xml");
   URL url2 = RedeployTest.class.getClassLoader().getResource("reload-diverts-updated.xml");
   Files.copy(url1.openStream(), brokerXML);
   EmbeddedActiveMQ embeddedActiveMQ = new EmbeddedActiveMQ();
   embeddedActiveMQ.setConfigResourcePath(brokerXML.toUri().toString());
   embeddedActiveMQ.start();
   // The reload manager invokes the tick on each configuration scan; the latch lets us wait for a scan.
   final ReusableLatch latch = new ReusableLatch(1);
   Runnable tick = latch::countDown;
   embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
   try {
      latch.await(10, TimeUnit.SECONDS);
      // Original config: divert "a" exists and diverts from address "a".
      DivertBinding divertBinding = (DivertBinding) embeddedActiveMQ.getActiveMQServer().getPostOffice().getBinding(new SimpleString("a"));
      assertNotNull(divertBinding);
      assertEquals("a", divertBinding.getDivert().getAddress().toString());
      // Create divert "c" (address "c" -> "target") at runtime via the management API;
      // it must survive the file-based redeploy below.
      embeddedActiveMQ.getActiveMQServer().getActiveMQServerControl().createDivert("c", "c", "c", "target", false, null, null);
      // Swap in the updated configuration and bump the timestamp so the scanner detects the change.
      Files.copy(url2.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
      brokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
      latch.setCount(1);
      embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
      latch.await(10, TimeUnit.SECONDS);
      // Updated config: divert "b" now deployed; management-created divert "c" still present.
      divertBinding = (DivertBinding) embeddedActiveMQ.getActiveMQServer().getPostOffice().getBinding(new SimpleString("b"));
      assertNotNull(divertBinding);
      assertEquals("b", divertBinding.getDivert().getAddress().toString());
      divertBinding = (DivertBinding) embeddedActiveMQ.getActiveMQServer().getPostOffice().getBinding(new SimpleString("c"));
      assertNotNull(divertBinding);
      assertEquals("c", divertBinding.getDivert().getAddress().toString());
   } finally {
      embeddedActiveMQ.stop();
   }
}
@Test
public void testRedeployFilter() throws Exception {
   // Stage the original queue-filter configuration as the broker's broker.xml.
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL url1 = RedeployTest.class.getClassLoader().getResource("reload-queue-filter.xml");
   URL url2 = RedeployTest.class.getClassLoader().getResource("reload-queue-filter-updated.xml");
   Files.copy(url1.openStream(), brokerXML);
   EmbeddedActiveMQ embeddedActiveMQ = new EmbeddedActiveMQ();
   embeddedActiveMQ.setConfigResourcePath(brokerXML.toUri().toString());
   embeddedActiveMQ.start();
   // The reload manager invokes the tick on each configuration scan; the latch lets us wait for a scan.
   final ReusableLatch latch = new ReusableLatch(1);
   Runnable tick = latch::countDown;
   embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
   try {
      latch.await(10, TimeUnit.SECONDS);
      // With the original filter in place, a message with property x='x' must pass through.
      try (ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory();
           Connection connection = factory.createConnection();
           Session session = connection.createSession(Session.AUTO_ACKNOWLEDGE)) {
         connection.start();
         Queue queue = session.createQueue("myFilterQueue");
         MessageProducer producer = session.createProducer(queue);
         Message message = session.createMessage();
         message.setStringProperty("x", "x");
         producer.send(message);
         MessageConsumer consumer = session.createConsumer(queue);
         assertNotNull(consumer.receive(5000));
         consumer.close();
      }
      //Send a message that should remain in the queue (this ensures config change is non-destructive)
      try (ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory();
           Connection connection = factory.createConnection();
           Session session = connection.createSession(Session.AUTO_ACKNOWLEDGE)) {
         connection.start();
         Queue queue = session.createQueue("myFilterQueue");
         MessageProducer producer = session.createProducer(queue);
         Message message = session.createTextMessage("hello");
         message.setStringProperty("x", "x");
         producer.send(message);
      }
      // Capture the binding before the reload so we can verify the queue is updated in place.
      Binding binding = embeddedActiveMQ.getActiveMQServer().getPostOffice().getBinding(SimpleString.toSimpleString("myFilterQueue"));
      // Swap in the updated filter configuration and bump the timestamp so the scanner detects it.
      Files.copy(url2.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
      brokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
      latch.setCount(1);
      embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
      latch.await(10, TimeUnit.SECONDS);
      // The binding must be the same instance — the filter change must not drop/recreate the queue.
      Binding bindingAfterChange = embeddedActiveMQ.getActiveMQServer().getPostOffice().getBinding(SimpleString.toSimpleString("myFilterQueue"));
      assertTrue("Instance should be the same (as should be non destructive)", binding == bindingAfterChange);
      assertEquals(binding.getID(), bindingAfterChange.getID());
      //Check that after the config change we can still consume a message that was sent before, ensuring config change was non-destructive of the queue.
      try (ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory();
           Connection connection = factory.createConnection();
           Session session = connection.createSession(Session.AUTO_ACKNOWLEDGE)) {
         connection.start();
         Queue queue = session.createQueue("myFilterQueue");
         MessageConsumer consumer = session.createConsumer(queue);
         Message message = consumer.receive(5000);
         assertNotNull(message);
         assertEquals("hello", ((TextMessage)message).getText());
         consumer.close();
      }
      // The updated filter must now accept messages with x='y' as well.
      try (ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory();
           Connection connection = factory.createConnection();
           Session session = connection.createSession(Session.AUTO_ACKNOWLEDGE)) {
         connection.start();
         Queue queue = session.createQueue("myFilterQueue");
         MessageProducer producer = session.createProducer(queue);
         Message message = session.createMessage();
         message.setStringProperty("x", "y");
         producer.send(message);
         MessageConsumer consumer = session.createConsumer(queue);
         assertNotNull(consumer.receive(2000));
         consumer.close();
      }
   } finally {
      embeddedActiveMQ.stop();
   }
}
/**
 * Overwrites the test broker.xml with the given configuration file, bumps the file's
 * timestamp so the reload scanner detects the change, and blocks until the reload
 * manager has completed a scan.
 *
 * @param server the embedded broker whose configuration is being replaced
 * @param configFile classpath URL of the new broker.xml content
 * @throws Exception on copy failure, interruption, or if the reload does not happen in time
 */
private void deployBrokerConfig(EmbeddedActiveMQ server, URL configFile) throws Exception {
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   Files.copy(configFile.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
   // Push the mtime into the future so the scanner is guaranteed to see the file as modified.
   brokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
   final ReusableLatch latch = new ReusableLatch(1);
   Runnable tick = latch::countDown;
   server.getActiveMQServer().getReloadManager().setTick(tick);
   // Fix: the await result was previously ignored, so a reload timeout let callers proceed
   // against stale configuration and fail later with a confusing assertion. Fail loudly here.
   assertTrue("Broker configuration was not reloaded within 10 seconds", latch.await(10, TimeUnit.SECONDS));
}
/**
 * Deploys the base filtered-queue configuration, verifies the filter works, then redeploys
 * the supplied configuration (which removes the filter) and verifies the queue no longer
 * filters messages.
 *
 * @param testConfiguration classpath URL of a configuration in which myFilterQueue has no filter
 * @throws Exception on any broker or JMS failure
 */
private void doTestRemoveFilter(URL testConfiguration) throws Exception {
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL baseConfig = RedeployTest.class.getClassLoader().getResource("reload-queue-filter.xml");
   Files.copy(baseConfig.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
   EmbeddedActiveMQ embeddedActiveMQ = new EmbeddedActiveMQ();
   embeddedActiveMQ.setConfigResourcePath(brokerXML.toUri().toString());
   embeddedActiveMQ.start();
   // Redeploy the same base config once to make sure the reload machinery is primed.
   deployBrokerConfig(embeddedActiveMQ, baseConfig);
   try {
      try (ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory();
           Connection connection = factory.createConnection();
           Session session = connection.createSession(Session.AUTO_ACKNOWLEDGE)) {
         connection.start();
         Queue queue = session.createQueue("myFilterQueue");
         // Test that the original filter has been set up
         LocalQueueBinding queueBinding = (LocalQueueBinding) embeddedActiveMQ.getActiveMQServer().getPostOffice()
            .getBinding(new SimpleString("myFilterQueue"));
         // The "x = 'x'" value is found in "reload-queue-filter.xml"
         assertEquals("x = 'x'", queueBinding.getFilter().getFilterString().toString());
         MessageProducer producer = session.createProducer(queue);
         // Test that the original filter affects the flow
         Message passingMessage = session.createMessage();
         passingMessage.setStringProperty("x", "x");
         producer.send(passingMessage);
         Message filteredMessage = session.createMessage();
         filteredMessage.setStringProperty("x", "y");
         producer.send(filteredMessage);
         MessageConsumer consumer = session.createConsumer(queue);
         // Only the x='x' message should be routed to the queue; the x='y' one is filtered out.
         Message receivedMessage = consumer.receive(2000);
         assertNotNull(receivedMessage);
         assertEquals("x", receivedMessage.getStringProperty("x"));
         assertNull(consumer.receive(2000));
         consumer.close();
      }
      // Apply the configuration under test, which removes the filter from myFilterQueue.
      deployBrokerConfig(embeddedActiveMQ, testConfiguration);
      try (ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory();
           Connection connection = factory.createConnection();
           Session session = connection.createSession(Session.AUTO_ACKNOWLEDGE)) {
         connection.start();
         Queue queue = session.createQueue("myFilterQueue");
         // Test that the filter has been removed
         LocalQueueBinding queueBinding = (LocalQueueBinding) embeddedActiveMQ.getActiveMQServer().getPostOffice()
            .getBinding(new SimpleString("myFilterQueue"));
         assertNull(queueBinding.getFilter());
         MessageProducer producer = session.createProducer(queue);
         // Test that the original filter no longer affects the flow
         Message message1 = session.createMessage();
         message1.setStringProperty("x", "x");
         producer.send(message1);
         Message message2 = session.createMessage();
         message2.setStringProperty("x", "y");
         producer.send(message2);
         MessageConsumer consumer = session.createConsumer(queue);
         // Both messages must now be delivered.
         assertNotNull(consumer.receive(2000));
         assertNotNull(consumer.receive(2000));
         consumer.close();
      }
   } finally {
      embeddedActiveMQ.stop();
   }
}
@Test
public void testRedeployRemoveFilter() throws Exception {
   ClassLoader loader = RedeployTest.class.getClassLoader();
   // A filter can be removed either by redeploying with an empty filter attribute
   // or by omitting the filter element entirely; both must behave the same.
   doTestRemoveFilter(loader.getResource("reload-queue-filter-updated-empty.xml"));
   doTestRemoveFilter(loader.getResource("reload-queue-filter-removed.xml"));
}
/**
 * This one is here just to make sure it's possible to change queue parameters one by one without setting the others
 * to <code>null</code>.
 * @throws Exception
 */
@Test
public void testQueuePartialReconfiguration() throws Exception {
   // Start an embedded broker from an empty configuration.
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL emptyConfig = RedeployTest.class.getClassLoader().getResource("reload-empty.xml");
   Files.copy(emptyConfig.openStream(), brokerXML);
   EmbeddedActiveMQ broker = new EmbeddedActiveMQ();
   broker.setConfigResourcePath(brokerXML.toUri().toString());
   broker.start();
   try {
      // Create the queue setting only the user, then update it setting only the filter.
      broker.getActiveMQServer().createQueue(new QueueConfiguration("virtualQueue").setUser("bob"));
      broker.getActiveMQServer().updateQueue(new QueueConfiguration("virtualQueue").setFilterString("foo"));
      LocalQueueBinding binding = (LocalQueueBinding) broker.getActiveMQServer().getPostOffice()
         .getBinding(new SimpleString("virtualQueue"));
      org.apache.activemq.artemis.core.server.Queue queue = binding.getQueue();
      // Both attributes must be present: the partial update must not null out the user.
      assertEquals(new SimpleString("bob"), queue.getUser());
      assertEquals(new SimpleString("foo"), queue.getFilter().getFilterString());
   } finally {
      broker.stop();
   }
}
@Test
public void testRedeployQueueDefaults() throws Exception {
   // Start with a configuration where every queue attribute of myQueue is set to a
   // non-default value, then redeploy a config that omits them all and verify each
   // attribute falls back to the broker default.
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL baseConfig = RedeployTest.class.getClassLoader().getResource("reload-queue-defaults-before.xml");
   URL newConfig = RedeployTest.class.getClassLoader().getResource("reload-queue-defaults-after.xml");
   Files.copy(baseConfig.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
   EmbeddedActiveMQ embeddedActiveMQ = new EmbeddedActiveMQ();
   embeddedActiveMQ.setConfigResourcePath(brokerXML.toUri().toString());
   embeddedActiveMQ.start();
   try {
      LocalQueueBinding queueBinding = (LocalQueueBinding) embeddedActiveMQ.getActiveMQServer().getPostOffice()
         .getBinding(new SimpleString("myQueue"));
      org.apache.activemq.artemis.core.server.Queue queue = queueBinding.getQueue();
      // Sanity check: the "before" config overrides every attribute away from its default.
      assertNotEquals(ActiveMQDefaultConfiguration.getDefaultMaxQueueConsumers(), queue.getMaxConsumers());
      assertNotEquals(RoutingType.MULTICAST, queue.getRoutingType());
      assertNotEquals(ActiveMQDefaultConfiguration.getDefaultPurgeOnNoConsumers(), queue.isPurgeOnNoConsumers());
      assertNotEquals(ActiveMQDefaultConfiguration.getDefaultEnabled(), queue.isEnabled());
      assertNotEquals(ActiveMQDefaultConfiguration.getDefaultExclusive(), queue.isExclusive());
      assertNotEquals(ActiveMQDefaultConfiguration.getDefaultGroupRebalance(), queue.isGroupRebalance());
      assertNotEquals(ActiveMQDefaultConfiguration.getDefaultGroupBuckets(), queue.getGroupBuckets());
      assertNotEquals(ActiveMQDefaultConfiguration.getDefaultGroupFirstKey(), queue.getGroupFirstKey());
      assertNotEquals(ActiveMQDefaultConfiguration.getDefaultNonDestructive(), queue.isNonDestructive());
      assertNotEquals(ActiveMQDefaultConfiguration.getDefaultConsumersBeforeDispatch(), queue.getConsumersBeforeDispatch());
      assertNotEquals(ActiveMQDefaultConfiguration.getDefaultDelayBeforeDispatch(), queue.getDelayBeforeDispatch());
      assertNotNull(queue.getFilter());
      assertEquals(new SimpleString("jdoe"), queue.getUser());
      assertNotEquals(ActiveMQDefaultConfiguration.getDefaultRingSize(), queue.getRingSize());
      // Redeploy a config that specifies none of the attributes; each must revert to its default.
      deployBrokerConfig(embeddedActiveMQ, newConfig);
      assertEquals(ActiveMQDefaultConfiguration.getDefaultMaxQueueConsumers(), queue.getMaxConsumers());
      assertEquals(RoutingType.MULTICAST, queue.getRoutingType());
      assertEquals(ActiveMQDefaultConfiguration.getDefaultPurgeOnNoConsumers(), queue.isPurgeOnNoConsumers());
      assertEquals(ActiveMQDefaultConfiguration.getDefaultEnabled(), queue.isEnabled());
      assertEquals(ActiveMQDefaultConfiguration.getDefaultExclusive(), queue.isExclusive());
      assertEquals(ActiveMQDefaultConfiguration.getDefaultGroupRebalance(), queue.isGroupRebalance());
      assertEquals(ActiveMQDefaultConfiguration.getDefaultGroupBuckets(), queue.getGroupBuckets());
      assertEquals(ActiveMQDefaultConfiguration.getDefaultGroupFirstKey(), queue.getGroupFirstKey());
      assertEquals(ActiveMQDefaultConfiguration.getDefaultNonDestructive(), queue.isNonDestructive());
      assertEquals(ActiveMQDefaultConfiguration.getDefaultConsumersBeforeDispatch(), queue.getConsumersBeforeDispatch());
      assertEquals(ActiveMQDefaultConfiguration.getDefaultDelayBeforeDispatch(), queue.getDelayBeforeDispatch());
      assertNull(queue.getFilter());
      assertNull(queue.getUser());
      assertEquals(ActiveMQDefaultConfiguration.getDefaultRingSize(), queue.getRingSize());
   } finally {
      embeddedActiveMQ.stop();
   }
}
@Test
public void testUndeployDivert() throws Exception {
   // Start with a config containing a divert from "source" to "target"; the updated config
   // removes it, and the divert binding must be undeployed.
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL baseConfig = RedeployTest.class.getClassLoader().getResource("reload-divert-undeploy-before.xml");
   URL newConfig = RedeployTest.class.getClassLoader().getResource("reload-divert-undeploy-after.xml");
   Files.copy(baseConfig.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
   EmbeddedActiveMQ embeddedActiveMQ = new EmbeddedActiveMQ();
   embeddedActiveMQ.setConfigResourcePath(brokerXML.toUri().toString());
   embeddedActiveMQ.start();
   try {
      DivertBinding divertBinding = (DivertBinding) embeddedActiveMQ.getActiveMQServer().getPostOffice()
         .getBinding(new SimpleString("divert"));
      assertNotNull(divertBinding);
      Queue sourceQueue = (Queue) ActiveMQDestination.createDestination("queue://source", ActiveMQDestination.TYPE.QUEUE);
      Queue targetQueue = (Queue) ActiveMQDestination.createDestination("queue://target", ActiveMQDestination.TYPE.QUEUE);
      // While the divert is deployed, a message sent to source arrives on both source and target.
      try (ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory();
           Connection connection = factory.createConnection();
           Session session = connection.createSession(Session.AUTO_ACKNOWLEDGE);
           MessageProducer sourceProducer = session.createProducer(sourceQueue);
           MessageConsumer sourceConsumer = session.createConsumer(sourceQueue);
           MessageConsumer targetConsumer = session.createConsumer(targetQueue)) {
         connection.start();
         Message message = session.createTextMessage("Hello world");
         sourceProducer.send(message);
         assertNotNull(sourceConsumer.receive(2000));
         assertNotNull(targetConsumer.receive(2000));
      }
      // Redeploy without the divert and wait for the binding to disappear.
      deployBrokerConfig(embeddedActiveMQ, newConfig);
      Wait.waitFor(() -> embeddedActiveMQ.getActiveMQServer().getPostOffice()
         .getBinding(new SimpleString("divert")) == null);
      divertBinding = (DivertBinding) embeddedActiveMQ.getActiveMQServer().getPostOffice()
         .getBinding(new SimpleString("divert"));
      assertNull(divertBinding);
      // With the divert undeployed, only the source queue receives the message.
      try (ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory();
           Connection connection = factory.createConnection();
           Session session = connection.createSession(Session.AUTO_ACKNOWLEDGE);
           MessageProducer sourceProducer = session.createProducer(sourceQueue);
           MessageConsumer sourceConsumer = session.createConsumer(sourceQueue);
           MessageConsumer targetConsumer = session.createConsumer(targetQueue)) {
         connection.start();
         Message message = session.createTextMessage("Hello world");
         sourceProducer.send(message);
         assertNotNull(sourceConsumer.receive(2000));
         assertNull(targetConsumer.receiveNoWait());
      }
   } finally {
      embeddedActiveMQ.stop();
   }
}
@Test
public void testRedeployWithFailover() throws Exception {
   // Expected security-settings before and after the redeploy.
   Set<Role> original = new HashSet<>();
   original.add(new Role("a", false, true, false, false, false, false, false, false, false, false));
   Set<Role> changed = new HashSet<>();
   changed.add(new Role("b", false, true, false, false, false, false, false, false, false, false));
   EmbeddedActiveMQ live = new EmbeddedActiveMQ();
   EmbeddedActiveMQ backup = new EmbeddedActiveMQ();
   try {
      // set these system properties to use in the relevant broker.xml files
      System.setProperty("live-data-dir", getTestDirfile().toPath() + "/redeploy-live-data");
      System.setProperty("backup-data-dir", getTestDirfile().toPath() + "/redeploy-backup-data");
      Path liveBrokerXML = getTestDirfile().toPath().resolve("live.xml");
      Path backupBrokerXML = getTestDirfile().toPath().resolve("backup.xml");
      URL url1 = RedeployTest.class.getClassLoader().getResource("reload-live-original.xml");
      URL url2 = RedeployTest.class.getClassLoader().getResource("reload-live-changed.xml");
      URL url3 = RedeployTest.class.getClassLoader().getResource("reload-backup-original.xml");
      URL url4 = RedeployTest.class.getClassLoader().getResource("reload-backup-changed.xml");
      Files.copy(url1.openStream(), liveBrokerXML);
      Files.copy(url3.openStream(), backupBrokerXML);
      // Start the live broker, then the backup, and wait for replication to sync.
      live.setConfigResourcePath(liveBrokerXML.toUri().toString());
      live.start();
      waitForServerToStart(live.getActiveMQServer());
      backup.setConfigResourcePath(backupBrokerXML.toUri().toString());
      backup.start();
      assertTrue(Wait.waitFor(() -> backup.getActiveMQServer().isReplicaSync(), 15000, 200));
      // Both brokers must start with the original address and security settings.
      assertEquals("Test address settings original - live", AddressFullMessagePolicy.BLOCK, live.getActiveMQServer().getAddressSettingsRepository().getMatch("myQueue").getAddressFullMessagePolicy());
      assertEquals("Test address settings original - backup", AddressFullMessagePolicy.BLOCK, backup.getActiveMQServer().getAddressSettingsRepository().getMatch("myQueue").getAddressFullMessagePolicy());
      assertEquals("Test security settings original - live", original, live.getActiveMQServer().getSecurityRepository().getMatch("myQueue"));
      assertEquals("Test security settings original - backup", original, backup.getActiveMQServer().getSecurityRepository().getMatch("myQueue"));
      // Separate reload-tick latches for each broker so we can wait on their scans independently.
      final ReusableLatch liveReloadLatch = new ReusableLatch(1);
      Runnable liveTick = () -> liveReloadLatch.countDown();
      live.getActiveMQServer().getReloadManager().setTick(liveTick);
      final ReusableLatch backupReloadTickLatch = new ReusableLatch(1);
      Runnable backupTick = () -> backupReloadTickLatch.countDown();
      backup.getActiveMQServer().getReloadManager().setTick(backupTick);
      liveReloadLatch.await(10, TimeUnit.SECONDS);
      // Redeploy the changed configuration on the live broker.
      Files.copy(url2.openStream(), liveBrokerXML, StandardCopyOption.REPLACE_EXISTING);
      liveBrokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
      liveReloadLatch.countUp();
      live.getActiveMQServer().getReloadManager().setTick(liveTick);
      liveReloadLatch.await(10, TimeUnit.SECONDS);
      backupReloadTickLatch.await(10, TimeUnit.SECONDS);
      // Redeploy the changed configuration on the backup broker.
      Files.copy(url4.openStream(), backupBrokerXML, StandardCopyOption.REPLACE_EXISTING);
      backupBrokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
      backupReloadTickLatch.countUp();
      backup.getActiveMQServer().getReloadManager().setTick(backupTick);
      backupReloadTickLatch.await(10, TimeUnit.SECONDS);
      // Send a message through the live broker (port 61616) before failover.
      ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory("tcp://127.0.0.1:61616");
      try (Connection connection = factory.createConnection()) {
         Session session = connection.createSession();
         Queue queue = session.createQueue("myQueue2");
         MessageProducer producer = session.createProducer(queue);
         producer.send(session.createTextMessage("text1"));
      }
      assertFalse(backup.getActiveMQServer().isActive());
      // Live broker must reflect the redeployed settings before failover.
      assertEquals("Test address settings redeploy - live", AddressFullMessagePolicy.PAGE, live.getActiveMQServer().getAddressSettingsRepository().getMatch("myQueue").getAddressFullMessagePolicy());
      assertEquals("Test security settings redeploy - live", changed, live.getActiveMQServer().getSecurityRepository().getMatch("myQueue"));
      // Stop the live broker to trigger failover to the backup.
      live.stop();
      assertTrue(Wait.waitFor(() -> (backup.getActiveMQServer().isActive()), 5000, 100));
      // After failover, the backup (port 61617) must serve the queue, including the
      // message sent before the switch-over plus the one sent now.
      factory = new ActiveMQConnectionFactory("tcp://127.0.0.1:61617");
      try (Connection connection = factory.createConnection()) {
         Session session = connection.createSession();
         Queue queue = session.createQueue("myQueue2");
         MessageProducer producer = session.createProducer(queue);
         producer.send(session.createTextMessage("text"));
         connection.start();
         MessageConsumer consumer = session.createConsumer(session.createQueue("myQueue2"));
         Assert.assertNotNull("Queue wasn't deployed accordingly", consumer.receive(5000));
         Assert.assertNotNull(consumer.receive(5000));
      }
      // The now-active backup must also reflect the redeployed settings.
      assertEquals("Test security settings redeploy - backup", changed, backup.getActiveMQServer().getSecurityRepository().getMatch("myQueue"));
      assertEquals("Test address settings redeploy - backup", AddressFullMessagePolicy.PAGE, backup.getActiveMQServer().getAddressSettingsRepository().getMatch("myQueue").getAddressFullMessagePolicy());
   } finally {
      live.stop();
      backup.stop();
      System.clearProperty("live-data-dir");
      System.clearProperty("backup-data-dir");
   }
}
/**
 * Probes whether the queue "NewQueue" is currently deployed by attempting to
 * create a consumer on it over a default connection.
 *
 * @return {@code true} if a consumer could be created (queue exists and is
 *         accessible), {@code false} if consumer creation raised a JMSException.
 * @throws JMSException never in practice; declared for the try-with-resources close path.
 */
private boolean tryConsume() throws JMSException {
try (ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory();
Connection connection = factory.createConnection();
Session session = connection.createSession(Session.AUTO_ACKNOWLEDGE)) {
Queue queue = session.createQueue("NewQueue");
// Creating the consumer IS the probe; the consumer itself is intentionally never used.
MessageConsumer consumer = session.createConsumer(queue);
return true;
} catch (JMSException e) {
// Failure to create the consumer means the queue is not (or no longer) deployed.
return false;
}
}
/**
 * Verifies that addresses and queues declared in broker.xml are created on start-up
 * and are added/removed/updated when the configuration file is replaced and reloaded,
 * while queues created by clients (e.g. shared durable subscriptions) survive the reload.
 */
@Test
public void testRedeployAddressQueue() throws Exception {
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL url1 = RedeployTest.class.getClassLoader().getResource("reload-address-queues.xml");
   URL url2 = RedeployTest.class.getClassLoader().getResource("reload-address-queues-updated.xml");
   Files.copy(url1.openStream(), brokerXML);
   EmbeddedActiveMQ embeddedActiveMQ = new EmbeddedActiveMQ();
   embeddedActiveMQ.setConfigResourcePath(brokerXML.toUri().toString());
   embeddedActiveMQ.start();
   final ReusableLatch latch = new ReusableLatch(1);
   Runnable tick = latch::countDown;
   embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
   ConnectionFactory connectionFactory = new ActiveMQConnectionFactory();
   try (JMSContext jmsContext = connectionFactory.createContext()) {
      // Creates the client-side shared-durable-subscription queue "mySub" that must survive the reload below.
      jmsContext.createSharedDurableConsumer(jmsContext.createTopic("config_test_consumer_created_queues"), "mySub").receive(100);
   }
   try {
      // Fail fast if the reload tick never fires instead of silently asserting against stale state.
      Assert.assertTrue(latch.await(10, TimeUnit.SECONDS));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_consumer_created_queues").contains("mySub"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_address_removal_no_queue"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_address_removal"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_queue_removal"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_removal").contains("config_test_queue_removal_queue_1"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_removal").contains("config_test_queue_removal_queue_2"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "permanent_test_address_removal"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "permanent_test_queue_removal"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "permanent_test_queue_removal").contains("permanent_test_queue_removal_queue_1"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "permanent_test_queue_removal").contains("permanent_test_queue_removal_queue_2"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_queue_change"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_change").contains("config_test_queue_change_queue"));
      Assert.assertEquals(10, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").getMaxConsumers());
      Assert.assertEquals(false, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").isPurgeOnNoConsumers());
      // Replace the configuration and bump the file timestamp so the reload manager picks it up.
      Files.copy(url2.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
      brokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
      latch.setCount(1);
      embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
      Assert.assertTrue(latch.await(10, TimeUnit.SECONDS));
      // Ensure queues created by clients (NOT by broker.xml) are not removed when we reload.
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_consumer_created_queues").contains("mySub"));
      Assert.assertNull(getAddressInfo(embeddedActiveMQ, "config_test_address_removal_no_queue"));
      Assert.assertNull(getAddressInfo(embeddedActiveMQ, "config_test_address_removal"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_queue_removal"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_removal").contains("config_test_queue_removal_queue_1"));
      Assert.assertFalse(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_removal").contains("config_test_queue_removal_queue_2"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "permanent_test_address_removal"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "permanent_test_queue_removal"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "permanent_test_queue_removal").contains("permanent_test_queue_removal_queue_1"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "permanent_test_queue_removal").contains("permanent_test_queue_removal_queue_2"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_queue_change"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_change").contains("config_test_queue_change_queue"));
      Assert.assertEquals(1, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").getMaxConsumers());
      Assert.assertEquals(true, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").isPurgeOnNoConsumers());
      Assert.assertNull(getAddressInfo(embeddedActiveMQ, "config_test_queue_change_queue"));
      Assert.assertNull(getAddressInfo(embeddedActiveMQ, "config_test_queue_removal_queue_1"));
   } finally {
      embeddedActiveMQ.stop();
   }
}
/**
 * Verifies that changing a queue's routing type (ANYCAST -> MULTICAST) via a broker.xml
 * reload does not destroy the queue: a message sent before the change must still be
 * consumable afterwards (i.e. no message loss).
 */
@Test
public void testRedeployChangeQueueRoutingType() throws Exception {
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL url1 = RedeployTest.class.getClassLoader().getResource("reload-queue-routingtype.xml");
   URL url2 = RedeployTest.class.getClassLoader().getResource("reload-queue-routingtype-updated.xml");
   Files.copy(url1.openStream(), brokerXML);
   EmbeddedActiveMQ embeddedActiveMQ = new EmbeddedActiveMQ();
   embeddedActiveMQ.setConfigResourcePath(brokerXML.toUri().toString());
   embeddedActiveMQ.start();
   final ReusableLatch latch = new ReusableLatch(1);
   embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(latch::countDown);
   try {
      ConnectionFactory connectionFactory = new ActiveMQConnectionFactory("tcp://0.0.0.0:61616");
      try (JMSContext context = connectionFactory.createContext()) {
         context.createProducer().send(context.createQueue("myAddress"), "hello");
      }
      // Fail fast if the reload tick never fires instead of silently asserting against stale state
      // (mirrors the assertion already done on the second await below).
      Assert.assertTrue(latch.await(10, TimeUnit.SECONDS));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "myAddress"));
      Assert.assertEquals(RoutingType.ANYCAST, getQueue(embeddedActiveMQ, "myQueue").getRoutingType());
      // Replace the configuration and bump the file timestamp so the reload manager picks it up.
      Files.copy(url2.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
      brokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
      latch.setCount(1);
      embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(latch::countDown);
      Assert.assertTrue(latch.await(10, TimeUnit.SECONDS));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "myAddress"));
      Assert.assertEquals(RoutingType.MULTICAST, getQueue(embeddedActiveMQ, "myQueue").getRoutingType());
      // Ensures the queue isn't destroyed by checking the message sent before the change is consumable after (e.g. no message loss).
      try (JMSContext context = connectionFactory.createContext()) {
         Message message = context.createSharedDurableConsumer(context.createTopic("myAddress"), "myQueue").receive();
         assertEquals("hello", ((TextMessage) message).getText());
      }
   } finally {
      embeddedActiveMQ.stop();
   }
}
/**
 * Simulates Stop and Start that occurs when network health checker stops the server when network is detected unhealthy
 * and re-starts the broker once detected that it is healthy again. The redeployed security settings,
 * address settings, addresses and queues must all persist across that stop/start (same JVM).
 *
 * @throws Exception for anything un-expected, test will fail.
 */
@Test
public void testRedeployStopAndRestart() throws Exception {
   Path brokerXML = getTestDirfile().toPath().resolve("broker.xml");
   URL url1 = RedeployTest.class.getClassLoader().getResource("reload-original.xml");
   URL url2 = RedeployTest.class.getClassLoader().getResource("reload-changed.xml");
   Files.copy(url1.openStream(), brokerXML);
   EmbeddedActiveMQ embeddedActiveMQ = new EmbeddedActiveMQ();
   embeddedActiveMQ.setConfigResourcePath(brokerXML.toUri().toString());
   embeddedActiveMQ.start();
   final ReusableLatch latch = new ReusableLatch(1);
   Runnable tick = latch::countDown;
   embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
   try {
      // Fail fast if the reload tick never fires instead of silently asserting against stale state.
      Assert.assertTrue(latch.await(10, TimeUnit.SECONDS));
      // JUnit convention: expected value first, actual second, so failure messages read correctly.
      Assert.assertEquals(1, getSecurityRoles(embeddedActiveMQ, "security_address").size());
      Assert.assertEquals("b", getSecurityRoles(embeddedActiveMQ, "security_address").iterator().next().getName());
      Assert.assertEquals(SimpleString.toSimpleString("OriginalDLQ"), getAddressSettings(embeddedActiveMQ, "address_settings_address").getDeadLetterAddress());
      Assert.assertEquals(SimpleString.toSimpleString("OriginalExpiryQueue"), getAddressSettings(embeddedActiveMQ, "address_settings_address").getExpiryAddress());
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_address_removal_no_queue"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_address_removal"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_queue_removal"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_removal").contains("config_test_queue_removal_queue_1"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_removal").contains("config_test_queue_removal_queue_2"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_queue_change"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_change").contains("config_test_queue_change_queue"));
      Assert.assertEquals(10, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").getMaxConsumers());
      Assert.assertEquals(false, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").isPurgeOnNoConsumers());
      Assert.assertEquals(true, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").isEnabled());
      Assert.assertEquals(true, getQueue(embeddedActiveMQ, "config_test_queue_change_queue_defaults").isPurgeOnNoConsumers());
      Assert.assertEquals(false, getQueue(embeddedActiveMQ, "config_test_queue_change_queue_defaults").isEnabled());
      // Replace the configuration and bump the file timestamp so the reload manager picks it up.
      Files.copy(url2.openStream(), brokerXML, StandardCopyOption.REPLACE_EXISTING);
      brokerXML.toFile().setLastModified(System.currentTimeMillis() + 1000);
      latch.setCount(1);
      embeddedActiveMQ.getActiveMQServer().getReloadManager().setTick(tick);
      Assert.assertTrue(latch.await(10, TimeUnit.SECONDS));
      //Assert that the security settings change applied
      Assert.assertEquals(1, getSecurityRoles(embeddedActiveMQ, "security_address").size());
      Assert.assertEquals("c", getSecurityRoles(embeddedActiveMQ, "security_address").iterator().next().getName());
      //Assert that the address settings change applied
      Assert.assertEquals(SimpleString.toSimpleString("NewDLQ"), getAddressSettings(embeddedActiveMQ, "address_settings_address").getDeadLetterAddress());
      Assert.assertEquals(SimpleString.toSimpleString("NewExpiryQueue"), getAddressSettings(embeddedActiveMQ, "address_settings_address").getExpiryAddress());
      //Assert the address and queue changes applied
      Assert.assertNull(getAddressInfo(embeddedActiveMQ, "config_test_address_removal_no_queue"));
      Assert.assertNull(getAddressInfo(embeddedActiveMQ, "config_test_address_removal"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_queue_removal"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_removal").contains("config_test_queue_removal_queue_1"));
      Assert.assertFalse(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_removal").contains("config_test_queue_removal_queue_2"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_queue_change"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_change").contains("config_test_queue_change_queue"));
      Assert.assertEquals(1, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").getMaxConsumers());
      Assert.assertEquals(true, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").isPurgeOnNoConsumers());
      Assert.assertEquals(false, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").isEnabled());
      Assert.assertEquals(false, getQueue(embeddedActiveMQ, "config_test_queue_change_queue_defaults").isPurgeOnNoConsumers());
      Assert.assertEquals(true, getQueue(embeddedActiveMQ, "config_test_queue_change_queue_defaults").isEnabled());
   } finally {
      embeddedActiveMQ.stop();
   }
   try {
      embeddedActiveMQ.start();
      //Assert that the security settings changes persist a stop and start server (e.g. like what occurs if network health check stops the node), but JVM remains up.
      Assert.assertEquals(1, getSecurityRoles(embeddedActiveMQ, "security_address").size());
      Assert.assertEquals("c", getSecurityRoles(embeddedActiveMQ, "security_address").iterator().next().getName());
      //Assert that the address settings changes persist a stop and start server (e.g. like what occurs if network health check stops the node), but JVM remains up.
      Assert.assertEquals(SimpleString.toSimpleString("NewDLQ"), getAddressSettings(embeddedActiveMQ, "address_settings_address").getDeadLetterAddress());
      Assert.assertEquals(SimpleString.toSimpleString("NewExpiryQueue"), getAddressSettings(embeddedActiveMQ, "address_settings_address").getExpiryAddress());
      //Assert that the address and queue changes persist a stop and start server (e.g. like what occurs if network health check stops the node), but JVM remains up.
      Assert.assertNull(getAddressInfo(embeddedActiveMQ, "config_test_address_removal_no_queue"));
      Assert.assertNull(getAddressInfo(embeddedActiveMQ, "config_test_address_removal"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_queue_removal"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_removal").contains("config_test_queue_removal_queue_1"));
      Assert.assertFalse(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_removal").contains("config_test_queue_removal_queue_2"));
      Assert.assertNotNull(getAddressInfo(embeddedActiveMQ, "config_test_queue_change"));
      Assert.assertTrue(listQueuesNamesForAddress(embeddedActiveMQ, "config_test_queue_change").contains("config_test_queue_change_queue"));
      Assert.assertEquals(1, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").getMaxConsumers());
      Assert.assertEquals(true, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").isPurgeOnNoConsumers());
      Assert.assertEquals(false, getQueue(embeddedActiveMQ, "config_test_queue_change_queue").isEnabled());
      Assert.assertEquals(false, getQueue(embeddedActiveMQ, "config_test_queue_change_queue_defaults").isPurgeOnNoConsumers());
      Assert.assertEquals(true, getQueue(embeddedActiveMQ, "config_test_queue_change_queue_defaults").isEnabled());
   } finally {
      embeddedActiveMQ.stop();
   }
}
/**
 * Looks up the merged address settings that currently match {@code address}
 * on the embedded server's address-settings repository.
 */
private AddressSettings getAddressSettings(EmbeddedActiveMQ embeddedActiveMQ, String address) {
   return embeddedActiveMQ.getActiveMQServer()
      .getAddressSettingsRepository()
      .getMatch(address);
}
/**
 * Looks up the security roles that currently match {@code address}
 * on the embedded server's security repository.
 */
private Set<Role> getSecurityRoles(EmbeddedActiveMQ embeddedActiveMQ, String address) {
   return embeddedActiveMQ.getActiveMQServer()
      .getSecurityRepository()
      .getMatch(address);
}
/**
 * Fetches the post-office address info for {@code address},
 * or {@code null} if the address does not exist.
 */
private AddressInfo getAddressInfo(EmbeddedActiveMQ embeddedActiveMQ, String address) {
   return embeddedActiveMQ.getActiveMQServer()
      .getPostOffice()
      .getAddressInfo(SimpleString.toSimpleString(address));
}
/**
 * Resolves the server-side queue bound under {@code queueName},
 * or {@code null} when no such binding exists.
 */
private org.apache.activemq.artemis.core.server.Queue getQueue(EmbeddedActiveMQ embeddedActiveMQ, String queueName) throws Exception {
   QueueBinding queueBinding = (QueueBinding) embeddedActiveMQ.getActiveMQServer()
      .getPostOffice()
      .getBinding(SimpleString.toSimpleString(queueName));
   if (queueBinding == null) {
      return null;
   }
   return queueBinding.getQueue();
}
/**
 * Lists the names (as plain strings) of all queues currently bound
 * to {@code address} on the embedded server.
 */
private List<String> listQueuesNamesForAddress(EmbeddedActiveMQ embeddedActiveMQ, String address) throws Exception {
   return embeddedActiveMQ.getActiveMQServer()
      .getPostOffice()
      .listQueuesForAddress(SimpleString.toSimpleString(address))
      .stream()
      .map(queue -> queue.getName().toString())
      .collect(Collectors.toList());
}
}
| |
package com.sms4blood.emergencyhealthservices;
import android.content.Context;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.location.LocationManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.telephony.SmsManager;
import android.util.Log;
import android.view.View;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.Toast;
import com.sms4blood.emergencyhealthservices.app.activities.BaseActivity;
import com.sms4blood.emergencyhealthservices.io.ReverseGeocoder;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
/**
* Created by Manish on 24-04-2015.
*/
/**
 * Registration screen for medical shops: pre-fills the address fields by reverse
 * geocoding the device location, validates the form, then registers the shop either
 * over HTTP (when a network is available) or via an SMS fallback.
 *
 * Created by Manish on 24-04-2015.
 */
public class Medicalshop extends BaseActivity implements ReverseGeocoder.AddressDecodeListener {
    EditText e1, e2, e3, e4, s3, a4, a5; // name, address, pincode, contact no, state, district, city
    Person1 person1;
    int a = 0, b; // a == 1 once the authorization checkbox has been ticked
    CheckBox ch;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.medicalshop);
        e1 = (EditText) findViewById(R.id.editTextReg1);//Hospital Name
        e2 = (EditText) findViewById(R.id.editTextReg112);//Hospital Address
        e3 = (EditText) findViewById(R.id.editTextReg223);//pincode
        e4 = (EditText) findViewById(R.id.editTextReg2234);//Contact number
        ch = (CheckBox) findViewById(R.id.checkBox);// CHECKBOX
        s3 = (EditText) findViewById(R.id.state3);//state
        a4 = (EditText) findViewById(R.id.district3);//district
        a5 = (EditText) findViewById(R.id.city3);//city
        if (isNetworkAvailable()) {
            // Pre-fill the address widgets asynchronously from the current location.
            showProgressDialog("Please Wait...");
            ReverseGeocoder addressDecoder = new ReverseGeocoder(this);
            addressDecoder.execute();
        }
    }

    /**
     * Callback from {@link ReverseGeocoder}: populates the address widgets
     * from the first decoded address, if any.
     */
    @Override
    public void onAddressDecoded(List<Address> addresses) {
        dismissProgressDialog();
        if (addresses != null && addresses.size() > 0) {
            Address first = addresses.get(0);
            e2.setText(first.getThoroughfare());
            e3.setText(first.getPostalCode());
            a5.setText(first.getLocality());
            s3.setText(first.getAdminArea());
            a4.setText(first.getSubAdminArea());
        }
    }

    /**
     * Posts the registration data as a form-encoded request and extracts the
     * "message" field from the JSON response.
     *
     * @param url     registration endpoint
     * @param person1 registration data to submit
     * @return the server's "message" string, or "" when the response could not be parsed
     */
    public static String POST(String url, Person1 person1) {
        InputStream inputStream = null;
        String result = "";
        String response = "";
        HttpClient httpclient = new DefaultHttpClient();
        HttpPost httppost = new HttpPost(url);
        try {
            // Build the form body (8 fields, so size the list accordingly).
            List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>(8);
            nameValuePairs.add(new BasicNameValuePair("typeSel", "MEDICAL_SHOP"));
            nameValuePairs.add(new BasicNameValuePair("userName", person1.getName()));
            nameValuePairs.add(new BasicNameValuePair("city", person1.getcity()));
            nameValuePairs.add(new BasicNameValuePair("state", person1.getstate()));
            nameValuePairs.add(new BasicNameValuePair("address", person1.getaddress()));
            nameValuePairs.add(new BasicNameValuePair("pincode", person1.getpinno()));
            nameValuePairs.add(new BasicNameValuePair("mobileNo", person1.getMobileno()));
            nameValuePairs.add(new BasicNameValuePair("optionalData1", person1.getDistrict()));
            httppost.setEntity(new UrlEncodedFormEntity(nameValuePairs));
            httppost.setHeader("Content-Type", "application/x-www-form-urlencoded");
            // Execute the POST request and read the response body.
            HttpResponse httpResponse = httpclient.execute(httppost);
            inputStream = httpResponse.getEntity().getContent();
            if (inputStream != null)
                result = convertInputStreamToString(inputStream);
            else
                result = "Did not work!";
        } catch (Exception e) {
            // getLocalizedMessage() can be null (e.g. for NPEs); String.valueOf avoids a
            // secondary NPE inside Log.d that would crash the calling AsyncTask.
            Log.d("InputStream", String.valueOf(e.getLocalizedMessage()));
        } finally {
            // Don't leak the connection's stream if reading failed part-way through.
            if (inputStream != null) {
                try {
                    inputStream.close();
                } catch (IOException ignored) {
                    // nothing useful can be done if close fails
                }
            }
        }
        try {
            JSONObject jObject = new JSONObject(result);
            response = jObject.getString("message");
        } catch (JSONException e) {
            // Non-JSON (or error) response: log it instead of silently swallowing,
            // then fall through and return the empty response as before.
            Log.d("Medicalshop", "Could not parse registration response: " + result);
        }
        return response;
    }

    /**
     * Gathers the form fields into a {@link Person1} off the UI thread, performs the
     * HTTP registration, and shows the server's response message when done.
     */
    private class HttpAsyncTask extends AsyncTask<String, Void, String> {
        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            showProgressDialog("Please Wait...");
        }

        @Override
        protected String doInBackground(String... urls) {
            person1 = new Person1();
            person1.setName(e1.getText().toString());
            person1.setcity(a5.getText().toString());
            person1.setstate(s3.getText().toString());
            person1.setaddress(e2.getText().toString());
            person1.setpinno(e3.getText().toString());
            person1.setMobileno(e4.getText().toString());
            person1.setdistrict(a4.getText().toString());
            return POST(urls[0], person1);
        }

        // onPostExecute displays the results of the AsyncTask.
        @Override
        protected void onPostExecute(String result) {
            dismissProgressDialog();
            Toast.makeText(getBaseContext(), result, Toast.LENGTH_LONG).show();
            finishAfterSuccessfulRegistration(result);
        }
    }

    /**
     * @return true when the mandatory fields (name, address, pincode, mobile no) are all non-blank
     */
    private boolean validate() {
        if (e1.getText().toString().trim().equals(""))
            return false;
        else if (e2.getText().toString().trim().equals(""))
            return false;
        else if (e3.getText().toString().trim().equals(""))
            return false;
        else if (e4.getText().toString().trim().equals(""))
            return false;
        else
            return true;
    }

    /** onClick handler for the authorization checkbox; mirrors its state into {@code a}. */
    public void item(View v) {
        if (((CheckBox) v).isChecked())
            a = 1;
        else
            a = 0;
    }

    /**
     * Reads the stream to exhaustion, concatenating lines WITHOUT separators
     * (preserving the original behavior relied on by {@link #POST}), then closes it.
     */
    private static String convertInputStreamToString(InputStream inputStream) throws IOException {
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
        StringBuilder result = new StringBuilder(); // avoid O(n^2) String concatenation
        try {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                result.append(line);
            }
        } finally {
            bufferedReader.close(); // also closes the underlying input stream
        }
        return result.toString();
    }

    /**
     * onClick handler for the submit button: validates the form, then registers via
     * HTTP when a network is available or falls back to an SMS registration otherwise.
     */
    public void submit(View v) {
        if (!validate())
            Toast.makeText(getBaseContext(), "Name,Address,Pincode,mobile no cannot be empty!", Toast.LENGTH_LONG).show();
        else {
            if (e4.getText().toString().length() < 10) {
                Toast.makeText(getBaseContext(), "Enter 10 Digit Mobile Number!", Toast.LENGTH_LONG).show();
            } else if (e3.getText().toString().length() < 6) {
                Toast.makeText(getBaseContext(), "Enter 6 Digit PIN CODE!", Toast.LENGTH_LONG).show();
            } else if (e4.getText().toString().length() == 10 && e3.getText().toString().length() == 6) {
                // NOTE(review): inputs LONGER than 10 digits / 6 digits are silently ignored here
                // (no toast, no submit) — preserved as-is; confirm whether that is intended.
                if (a == 1) {
                    if (isNetworkAvailable()) {
                        new HttpAsyncTask().execute("http://sms4blood.s156.eatj.com/registration/donor.json");
                    } else {
                        // Offline fallback: register via SMS through the service gateway number.
                        SmsManager smsManager = SmsManager.getDefault();
                        smsManager.sendTextMessage("+919664172929", null, "REG MEDSHP " + e3.getText().toString() + " " + e1.getText().toString() + "," + e2.getText().toString(), null, null);
                    }
                } else {
                    Toast.makeText(this, "PLEASE CHECKED THE AUTHORIZATION", Toast.LENGTH_LONG).show();
                }
            }
        }
    }
}
| |
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.core5.http.impl.io;
import java.io.IOException;
import java.net.Socket;
import org.apache.hc.core5.annotation.Contract;
import org.apache.hc.core5.annotation.ThreadingBehavior;
import org.apache.hc.core5.http.ClassicHttpRequest;
import org.apache.hc.core5.http.ClassicHttpResponse;
import org.apache.hc.core5.http.ContentLengthStrategy;
import org.apache.hc.core5.http.config.CharCodingConfig;
import org.apache.hc.core5.http.config.Http1Config;
import org.apache.hc.core5.http.impl.CharCodingSupport;
import org.apache.hc.core5.http.io.HttpConnectionFactory;
import org.apache.hc.core5.http.io.HttpMessageParserFactory;
import org.apache.hc.core5.http.io.HttpMessageWriterFactory;
import org.apache.hc.core5.http.io.ResponseOutOfOrderStrategy;
/**
* Default factory for {@link org.apache.hc.core5.http.io.HttpClientConnection}s.
*
* @since 4.3
*/
@Contract(threading = ThreadingBehavior.IMMUTABLE_CONDITIONAL)
public class DefaultBHttpClientConnectionFactory
        implements HttpConnectionFactory<DefaultBHttpClientConnection> {

    private final Http1Config http1Config;
    private final CharCodingConfig charCodingConfig;
    private final ContentLengthStrategy incomingContentStrategy;
    private final ContentLengthStrategy outgoingContentStrategy;
    private final ResponseOutOfOrderStrategy responseOutOfOrderStrategy;
    private final HttpMessageWriterFactory<ClassicHttpRequest> requestWriterFactory;
    private final HttpMessageParserFactory<ClassicHttpResponse> responseParserFactory;

    private DefaultBHttpClientConnectionFactory(
            final Http1Config http1Config,
            final CharCodingConfig charCodingConfig,
            final ContentLengthStrategy incomingContentStrategy,
            final ContentLengthStrategy outgoingContentStrategy,
            final ResponseOutOfOrderStrategy responseOutOfOrderStrategy,
            final HttpMessageWriterFactory<ClassicHttpRequest> requestWriterFactory,
            final HttpMessageParserFactory<ClassicHttpResponse> responseParserFactory) {
        // Fall back to the library defaults when no explicit configuration is supplied.
        if (http1Config != null) {
            this.http1Config = http1Config;
        } else {
            this.http1Config = Http1Config.DEFAULT;
        }
        if (charCodingConfig != null) {
            this.charCodingConfig = charCodingConfig;
        } else {
            this.charCodingConfig = CharCodingConfig.DEFAULT;
        }
        // The strategies and factories below may legitimately be null; the connection
        // they are handed to applies its own defaults in that case.
        this.incomingContentStrategy = incomingContentStrategy;
        this.outgoingContentStrategy = outgoingContentStrategy;
        this.responseOutOfOrderStrategy = responseOutOfOrderStrategy;
        this.requestWriterFactory = requestWriterFactory;
        this.responseParserFactory = responseParserFactory;
    }

    /**
     * Creates a factory with explicit content-length strategies and message
     * writer/parser factories, and no out-of-order response strategy.
     */
    public DefaultBHttpClientConnectionFactory(
            final Http1Config http1Config,
            final CharCodingConfig charCodingConfig,
            final ContentLengthStrategy incomingContentStrategy,
            final ContentLengthStrategy outgoingContentStrategy,
            final HttpMessageWriterFactory<ClassicHttpRequest> requestWriterFactory,
            final HttpMessageParserFactory<ClassicHttpResponse> responseParserFactory) {
        this(
                http1Config,
                charCodingConfig,
                incomingContentStrategy,
                outgoingContentStrategy,
                null,
                requestWriterFactory,
                responseParserFactory);
    }

    /**
     * Creates a factory with custom message writer/parser factories and default
     * content-length handling.
     */
    public DefaultBHttpClientConnectionFactory(
            final Http1Config http1Config,
            final CharCodingConfig charCodingConfig,
            final HttpMessageWriterFactory<ClassicHttpRequest> requestWriterFactory,
            final HttpMessageParserFactory<ClassicHttpResponse> responseParserFactory) {
        this(http1Config, charCodingConfig, null, null, requestWriterFactory, responseParserFactory);
    }

    /**
     * Creates a factory with the given protocol and char-coding configuration and
     * defaults for everything else.
     */
    public DefaultBHttpClientConnectionFactory(
            final Http1Config http1Config,
            final CharCodingConfig charCodingConfig) {
        this(http1Config, charCodingConfig, null, null, null, null);
    }

    /**
     * Creates a factory with defaults for every setting.
     */
    public DefaultBHttpClientConnectionFactory() {
        this(null, null, null, null, null, null);
    }

    /**
     * Creates a new {@link DefaultBHttpClientConnection} configured from this
     * factory and bound to the given socket.
     */
    @Override
    public DefaultBHttpClientConnection createConnection(final Socket socket) throws IOException {
        final DefaultBHttpClientConnection connection = new DefaultBHttpClientConnection(
                this.http1Config,
                CharCodingSupport.createDecoder(this.charCodingConfig),
                CharCodingSupport.createEncoder(this.charCodingConfig),
                this.incomingContentStrategy,
                this.outgoingContentStrategy,
                this.responseOutOfOrderStrategy,
                this.requestWriterFactory,
                this.responseParserFactory);
        connection.bind(socket);
        return connection;
    }

    /**
     * Create a new {@link Builder}.
     *
     * @since 5.1
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * Builder for {@link DefaultBHttpClientConnectionFactory}.
     *
     * @since 5.1
     */
    public static final class Builder {

        private Http1Config http1Config;
        private CharCodingConfig charCodingConfig;
        private ContentLengthStrategy incomingContentLengthStrategy;
        private ContentLengthStrategy outgoingContentLengthStrategy;
        private ResponseOutOfOrderStrategy responseOutOfOrderStrategy;
        private HttpMessageWriterFactory<ClassicHttpRequest> requestWriterFactory;
        private HttpMessageParserFactory<ClassicHttpResponse> responseParserFactory;

        private Builder() {
        }

        /** Sets the HTTP/1.1 protocol configuration. */
        public Builder http1Config(final Http1Config http1Config) {
            this.http1Config = http1Config;
            return this;
        }

        /** Sets the character coding configuration. */
        public Builder charCodingConfig(final CharCodingConfig charCodingConfig) {
            this.charCodingConfig = charCodingConfig;
            return this;
        }

        /** Sets the strategy used to determine the length of inbound message bodies. */
        public Builder incomingContentLengthStrategy(final ContentLengthStrategy incomingContentLengthStrategy) {
            this.incomingContentLengthStrategy = incomingContentLengthStrategy;
            return this;
        }

        /** Sets the strategy used to determine the length of outbound message bodies. */
        public Builder outgoingContentLengthStrategy(final ContentLengthStrategy outgoingContentLengthStrategy) {
            this.outgoingContentLengthStrategy = outgoingContentLengthStrategy;
            return this;
        }

        /** Sets the strategy for handling early (out-of-order) responses. */
        public Builder responseOutOfOrderStrategy(final ResponseOutOfOrderStrategy responseOutOfOrderStrategy) {
            this.responseOutOfOrderStrategy = responseOutOfOrderStrategy;
            return this;
        }

        /** Sets the factory producing request writers. */
        public Builder requestWriterFactory(
                final HttpMessageWriterFactory<ClassicHttpRequest> requestWriterFactory) {
            this.requestWriterFactory = requestWriterFactory;
            return this;
        }

        /** Sets the factory producing response parsers. */
        public Builder responseParserFactory(
                final HttpMessageParserFactory<ClassicHttpResponse> responseParserFactory) {
            this.responseParserFactory = responseParserFactory;
            return this;
        }

        /** Builds the immutable connection factory. */
        public DefaultBHttpClientConnectionFactory build() {
            return new DefaultBHttpClientConnectionFactory(
                    http1Config,
                    charCodingConfig,
                    incomingContentLengthStrategy,
                    outgoingContentLengthStrategy,
                    responseOutOfOrderStrategy,
                    requestWriterFactory,
                    responseParserFactory);
        }
    }
}
| |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.os;
import android.util.Log;
import java.io.FileDescriptor;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
* MemoryFile is a wrapper for the Linux ashmem driver.
* MemoryFiles are backed by shared memory, which can be optionally
* set to be purgeable.
* Purgeable files may have their contents reclaimed by the kernel
* in low memory conditions (only if allowPurging is set to true).
* After a file is purged, attempts to read or write the file will
* cause an IOException to be thrown.
*/
public class MemoryFile
{
// Log tag used by this class.
private static String TAG = "MemoryFile";
// mmap(2) protection flags from <sys/mman.h>
private static final int PROT_READ = 0x1;
private static final int PROT_WRITE = 0x2;
// Creates a new ashmem region of the given length and returns its file descriptor.
private static native FileDescriptor native_open(String name, int length) throws IOException;
// returns memory address for ashmem region
private static native long native_mmap(FileDescriptor fd, int length, int mode)
throws IOException;
// Unmaps a previously mmap'ed region.
private static native void native_munmap(long addr, int length) throws IOException;
// Releases the ashmem file descriptor.
private static native void native_close(FileDescriptor fd);
// Reads count bytes from the mapped region into buffer.
// NOTE(review): isUnpinned presumably mirrors mAllowPurging at the call site — confirm at callers.
private static native int native_read(FileDescriptor fd, long address, byte[] buffer,
int srcOffset, int destOffset, int count, boolean isUnpinned) throws IOException;
// Writes count bytes from buffer into the mapped region (same isUnpinned caveat as native_read).
private static native void native_write(FileDescriptor fd, long address, byte[] buffer,
int srcOffset, int destOffset, int count, boolean isUnpinned) throws IOException;
// Pins/unpins the ashmem region; per the class doc, unpinned (purgeable) regions
// may have their contents reclaimed by the kernel under memory pressure.
private static native void native_pin(FileDescriptor fd, boolean pin) throws IOException;
// Returns the size of the ashmem region backing the given descriptor.
private static native int native_get_size(FileDescriptor fd) throws IOException;
private FileDescriptor mFD; // ashmem file descriptor
private long mAddress; // address of ashmem memory
private int mLength; // total length of our ashmem region
private boolean mAllowPurging = false; // true if our ashmem region is unpinned
/**
* Allocates a new ashmem region. The region is initially not purgable.
*
* @param name optional name for the file (can be null).
* @param length of the memory file in bytes.
* @throws IOException if the memory file could not be created.
*/
public MemoryFile(String name, int length) throws IOException {
mLength = length;
mFD = native_open(name, length);
if (length > 0) {
mAddress = native_mmap(mFD, length, PROT_READ | PROT_WRITE);
} else {
mAddress = 0;
}
}
/**
* Closes the memory file. If there are no other open references to the memory
* file, it will be deleted.
*/
public void close() {
deactivate();
if (!isClosed()) {
native_close(mFD);
}
}
/**
* Unmaps the memory file from the process's memory space, but does not close it.
* After this method has been called, read and write operations through this object
* will fail, but {@link #getFileDescriptor()} will still return a valid file descriptor.
*
* @hide
*/
void deactivate() {
if (!isDeactivated()) {
try {
native_munmap(mAddress, mLength);
mAddress = 0;
} catch (IOException ex) {
Log.e(TAG, ex.toString());
}
}
}
/**
* Checks whether the memory file has been deactivated.
*/
private boolean isDeactivated() {
return mAddress == 0;
}
/**
* Checks whether the memory file has been closed.
*/
private boolean isClosed() {
return !mFD.valid();
}
@Override
protected void finalize() {
if (!isClosed()) {
Log.e(TAG, "MemoryFile.finalize() called while ashmem still open");
close();
}
}
/**
* Returns the length of the memory file.
*
* @return file length.
*/
public int length() {
return mLength;
}
/**
* Is memory file purging enabled?
*
* @return true if the file may be purged.
*/
public boolean isPurgingAllowed() {
return mAllowPurging;
}
/**
* Enables or disables purging of the memory file.
*
* @param allowPurging true if the operating system can purge the contents
* of the file in low memory situations
* @return previous value of allowPurging
*/
synchronized public boolean allowPurging(boolean allowPurging) throws IOException {
boolean oldValue = mAllowPurging;
if (oldValue != allowPurging) {
native_pin(mFD, !allowPurging);
mAllowPurging = allowPurging;
}
return oldValue;
}
/**
* Creates a new InputStream for reading from the memory file.
*
@return InputStream
*/
public InputStream getInputStream() {
return new MemoryInputStream();
}
/**
* Creates a new OutputStream for writing to the memory file.
*
@return OutputStream
*/
public OutputStream getOutputStream() {
return new MemoryOutputStream();
}
/**
* Reads bytes from the memory file.
* Will throw an IOException if the file has been purged.
*
* @param buffer byte array to read bytes into.
* @param srcOffset offset into the memory file to read from.
* @param destOffset offset into the byte array buffer to read into.
* @param count number of bytes to read.
* @return number of bytes read.
* @throws IOException if the memory file has been purged or deactivated.
*/
public int readBytes(byte[] buffer, int srcOffset, int destOffset, int count)
throws IOException {
if (isDeactivated()) {
throw new IOException("Can't read from deactivated memory file.");
}
if (destOffset < 0 || destOffset > buffer.length || count < 0
|| count > buffer.length - destOffset
|| srcOffset < 0 || srcOffset > mLength
|| count > mLength - srcOffset) {
throw new IndexOutOfBoundsException();
}
return native_read(mFD, mAddress, buffer, srcOffset, destOffset, count, mAllowPurging);
}
/**
* Write bytes to the memory file.
* Will throw an IOException if the file has been purged.
*
* @param buffer byte array to write bytes from.
* @param srcOffset offset into the byte array buffer to write from.
* @param destOffset offset into the memory file to write to.
* @param count number of bytes to write.
* @throws IOException if the memory file has been purged or deactivated.
*/
public void writeBytes(byte[] buffer, int srcOffset, int destOffset, int count)
throws IOException {
if (isDeactivated()) {
throw new IOException("Can't write to deactivated memory file.");
}
if (srcOffset < 0 || srcOffset > buffer.length || count < 0
|| count > buffer.length - srcOffset
|| destOffset < 0 || destOffset > mLength
|| count > mLength - destOffset) {
throw new IndexOutOfBoundsException();
}
native_write(mFD, mAddress, buffer, srcOffset, destOffset, count, mAllowPurging);
}
/**
* Gets a FileDescriptor for the memory file.
*
* The returned file descriptor is not duplicated.
*
* @throws IOException If the memory file has been closed.
*
* @hide
*/
public FileDescriptor getFileDescriptor() throws IOException {
return mFD;
}
/**
* Returns the size of the memory file that the file descriptor refers to,
* or -1 if the file descriptor does not refer to a memory file.
*
* @throws IOException If <code>fd</code> is not a valid file descriptor.
*
* @hide
*/
public static int getSize(FileDescriptor fd) throws IOException {
return native_get_size(fd);
}
private class MemoryInputStream extends InputStream {
private int mMark = 0;
private int mOffset = 0;
private byte[] mSingleByte;
@Override
public int available() throws IOException {
if (mOffset >= mLength) {
return 0;
}
return mLength - mOffset;
}
@Override
public boolean markSupported() {
return true;
}
@Override
public void mark(int readlimit) {
mMark = mOffset;
}
@Override
public void reset() throws IOException {
mOffset = mMark;
}
@Override
public int read() throws IOException {
if (mSingleByte == null) {
mSingleByte = new byte[1];
}
int result = read(mSingleByte, 0, 1);
if (result != 1) {
return -1;
}
return mSingleByte[0];
}
@Override
public int read(byte buffer[], int offset, int count) throws IOException {
if (offset < 0 || count < 0 || offset + count > buffer.length) {
// readBytes() also does this check, but we need to do it before
// changing count.
throw new IndexOutOfBoundsException();
}
count = Math.min(count, available());
if (count < 1) {
return -1;
}
int result = readBytes(buffer, mOffset, offset, count);
if (result > 0) {
mOffset += result;
}
return result;
}
@Override
public long skip(long n) throws IOException {
if (mOffset + n > mLength) {
n = mLength - mOffset;
}
mOffset += n;
return n;
}
}
private class MemoryOutputStream extends OutputStream {
private int mOffset = 0;
private byte[] mSingleByte;
@Override
public void write(byte buffer[], int offset, int count) throws IOException {
writeBytes(buffer, offset, mOffset, count);
mOffset += count;
}
@Override
public void write(int oneByte) throws IOException {
if (mSingleByte == null) {
mSingleByte = new byte[1];
}
mSingleByte[0] = (byte)oneByte;
write(mSingleByte, 0, 1);
}
}
}
| |
package org.edx.mobile.services;
import android.content.Intent;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import com.google.inject.Inject;
import org.edx.mobile.R;
import org.edx.mobile.http.provider.OkHttpClientProvider;
import org.edx.mobile.logger.Logger;
import org.edx.mobile.model.DownloadDescriptor;
import org.edx.mobile.module.analytics.Analytics;
import org.edx.mobile.module.analytics.AnalyticsRegistry;
import org.edx.mobile.module.prefs.PrefManager;
import org.edx.mobile.util.NetworkUtil;
import java.io.IOException;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import roboguice.service.RoboService;
/**
* Created by marcashman on 2014-12-01.
*/
public class DownloadSpeedService extends RoboService {
    private static final String TAG = DownloadSpeedService.class.getCanonicalName();
    // Nanoseconds per second, for converting System.nanoTime() deltas.
    private static final long NS_PER_SEC = 1000000000;
    private static final int BLOCK_SIZE = 4096;
    public static final String EXTRA_FILE_DESC = TAG + ".file_desc";
    public static final String EXTRA_REPORT_PROGRESS = TAG + ".report_progress";
    public static final String EXTRA_KBPS = TAG + ".kbps";
    public static final String EXTRA_SECONDS = TAG + ".seconds";
    public static final String EXTRA_ERROR = TAG + ".error";
    public static final String ACTION_DOWNLOAD_DONE = TAG + ".download_done";
    private static final int RUN_SPEED_TEST_MESSAGE = 5555;
    // Default only; overwritten in onCreate() from resources.
    private int DELAY_IN_MILLISECONDS = 5000;
    private static final Logger logger = new Logger(DownloadSpeedService.class);
    @Inject
    private OkHttpClientProvider okHttpClientProvider;
    @Inject
    private AnalyticsRegistry analyticsRegistry;
    SpeedTestHandler messageHandler;
    Timer timer = null;
    TimerTask timerTask = null;
    /**
     * Extracts the {@link DownloadDescriptor} from the intent and hands it to the
     * background handler thread to run (or schedule) a speed test.
     */
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        if(intent != null) {
            Message msg = messageHandler.obtainMessage();
            DownloadDescriptor descriptor = intent.getParcelableExtra(EXTRA_FILE_DESC);
            if (descriptor != null) {
                msg.obj = descriptor;
                msg.what = RUN_SPEED_TEST_MESSAGE;
                messageHandler.sendMessage(msg);
            } else {
                logger.warn("missing file description");
            }
        }
        return START_NOT_STICKY;
    }
    /** This service is start-only; binding is not supported. */
    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }
    @Override
    public void onCreate() {
        startThread();
        DELAY_IN_MILLISECONDS = getResources().getInteger(R.integer.delay_speed_test_in_milliseconds);
        super.onCreate();
    }
    /** Starts the background handler thread that runs the speed tests. */
    private void startThread(){
        HandlerThread thread = new HandlerThread("SpeedTestThread", android.os.Process.THREAD_PRIORITY_BACKGROUND);
        thread.start();
        Looper serviceLooper = thread.getLooper();
        messageHandler = new SpeedTestHandler(serviceLooper);
    }
    /**
     * Downloads the test file asynchronously and records the observed
     * throughput (in KB/s) in preferences and analytics.
     */
    private synchronized void performDownload(DownloadDescriptor file) {
        final long startTime;
        try {
            startTime = System.nanoTime();
            OkHttpClient client = okHttpClientProvider.getNonOAuthBased().newBuilder()
                    .connectTimeout(getResources().getInteger(
                            R.integer.speed_test_timeout_in_milliseconds), TimeUnit.MILLISECONDS)
                    .build();
            Request request = new Request.Builder()
                    .url(file.getUrl())
                    .build();
            client.newCall(request).enqueue(new Callback() {
                @Override
                public void onFailure(Call call, IOException throwable) {
                    logger.error(throwable);
                    //If it times out, set a low value for download speed
                    setCurrentDownloadSpeed(0.01f);
                }
                @Override
                public void onResponse(Call call, Response response) throws IOException {
                    if (!response.isSuccessful()) {
                        logger.debug("Download Speed Test Failed");
                    } else {
                        // BUGFIX: measure the raw byte count of the payload;
                        // string().length() counted decoded characters, which
                        // under-reports for multi-byte encodings.
                        long length = response.body().bytes().length;
                        // BUGFIX: cast before dividing. The original long/long
                        // division truncated to whole seconds, so any download
                        // finishing in under one second yielded seconds == 0
                        // and the measurement was silently dropped.
                        double seconds = (System.nanoTime() - startTime) / (double) NS_PER_SEC;
                        if( seconds > 0 ) {
                            final float downloadSpeedKps = (float) ((length / seconds) / 1024);
                            setCurrentDownloadSpeed(downloadSpeedKps);
                            reportDownloadSpeed(downloadSpeedKps);
                        }
                    }
                }
            });
        }catch (Exception ex){
            logger.error(ex);
        }
    }
    /** Reports the measured speed to analytics, tagged by connection type. */
    private void reportDownloadSpeed(float downloadSpeedKps){
        try{
            if (NetworkUtil.isConnectedWifi(DownloadSpeedService.this)) {
                analyticsRegistry.trackUserConnectionSpeed(Analytics.Values.WIFI, downloadSpeedKps);
            } else if (NetworkUtil.isConnectedMobile(DownloadSpeedService.this)) {
                analyticsRegistry.trackUserConnectionSpeed(Analytics.Values.CELL_DATA, downloadSpeedKps);
            }
        }catch(Exception e){
            logger.error(e);
        }
    }
    /** Persists the most recent measurement for later reads by other components. */
    private void setCurrentDownloadSpeed(float downloadSpeedKps){
        PrefManager manager = new PrefManager(this, PrefManager.Pref.WIFI);
        manager.put(PrefManager.Key.SPEED_TEST_KBPS, downloadSpeedKps);
    }
    /** Handler running on the background thread; dispatches speed-test requests. */
    public class SpeedTestHandler extends Handler {
        public SpeedTestHandler(Looper looper) {
            super(looper);
        }
        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            int messageType = msg.what;
            if(messageType == RUN_SPEED_TEST_MESSAGE){
                final DownloadDescriptor file = (DownloadDescriptor) msg.obj;
                if(file != null){
                    scheduleNewDownload(file);
                }
            }
        }
    }
    /**
     * Runs the download immediately when forced, otherwise schedules it after a
     * delay, cancelling any previously pending test first.
     */
    private void scheduleNewDownload(final DownloadDescriptor file) {
        if(timerTask != null) {
            timerTask.cancel();
            timer.cancel();
            timerTask = null;
            timer = null;
        }
        if(file.shouldForceDownload()) {
            performDownload(file);
        } else {
            timerTask = new TimerTask() {
                @Override
                public void run() {
                    performDownload(file);
                }
            };
            timer = new Timer();
            timer.schedule(timerTask, DELAY_IN_MILLISECONDS);
        }
    }
}
| |
/***********************************************************************************************************************
*
* Akula v2: A Toolset for Experimenting and Developing Thread Placement Algorithms on Multicore Systems
* ==========================================
*
* Copyright (C) 2014 by Evgeny Vinnik and Sergey Blagodurov
*
***********************************************************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
**********************************************************************************************************************/
package com.synar.akula;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.monitor.FileAlterationListener;
import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
import org.apache.commons.io.monitor.FileAlterationMonitor;
import org.apache.commons.io.monitor.FileAlterationObserver;
import au.com.bytecode.opencsv.CSVReader;
import com.synar.akula.hardware.Datacenter;
import com.synar.akula.software.Thread;
import com.synar.akula.utils.Initializer;
/**
 * Entry point of the Akula simulator.
 *
 * Parses the command line, loads the jobs description CSV, builds the
 * simulated datacenter and an initial workload, watches the schedule file's
 * directory for changes, and runs the bootstrap simulation module.
 */
public class Akula
{
    // Kept static so the file listener can stop it when the schedule file
    // is deleted.
    static FileAlterationMonitor monitor;
    public static void main(String[] args)
    {
        // create the command line parser
        CommandLineParser parser = new BasicParser();
        Options options = createOptions();
        HelpFormatter formatter = new HelpFormatter();
        String header = "\nAkula simulator\n----------------\n\nCommand line parameters:";
        String footer = "\nSFU Synar Lab (c) 2014";
        if ((args.length < 1) || (args[0].equals("-h")) || (args[0].equals("--help")))
        {
            formatter.printHelp("akula", header, options, footer, true);
            System.exit(0);
        }
        else
        {
            File jobsFile;
            File outputFile;
            File scheduleFile = null;
            File intermediateFile;
            CSVReader jobsFileReader;
            List<String[]> jobs = null;
            int racksNumber = 40;
            int machinesNumber = 10;
            int domainsNumber = 2;
            int coresNumber = 4;
            // Schedule polling interval in milliseconds; overridable via -p.
            long pollingInterval = 1000;
            try
            {
                // parse the command line arguments
                CommandLine line = parser.parse(options, args);
                {
                    jobsFile = new File(line.getOptionValue('j'));
                    try
                    {
                        jobsFile.getCanonicalPath();
                    }
                    catch (IOException e)
                    {
                        System.out.println("Path to jobs file isn't valid");
                        System.exit(-1);
                    }
                    if (jobsFile.isDirectory())
                    {
                        System.out.println("Path to jobs file shouldn't be a directory");
                        System.exit(-1);
                    }
                    if (!jobsFile.exists())
                    {
                        System.out.println("Jobs file doesn't exist");
                        System.exit(-1);
                    }
                    else
                    {
                        jobsFileReader = new CSVReader(new FileReader(jobsFile), ',', '\"', 0);
                        jobs = jobsFileReader.readAll();
                    }
                }
                {
                    outputFile = new File(line.getOptionValue('o'));
                    try
                    {
                        outputFile.getCanonicalPath();
                    }
                    catch (IOException e)
                    {
                        System.out.println("Path to the output file isn't valid");
                        System.exit(-1);
                    }
                    if (outputFile.isDirectory())
                    {
                        System.out.println("Path for output file shouldn't be a directory");
                        System.exit(-1);
                    }
                }
                {
                    scheduleFile = new File(line.getOptionValue('s'));
                    try
                    {
                        scheduleFile.getCanonicalPath();
                    }
                    catch (IOException e)
                    {
                        System.out.println("Path to the schedule file isn't valid");
                        System.exit(-1);
                    }
                    if (scheduleFile.isDirectory())
                    {
                        System.out.println("Path for schedule file shouldn't be a directory");
                        System.exit(-1);
                    }
                }
                {
                    intermediateFile = new File(line.getOptionValue('i'));
                    try
                    {
                        intermediateFile.getCanonicalPath();
                    }
                    catch (IOException e)
                    {
                        System.out.println("Path to the intermediate file isn't valid");
                        System.exit(-1);
                    }
                    // BUGFIX: this previously tested outputFile (copy-paste
                    // error), so a directory passed via -i was never rejected.
                    if (intermediateFile.isDirectory())
                    {
                        System.out.println("Path for intermediate file shouldn't be a directory");
                        System.exit(-1);
                    }
                }
                String s = "";
                if (line.hasOption('h'))
                {
                    // automatically generate the help statement
                    formatter.printHelp("akula", header, options, footer, true);
                    System.exit(0);
                }
                if (line.hasOption('r'))
                {
                    try
                    {
                        s = line.getOptionValue('r');
                        racksNumber = Integer.parseInt(s);
                    }
                    catch (NumberFormatException e)
                    {
                        throw new ParseException("Not an integer value " + s);
                    }
                }
                if (line.hasOption('m'))
                {
                    try
                    {
                        s = line.getOptionValue('m');
                        machinesNumber = Integer.parseInt(s);
                    }
                    catch (NumberFormatException e)
                    {
                        throw new ParseException("Not an integer value " + s);
                    }
                }
                if (line.hasOption('d'))
                {
                    try
                    {
                        s = line.getOptionValue('d');
                        domainsNumber = Integer.parseInt(s);
                    }
                    catch (NumberFormatException e)
                    {
                        throw new ParseException("Not an integer value " + s);
                    }
                }
                if (line.hasOption('c'))
                {
                    try
                    {
                        s = line.getOptionValue('c');
                        coresNumber = Integer.parseInt(s);
                    }
                    catch (NumberFormatException e)
                    {
                        throw new ParseException("Not an integer value " + s);
                    }
                }
                // BUGFIX: -p was documented in createOptions() but never
                // parsed; the polling interval was hard-coded to 1000 ms.
                if (line.hasOption('p'))
                {
                    try
                    {
                        s = line.getOptionValue('p');
                        pollingInterval = Long.parseLong(s);
                    }
                    catch (NumberFormatException e)
                    {
                        throw new ParseException("Not an integer value " + s);
                    }
                }
            }
            catch (ParseException exp)
            {
                System.out.println("Wrong parameters:" + exp.getMessage());
                formatter.printHelp("akula", header, options, footer, true);
                System.exit(-1);
            }
            catch (FileNotFoundException e)
            {
                e.printStackTrace();
            }
            catch (IOException e)
            {
                e.printStackTrace();
            }
            // BUGFIX: if the jobs file failed to load (the IOException paths
            // above only print a stack trace), the code below dereferenced
            // jobs unconditionally and crashed with a NullPointerException.
            if (jobs == null)
            {
                System.out.println("Jobs file could not be read");
                System.exit(-1);
            }
            System.out.println("Starting simulation");
            /*
             * Initialize everything needed for a bootstrap simulation.
             */
            // Parse the per-job columns of the CSV into typed lists.
            ArrayList<String> jobsNames = new ArrayList<String>();
            ArrayList<Long> mapInputBytes = new ArrayList<Long>();
            ArrayList<Long> shuffleBytes = new ArrayList<Long>();
            ArrayList<Long> reduceOutputBytes = new ArrayList<Long>();
            ArrayList<Integer> mappersNumber = new ArrayList<Integer>();
            ArrayList<Integer> containerMappersNumber = new ArrayList<Integer>();
            ArrayList<Integer> reducersNumber = new ArrayList<Integer>();
            ArrayList<Integer> containerReducersNumber = new ArrayList<Integer>();
            ArrayList<Integer> containersPerJobNumber = new ArrayList<Integer>();
            ArrayList<Float> shuffleMegabytes = new ArrayList<Float>();
            ArrayList<Integer> commClass = new ArrayList<Integer>();
            ArrayList<Float> commWiseDegradation = new ArrayList<Float>();
            ArrayList<Integer> volumesNumber = new ArrayList<Integer>();
            ArrayList<Integer> contentionClass = new ArrayList<Integer>();
            ArrayList<Float> contentionDegradation = new ArrayList<Float>();
            ArrayList<Float> containerPowerConsumption = new ArrayList<Float>();
            for (int i = 0; i < jobs.size(); i++)
            {
                String[] job = jobs.get(i);
                jobsNames.add(job[0]);
                mapInputBytes.add(Long.parseLong(job[1]));
                shuffleBytes.add(Long.parseLong(job[2]));
                reduceOutputBytes.add(Long.parseLong(job[3]));
                mappersNumber.add(Integer.parseInt(job[4]));
                containerMappersNumber.add(Integer.parseInt(job[5]));
                reducersNumber.add(Integer.parseInt(job[6]));
                containerReducersNumber.add(Integer.parseInt(job[7]));
                containersPerJobNumber.add(Integer.parseInt(job[8]));
                shuffleMegabytes.add(Float.parseFloat(job[9]));
                commClass.add(Integer.parseInt(job[10]));
                commWiseDegradation.add(Float.parseFloat(job[11]));
                volumesNumber.add(Integer.parseInt(job[12]));
                contentionClass.add(Integer.parseInt(job[13]));
                contentionDegradation.add(Float.parseFloat(job[14]));
                containerPowerConsumption.add(Float.parseFloat(job[15]));
            }
            // Read in the the bootstrap data from files and create the lookup tables.
            Initializer init = new Initializer();
            BootstrapDb degrad_matrix = init.getDegradMatrix();
            BootstrapDb solo_matrix = init.getSoloMatrix();
            // Initialize a data center (defaults: 40 racks with 10 machines each,
            // with 2 memory-domains of 4 cores each).
            Datacenter datacenter = new Datacenter(racksNumber, machinesNumber, domainsNumber, coresNumber);
            // Create the initial workload. (4 devil threads MCF and 4 turtle threads gamess).
            // To make this more versatile it is best to create workload files with thread data
            // and a parser which creates workloads by reading the files.
            Thread[] workload = new Thread[8];
            for (int i = 0; i < 4; i++)
            {
                Thread new_thread = new Thread();
                new_thread.mThreadName = "MCF" + i;
                new_thread.mBenchmarkId = 3; //The benchmark id of MCF see initializer.java
                new_thread.mStartTime = i;
                workload[i] = new_thread;
            }
            for (int i = 4; i < 8; i++)
            {
                Thread new_thread = new Thread();
                new_thread.mThreadName = "GAMESS" + (i - 4);
                new_thread.mBenchmarkId = 8; //The benchmark id of GAMESS see initializer.java
                new_thread.mStartTime = i;
                workload[i] = new_thread;
            }
            // The monitor polls the schedule file's directory at the configured
            // interval (default 1000 ms, overridable with -p).
            File folder = new File(scheduleFile.getParent());
            if (!folder.exists())
            {
                // Test to see if monitored folder exists
                throw new RuntimeException("Directory not found: " + scheduleFile.getParent());
            }
            FileAlterationObserver observer = new FileAlterationObserver(scheduleFile.getParent());
            monitor = new FileAlterationMonitor(pollingInterval);
            FileAlterationListener listener = new FileAlterationListenerAdaptor()
            {
                // Is triggered when a file is created in the monitored folder
                @Override
                public void onFileCreate(File file)
                {
                    try
                    {
                        // "file" is the reference to the newly created file
                        System.out.println("File created: "
                                + file.getCanonicalPath());
                    }
                    catch (IOException e)
                    {
                        e.printStackTrace(System.err);
                    }
                }
                // Is triggered when a file is deleted from the monitored folder
                @Override
                public void onFileDelete(File file)
                {
                    try
                    {
                        // "file" is the reference to the removed file
                        System.out.println("File removed: "
                                + file.getCanonicalPath());
                        // "file" does not exists anymore in the location
                        System.out.println("File still exists in location: "
                                + file.exists());
                        // Deleting the schedule file shuts the monitor down.
                        if (file.getName().equals("schedule.txt"))
                        {
                            try
                            {
                                System.out.println("Exiting: "
                                        + file.getCanonicalPath());
                                monitor.stop();
                            }
                            catch (Exception e)
                            {
                                e.printStackTrace();
                            }
                        }
                    }
                    catch (IOException e)
                    {
                        e.printStackTrace(System.err);
                    }
                }
                // Is triggered when a file is changed in the monitored folder
                @Override
                public void onFileChange(File file)
                {
                    try
                    {
                        System.out.println("File changed: "
                                + file.getCanonicalPath());
                    }
                    catch (IOException e)
                    {
                        e.printStackTrace(System.err);
                    }
                }
            };
            observer.addListener(listener);
            monitor.addObserver(observer);
            try
            {
                monitor.start();
            }
            catch (Exception e)
            {
                e.printStackTrace();
            }
            /*
             * Launch the bootstrap module.
             */
            double tick = 1.00;
            Bootstrap mod = new Bootstrap(datacenter, workload, tick, degrad_matrix, solo_matrix);
            mod.run_experiment();
            /*
             * Collect the completed threads and obtain their statistics.
             */
            Stats s = new Stats();
            s.mSolos = solo_matrix;
            s.dumpStats("my_results.txt", workload);
            System.out.println("done");
        }
    }
    /**
     * Builds the command line option set.
     *
     * @return all options accepted by the simulator.
     */
    private static Options createOptions()
    {
        // create the Options
        Options options = new Options();
        //create jobsFileOption
        Option jobsFileOption = OptionBuilder.create('j');
        jobsFileOption.setLongOpt("jobs");
        jobsFileOption.setDescription("Path to file containing jobs");
        jobsFileOption.setRequired(true);
        jobsFileOption.setOptionalArg(false);
        jobsFileOption.setArgs(1);
        jobsFileOption.setArgName("jobs_file");
        //create outputFileOption
        Option outputFileOption = OptionBuilder.create('o');
        outputFileOption.setLongOpt("output");
        outputFileOption.setDescription("Path for file, where results will be stored");
        outputFileOption.setRequired(true);
        outputFileOption.setOptionalArg(false);
        outputFileOption.setArgs(1);
        outputFileOption.setArgName("output_file");
        //create scheduleFileOption
        Option scheduleFileOption = OptionBuilder.create('s');
        scheduleFileOption.setLongOpt("schedule");
        scheduleFileOption.setDescription("Path to file, from which new schedule will be read");
        scheduleFileOption.setRequired(true);
        scheduleFileOption.setOptionalArg(false);
        scheduleFileOption.setArgs(1);
        scheduleFileOption.setArgName("schedule_file");
        //create intermediateFileOption
        Option intermediateFileOption = OptionBuilder.create('i');
        intermediateFileOption.setLongOpt("intermediate");
        intermediateFileOption.setDescription("Path to file, where intermediate state will be printed");
        intermediateFileOption.setRequired(true);
        intermediateFileOption.setOptionalArg(false);
        intermediateFileOption.setArgs(1);
        intermediateFileOption.setArgName("intermediate_file");
        //create racksOption
        Option racksOption = OptionBuilder.create('r');
        racksOption.setLongOpt("racks");
        racksOption.setDescription("Number of racks in the datacenter. Default is 40");
        racksOption.setRequired(false);
        racksOption.setOptionalArg(false);
        racksOption.setArgs(1);
        racksOption.setArgName("number_racks");
        racksOption.setType(Integer.class);
        //create machinesOption
        Option machinesOption = OptionBuilder.create('m');
        machinesOption.setLongOpt("machines");
        machinesOption.setDescription("Number of machines in each rack. Default is 10");
        machinesOption.setRequired(false);
        machinesOption.setOptionalArg(false);
        machinesOption.setArgs(1);
        machinesOption.setArgName("number_machines");
        machinesOption.setType(Integer.class);
        //create domainsOption
        Option domainsOption = OptionBuilder.create('d');
        domainsOption.setLongOpt("domains");
        domainsOption.setDescription("Number of memory domains (CPUs) in each machine. Default is 2");
        domainsOption.setRequired(false);
        domainsOption.setOptionalArg(false);
        domainsOption.setArgs(1);
        domainsOption.setArgName("number_domains");
        domainsOption.setType(Integer.class);
        //create coresOption
        Option coresOption = OptionBuilder.create('c');
        coresOption.setLongOpt("cores");
        coresOption.setDescription("Number of cores in each memory domain (CPU). Default is 4");
        coresOption.setRequired(false);
        coresOption.setOptionalArg(false);
        coresOption.setArgs(1);
        coresOption.setArgName("number_cores");
        coresOption.setType(Integer.class);
        //create pollingOption
        Option pollingOption = OptionBuilder.create('p');
        pollingOption.setLongOpt("polling");
        pollingOption.setDescription("Schedule polling interval in milliseconds. Default is 1000 ms");
        pollingOption.setRequired(false);
        pollingOption.setOptionalArg(false);
        pollingOption.setArgs(1);
        pollingOption.setArgName("ms");
        pollingOption.setType(Integer.class);
        //create helpOption
        Option helpOption = OptionBuilder.create('h');
        helpOption.setLongOpt("help");
        helpOption.setDescription("Print this help");
        helpOption.setRequired(false);
        options.addOption(jobsFileOption);
        options.addOption(outputFileOption);
        options.addOption(scheduleFileOption);
        options.addOption(intermediateFileOption);
        options.addOption(racksOption);
        options.addOption(machinesOption);
        options.addOption(domainsOption);
        options.addOption(coresOption);
        // BUGFIX: pollingOption was constructed but never registered, so
        // passing -p caused an unrecognized-option ParseException.
        options.addOption(pollingOption);
        options.addOption(helpOption);
        return options;
    }
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.photos.library.sample.demos.upload;
import com.google.photos.library.sample.demos.upload.tasks.ByteUploadTask;
import com.google.photos.library.sample.demos.upload.tasks.ItemCreationTask;
import com.google.photos.library.v1.PhotosLibraryClient;
import com.google.photos.library.v1.upload.UploadMediaItemRequest;
import com.google.photos.types.proto.MediaItem;
import java.io.File;
import java.util.*;
import java.util.concurrent.*;
/**
* Uploads files to the Google Photos Library API.
*
* <p>Given a set of {@link File}s, this class handles byte upload and media item creation using a
* {@link PhotosLibraryClient}. The bytes of the media files are uploaded in parallel, using the
* {@link #numByteUploadThreads}. Once the {@link #numCreationBatchSize} of successful byte uploads
* has been reached, {@link PhotosLibraryClient#batchCreateMediaItems(List)} is used to create the
* media. Note that this call is NOT made in parallel, as only one call to create media items should
* be made at the same time for a user.
*/
public class Uploader implements AutoCloseable {
/** Number of parallel threads to use when uploading image bytes. */
private final int numByteUploadThreads;
/**
   * Number of media items to include in a call to `mediaItems.batchCreate`. This must not be
* greater than 50. See
* https://developers.google.com/photos/library/guides/upload-media#creating-media-item
*/
private final int numCreationBatchSize;
/** Number of parallel threads to use when uploading image bytes. */
public static final int NUM_BYTE_UPLOAD_THREAD_DEFAULT = 6;
/**
   * Number of media items to include in a call to `mediaItems.batchCreate`. This must not be
* greater than 50. See
* https://developers.google.com/photos/library/guides/upload-media#creating-media-item
*/
public static final int NUM_BATCH_SIZE_DEFAULT = 50;
/** The API client used to make calls to the Google Photos Library API. */
private final PhotosLibraryClient client;
/** Executor that facilitates handling of the upload threads. */
private final ExecutorService uploadExecutor;
/** Service that manages the queue for upload tasks. */
private final CompletionService<ByteUploadTask.ByteUploadResult> uploadService;
/** Successful byte uploads, indexed by their upload token. */
private HashMap<String, ByteUploadTask.ByteUploadResult> successfulUploads;
/** Queue of successful uploads to be created next. */
private final List<ByteUploadTask.ByteUploadResult> creationQueue;
/** Failed byte uploads. */
private final HashMap<File, ByteUploadTask.ByteUploadResult> failedUploads = new HashMap<>();
/** Results of creating media items, indexed by the upload token for each item. */
private HashMap<String, ItemCreationTask.ItemCreationResult> itemCreationResults;
/**
   * Uploader that uploads media using the configured parallel byte upload threads and batch size.
*
* @param client The Google Photos Library API client library client.
* @param numByteUploadThreads The number of parallel byte upload threads to use.
* @param numCreationBatchSize The number of media items to include in a call to
* 'mediaItems.batchCreate'. Can be at most 50.
*/
public Uploader(PhotosLibraryClient client, int numByteUploadThreads, int numCreationBatchSize) {
this.client = client;
this.numByteUploadThreads = numByteUploadThreads;
this.numCreationBatchSize = numCreationBatchSize;
this.creationQueue = new ArrayList<>(numCreationBatchSize);
// Set up the threading and executors for byte uploads.
// Using a fixed number of threads to upload bytes in parallel. Note that this should be
// adjusted for your own
// circumstances.
uploadExecutor = Executors.newFixedThreadPool(this.numByteUploadThreads);
uploadService = new ExecutorCompletionService<>(uploadExecutor);
}
/**
 * Uploader that uploads media using the default options for the number of byte upload threads and
 * batch size. Note that this object must be closed after completion of all calls.
 *
 * @param client The Google Photos Library API client library client.
 * @see #NUM_BYTE_UPLOAD_THREAD_DEFAULT
 * @see #NUM_BATCH_SIZE_DEFAULT
 */
public Uploader(PhotosLibraryClient client) {
  // Delegate to the full constructor with the default thread count and batch size.
  this(client, NUM_BYTE_UPLOAD_THREAD_DEFAULT, NUM_BATCH_SIZE_DEFAULT);
}
/** Shut down the upload executor and its worker threads. */
@Override
public void close() {
  if (uploadExecutor == null) {
    return;
  }
  // Initiate an orderly shutdown; previously submitted tasks still run to completion.
  uploadExecutor.shutdown();
}
/**
 * Upload files to Google Photos using the Google Photos Library API.
 *
 * <p>Uses {@link ByteUploadTask} (which uses {@link
 * PhotosLibraryClient#uploadMediaItem(UploadMediaItemRequest)}) to upload the bytes of the given
 * files. Then, it uses {@link ItemCreationTask} to create the media items (which uses {@link
 * PhotosLibraryClient#batchCreateMediaItems(List)}).
 *
 * @param files Files to upload.
 * @throws InterruptedException If interrupted while waiting for an upload task to complete.
 * @throws ExecutionException If an upload task threw an unexpected exception.
 */
public void uploadFiles(File[] files) throws InterruptedException, ExecutionException {
  if (client == null) {
    System.err.println("Client was not initialized.");
    return;
  }
  successfulUploads = new HashMap<>(files.length);
  itemCreationResults = new HashMap<>(files.length);
  // Schedule and start the uploading of files. This method schedules their execution in the
  // uploadService using the number of threads configured.
  scheduleUploadFileBytes(files);
  System.out.println("All byte uploads tasks have been scheduled.");
  /*
  Loop and wait for tasks to complete on the uploadService, one at a time.
  Once enough uploads have completed, ie. the number of upload tokens matches the configured item
  creation batch size, call the Library API to create the media items.
  Note that calls to create media items are *NOT* done in parallel!
  Creation of media items *MUST* only be done sequentially for each user!
  You should never call `mediaItems.batchCreate` in parallel for the same user, always wait
  until the previous call to create media items for a user has completed before making the next
  call. (It can be called in parallel for different users.)
  */
  for (int finishedResults = 0; finishedResults < files.length; finishedResults++) {
    // Wait until a task is completed and get its result.
    Future<ByteUploadTask.ByteUploadResult> futureResult = uploadService.take();
    ByteUploadTask.ByteUploadResult byteResult = futureResult.get();
    if (byteResult.isOk()) {
      // The bytes were successfully uploaded and an upload token is available.
      successfulUploads.put(byteResult.uploadToken, byteResult);
      // Add it to the queue for the next call to create media items.
      creationQueue.add(byteResult);
    } else {
      // The byte upload failed, collect its result and deal with the error later.
      failedUploads.put(byteResult.file, byteResult);
    }
    // If enough tasks have completed so that a batch is full (or this was the final upload),
    // submit the upload tokens to create media items.
    // The final upload is detected with the loop counter rather than the result-map sizes:
    // the maps are keyed by upload token / File, so duplicate entries would collapse and the
    // size-based check could miss the last (partial) batch.
    if (creationQueue.size() >= numCreationBatchSize || finishedResults == files.length - 1) {
      System.out.println("Starting batch creation call.");
      createMediaItems();
    }
  }
  System.out.println("Execution complete.");
}
/**
 * Call the Library API via an {@link ItemCreationTask} to create media items from the internal
 * queue.
 */
private void createMediaItems() {
  if (creationQueue.isEmpty()) {
    // Nothing queued for creation; every byte upload in this batch may have failed.
    System.err.println("No items to create.");
    return;
  }
  // Drain the queue into a snapshot of the items to create in this batch.
  List<ByteUploadTask.ByteUploadResult> batch = new ArrayList<>(creationQueue);
  creationQueue.clear();
  // Call `mediaItems.batchCreate` on this thread, blocking until it completes and returns the
  // status of each item. This is intentionally sequential: media item creation must never be
  // run in parallel for the same user.
  Map<String, ItemCreationTask.ItemCreationResult> batchResults =
      new ItemCreationTask(client, batch).call();
  // Keep the per-item results for later processing.
  itemCreationResults.putAll(batchResults);
}
/**
 * Schedule a {@link ByteUploadTask} for each file; the tasks call the Library API to upload the
 * media bytes. All API calls are scheduled on the {@link #uploadService}.
 *
 * @param files Files to upload.
 * @see CompletionService#submit(Callable)
 */
private void scheduleUploadFileBytes(File[] files) {
  // Queue every file for upload. Each ByteUploadTask uploads the bytes of one file to the
  // Library API. The ExecutorService runs these tasks in parallel using the thread pool
  // configured in the constructor; the Library API supports parallel byte uploads for the
  // same user.
  for (File file : files) {
    System.out.println("Scheduling byte upload for: " + file.getName());
    uploadService.submit(new ByteUploadTask(client, file));
  }
}
/**
 * Prints the status of the Uploader, including all failed and successful uploads and media item
 * creations.
 */
public void printState() {
  // Report byte uploads that failed outright.
  System.out.println("The following " + failedUploads.size() + " files could not be uploaded:");
  for (ByteUploadTask.ByteUploadResult failed : failedUploads.values()) {
    // If the error was an ApiException there may be additional details worth examining before
    // retrying; here the error is simply printed.
    print(failed.file, failed.error.toString());
  }
  // Walk the item creation results: print successes, collect failures for the report below.
  System.out.println("The following items were successfully created:");
  List<ItemCreationTask.ItemCreationResult> failedCreations = new LinkedList<>();
  for (Map.Entry<String, ItemCreationTask.ItemCreationResult> entry :
      itemCreationResults.entrySet()) {
    ItemCreationTask.ItemCreationResult creation = entry.getValue();
    if (!creation.isOk()) {
      // The item could not be created; keep it for the failure report.
      failedCreations.add(creation);
      continue;
    }
    // The item was successfully created. Print out its details.
    MediaItem item = creation.mediaItem.get();
    print(entry.getKey(), item.getProductUrl());
  }
  // Report failed media item creations, with API error details where available.
  System.out.println("The following " + failedCreations.size() + " files could not be created:");
  for (ItemCreationTask.ItemCreationResult creation : failedCreations) {
    // The bytes were uploaded successfully in the first step, so the original File can be
    // looked up by its upload token.
    File file = successfulUploads.get(creation.uploadToken).file;
    if (creation.status.isPresent()) {
      // The API returned a status that contains some information about the error.
      print(file, creation.status.get().toString());
    } else {
      // No API status available; print the Throwable that caused the failure instead. If it
      // was an ApiException there may be additional details worth examining before retrying.
      print(file, creation.error.toString());
    }
  }
}
/**
 * Print a message to {@link System#out}, prefixed by the file name.
 *
 * @param file Name of this file is used as a prefix for the message.
 * @param message Message to print out.
 */
private static void print(File file, String message) {
  // Delegate to the String overload using the file's name as the prefix.
  print(file.getName(), message);
}
/**
 * Print a message to {@link System#out}, prefixed by a string.
 *
 * @param prefix Prefix printed before the message.
 * @param message Message to print out.
 */
private static void print(String prefix, String message) {
  // %n emits the platform line separator, matching println's behavior.
  System.out.printf("\t%s: %s%n", prefix, message);
}
}
| |
package io.quantumdb.nemesis.backends;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
import com.google.common.base.Strings;
import io.quantumdb.nemesis.structure.ColumnDefinition;
import io.quantumdb.nemesis.structure.Database;
import io.quantumdb.nemesis.structure.Database.Feature;
import io.quantumdb.nemesis.structure.Database.Type;
import io.quantumdb.nemesis.structure.DatabaseCredentials;
import io.quantumdb.nemesis.structure.Table;
import io.quantumdb.nemesis.structure.TableDefinition;
import lombok.extern.slf4j.Slf4j;
import org.junit.After;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@Slf4j
@RunWith(Parameterized.class)
public class StructuralTest {

	/** Name of the table created by {@link #testTableCreation()} and reused by the other tests. */
	public static final String TABLE_NAME = "test";

	/** Database backends (and credentials) that every test in this class runs against. */
	@Parameterized.Parameters(name = "{index} - {0}")
	public static List<Object[]> listParameters() {
		return Arrays.asList(new Object[][] {
				{ Type.POSTGRESQL, new DatabaseCredentials("jdbc:postgresql://localhost", "profiler",
						get("PG_USER", "profiler"), get("PG_PASSWORD", "profiler")) },
				{ Type.MYSQL_55, new DatabaseCredentials("jdbc:mysql://localhost", "nemesis",
						get("MYSQL_USER", "root"), get("MYSQL_PASSWORD", "root")) },
				{ Type.MYSQL_56, new DatabaseCredentials("jdbc:mysql://localhost", "nemesis",
						get("MYSQL_USER", "root"), get("MYSQL_PASSWORD", "root")) }
		});
	}

	/** Returns the value of the given environment variable, or the default when unset or blank. */
	private static String get(String envKey, String defaultValue) {
		String envValue = System.getenv(envKey);
		if (Strings.isNullOrEmpty(envValue)) {
			return defaultValue;
		}
		return envValue;
	}

	private final Database database;
	private final DatabaseCredentials credentials;

	public StructuralTest(Database.Type type, DatabaseCredentials credentials) {
		this.database = type.createBackend();
		this.credentials = credentials;
	}

	/** Connects to the backend and clears it, so every test starts from an empty database. */
	@Before
	public void setUp() throws SQLException {
		database.connect(credentials);
		database.dropContents();
	}

	/** Clears the database again and closes the connection. */
	@After
	public void tearDown() throws SQLException {
		database.dropContents();
		database.close();
	}

	@Test
	public void testTableCreation() throws SQLException {
		database.createTable(new TableDefinition(TABLE_NAME)
				.withColumn(new ColumnDefinition("id", "bigint")
						.isIdentity(true)
						.isNullable(false)
						.isAutoIncrement(true))
				.withColumn(new ColumnDefinition("name", "varchar(255)")
						.isNullable(false)));

		// Use the TABLE_NAME constant consistently instead of repeating the literal.
		Table table = database.getTable(TABLE_NAME);
		Assert.assertNotNull(table.getColumn("id"));
		Assert.assertNotNull(table.getColumn("name"));
	}

	@Test
	public void testTableRenaming() throws SQLException {
		testTableCreation();
		database.getTable(TABLE_NAME)
				.rename("test2");

		Assert.assertFalse(database.hasTable(TABLE_NAME));
		Assert.assertTrue(database.hasTable("test2"));
	}

	@Test
	public void testTableDropping() throws SQLException {
		testTableCreation();
		database.getTable(TABLE_NAME).drop();

		Assert.assertTrue(database.listTables().isEmpty());
	}

	@Test
	public void testAddingNullableColumn() throws SQLException {
		testTableCreation();
		database.getTable(TABLE_NAME).addColumn(new ColumnDefinition("city", "varchar(255)")
				.setNullable(true));

		Assert.assertTrue(database.getTable(TABLE_NAME)
				.getColumn("city")
				.isNullable());
	}

	@Test
	public void testAddingIdentityColumn() throws SQLException {
		testTableCreation();
		database.getTable(TABLE_NAME).addColumn(new ColumnDefinition("other_id", "bigint")
				.setIdentity(true)
				.setNullable(false)
				.setDefaultExpression("'1'"));

		Assert.assertTrue(database.getTable(TABLE_NAME)
				.getColumn("other_id")
				.isIdentity());
	}

	@Test
	public void testAddingAutoIncrementColumn() throws SQLException {
		// Only backends that allow more than one auto-increment column can run this test.
		Assume.assumeTrue(database.supports(Database.Feature.MULTIPLE_AUTO_INCREMENT_COLUMNS));

		testTableCreation();
		database.getTable(TABLE_NAME).addColumn(new ColumnDefinition("sequence_number", "bigint")
				.setAutoIncrement(true)
				.setNullable(false));

		Assert.assertTrue(database.getTable(TABLE_NAME)
				.getColumn("sequence_number")
				.isAutoIncrement());
	}

	@Test
	public void testAddingColumnWithDefaultExpression() throws SQLException {
		testTableCreation();
		database.getTable(TABLE_NAME).addColumn(new ColumnDefinition("blocked", "boolean")
				.setNullable(false)
				.setDefaultExpression("FALSE"));

		// MySQL reports boolean defaults as "0"/"1", PostgreSQL as "false"/"true".
		String defaultExpression = database.getTable(TABLE_NAME)
				.getColumn("blocked")
				.getDefaultExpression();
		Assert.assertTrue("false".equals(defaultExpression) || "0".equals(defaultExpression));
	}

	@Test
	public void testDroppingColumn() throws SQLException {
		testTableCreation();
		database.getTable(TABLE_NAME)
				.getColumn("name")
				.drop();

		Assert.assertFalse(database.getTable(TABLE_NAME)
				.hasColumn("name"));
	}

	@Test
	public void testMakingColumnNullable() throws SQLException {
		testTableCreation();
		database.getTable(TABLE_NAME)
				.getColumn("name")
				.setNullable(true);

		Assert.assertTrue(database.getTable(TABLE_NAME)
				.getColumn("name")
				.isNullable());
	}

	@Test
	public void testMakingColumnNonNullable() throws SQLException {
		testMakingColumnNullable();
		database.getTable(TABLE_NAME)
				.getColumn("name")
				.setNullable(false);

		Assert.assertFalse(database.getTable(TABLE_NAME)
				.getColumn("name")
				.isNullable());
	}

	@Test
	public void testSettingDefaultExpressionOnColumn() throws SQLException {
		testTableCreation();
		database.getTable(TABLE_NAME)
				.getColumn("name")
				.setDefaultExpression("'NONE'");

		Assert.assertNotNull(database.getTable(TABLE_NAME)
				.getColumn("name")
				.getDefaultExpression());
	}

	@Test
	public void testDroppingDefaultExpressionOnColumn() throws SQLException {
		testSettingDefaultExpressionOnColumn();
		database.getTable(TABLE_NAME)
				.getColumn("name")
				.setDefaultExpression(null);

		Assert.assertNull(database.getTable(TABLE_NAME)
				.getColumn("name")
				.getDefaultExpression());
	}

	@Test
	public void testMakingColumnAnIdentityColumn() throws SQLException {
		// NOTE(review): this gates on MULTIPLE_AUTO_INCREMENT_COLUMNS even though it changes
		// identity rather than auto-increment — confirm this is the intended feature flag.
		Assume.assumeTrue(database.supports(Database.Feature.MULTIPLE_AUTO_INCREMENT_COLUMNS));

		testTableCreation();
		database.getTable(TABLE_NAME)
				.getColumn("name")
				.setIdentity(true);

		Assert.assertTrue(database.getTable(TABLE_NAME)
				.getColumn("name")
				.isIdentity());
	}

	@Test
	public void testCreatingConstraint() throws SQLException {
		Assume.assumeTrue(database.supports(Database.Feature.COLUMN_CONSTRAINTS));

		testTableCreation();
		database.getTable(TABLE_NAME)
				.addColumn(new ColumnDefinition("counter", "bigint")
						.setNullable(false));

		database.getTable(TABLE_NAME)
				.createConstraint("simple_constraint", "CHECK", "(counter > 0)");

		Assert.assertTrue(database.getTable(TABLE_NAME)
				.hasConstraint("simple_constraint"));
	}

	@Test
	public void testDroppingConstraint() throws SQLException {
		Assume.assumeTrue(database.supports(Database.Feature.COLUMN_CONSTRAINTS));

		testCreatingConstraint();
		database.getTable(TABLE_NAME)
				.getConstraint("simple_constraint")
				.drop();

		Assert.assertFalse(database.getTable(TABLE_NAME)
				.hasConstraint("simple_constraint"));
	}

	@Test
	public void testCreatingNonUniqueIndex() throws SQLException {
		testTableCreation();
		database.getTable(TABLE_NAME)
				.createIndex("test_name_idx", false, "name");

		Assert.assertNotNull(database.getTable(TABLE_NAME)
				.getIndex("test_name_idx"));
	}

	@Test
	public void testCreatingUniqueIndex() throws SQLException {
		testTableCreation();
		database.getTable(TABLE_NAME)
				.createIndex("test_name_idx", true, "name");

		Assert.assertNotNull(database.getTable(TABLE_NAME)
				.getIndex("test_name_idx"));
	}

	@Test
	public void testDroppingIndex() throws SQLException {
		testCreatingNonUniqueIndex();
		database.getTable(TABLE_NAME)
				.getIndex("test_name_idx")
				.drop();

		Assert.assertFalse(database.getTable(TABLE_NAME)
				.hasIndex("test_name_idx"));
	}

	@Test
	public void testRenamingIndex() throws SQLException {
		Assume.assumeTrue(database.supports(Database.Feature.RENAME_INDEX));

		testCreatingNonUniqueIndex();
		database.getTable(TABLE_NAME)
				.getIndex("test_name_idx")
				.rename("test_name2_idx");

		Assert.assertFalse(database.getTable(TABLE_NAME)
				.hasIndex("test_name_idx"));
		Assert.assertTrue(database.getTable(TABLE_NAME)
				.hasIndex("test_name2_idx"));
	}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* EbsBlockDeviceType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT)
*/
package com.amazon.ec2;
/**
* EbsBlockDeviceType bean class
*/
public class EbsBlockDeviceType
implements org.apache.axis2.databinding.ADBBean{
/* This type was generated from the piece of schema that had
name = EbsBlockDeviceType
Namespace URI = http://ec2.amazonaws.com/doc/2009-10-31/
Namespace Prefix = ns1
*/
/** Returns the XML prefix for the given namespace: "ns1" for the EC2 namespace, else a fresh unique prefix. */
private static java.lang.String generatePrefix(java.lang.String namespace) {
    // Keep the namespace.equals(...) call order so a null namespace still throws NPE,
    // matching the generated code's behavior.
    return namespace.equals("http://ec2.amazonaws.com/doc/2009-10-31/")
            ? "ns1"
            : org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
/**
 * field for SnapshotId
 */
protected java.lang.String localSnapshotId ;

/* This tracker boolean will be used to detect whether the user called the set method
 * for this attribute. It will be used to determine whether to include this field
 * in the serialized XML
 */
protected boolean localSnapshotIdTracker = false ;

/**
 * Auto generated getter method
 * @return java.lang.String the snapshot ID, or null when it was never set
 */
public java.lang.String getSnapshotId(){
    return localSnapshotId;
}
/**
 * Auto generated setter method
 * @param param SnapshotId
 */
public void setSnapshotId(java.lang.String param){
    // The element is serialized exactly when a non-null value has been supplied.
    localSnapshotIdTracker = (param != null);
    this.localSnapshotId = param;
}
/**
 * field for VolumeSize
 */
protected int localVolumeSize ;

/* This tracker boolean will be used to detect whether the user called the set method
 * for this attribute. It will be used to determine whether to include this field
 * in the serialized XML. Integer.MIN_VALUE acts as the "unset" sentinel for this
 * primitive field.
 */
protected boolean localVolumeSizeTracker = false ;

/**
 * Auto generated getter method
 * @return int the volume size; Integer.MIN_VALUE when unset
 */
public int getVolumeSize(){
    return localVolumeSize;
}
/**
 * Auto generated setter method
 * @param param VolumeSize (Integer.MIN_VALUE marks the field as unset)
 */
public void setVolumeSize(int param){
    // Integer.MIN_VALUE is the generated "nil" sentinel for this primitive field;
    // any other value marks the field as set so it gets serialized.
    localVolumeSizeTracker = (param != java.lang.Integer.MIN_VALUE);
    this.localVolumeSize = param;
}
/**
 * field for DeleteOnTermination
 */
protected boolean localDeleteOnTermination ;

/* This tracker boolean will be used to detect whether the user called the set method
 * for this attribute. It will be used to determine whether to include this field
 * in the serialized XML. A primitive boolean has no "unset" sentinel, so the tracker
 * becomes true as soon as the setter is called.
 */
protected boolean localDeleteOnTerminationTracker = false ;

/**
 * Auto generated getter method
 * @return boolean false unless the setter was called with true
 */
public boolean getDeleteOnTermination(){
    return localDeleteOnTermination;
}
/**
 * Auto generated setter method
 * @param param DeleteOnTermination
 */
public void setDeleteOnTermination(boolean param){
    // The generated `if (false)` branch was dead code: a primitive boolean has no
    // "unset" sentinel, so calling the setter always marks the field as set.
    localDeleteOnTerminationTracker = true;
    this.localDeleteOnTermination = param;
}
/**
 * isReaderMTOMAware
 * @return true if the reader supports MTOM
 */
public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
    try {
        return java.lang.Boolean.TRUE.equals(
                reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
    } catch (java.lang.IllegalArgumentException e) {
        // The reader does not recognise the property; treat it as not MTOM-aware.
        return false;
    }
}
/**
 * Builds an OMElement whose serialization is deferred to this bean's serialize method.
 *
 * @param parentQName qualified name of the wrapping element
 * @param factory OM factory used to build the element
 * @return org.apache.axiom.om.OMElement a lazily-serialized element backed by this bean
 */
public org.apache.axiom.om.OMElement getOMElement (
        final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{

    // The data source delegates to serialize(...) only when the element is actually written.
    org.apache.axiom.om.OMDataSource dataSource =
            new org.apache.axis2.databinding.ADBDataSource(this,parentQName){

                public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                    EbsBlockDeviceType.this.serialize(parentQName,factory,xmlWriter);
                }
            };
    return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
            parentQName,factory,dataSource);
}

/** Serializes this bean without writing an xsi:type attribute (serializeType = false). */
public void serialize(final javax.xml.namespace.QName parentQName,
                      final org.apache.axiom.om.OMFactory factory,
                      org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
    serialize(parentQName,factory,xmlWriter,false);
}
/**
 * Writes this bean as XML: the wrapping element, an optional xsi:type attribute, and one
 * child element per field whose tracker flag is set. Element order (snapshotId, volumeSize,
 * deleteOnTermination) matches the generated schema and must not change.
 */
public void serialize(final javax.xml.namespace.QName parentQName,
                      final org.apache.axiom.om.OMFactory factory,
                      org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                      boolean serializeType)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{

    java.lang.String prefix = null;
    java.lang.String namespace = null;

    prefix = parentQName.getPrefix();
    namespace = parentQName.getNamespaceURI();

    // Write the start tag of the wrapping element, declaring the namespace if the
    // writer has not already bound a prefix for it.
    if ((namespace != null) && (namespace.trim().length() > 0)) {
        java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
        if (writerPrefix != null) {
            xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
        } else {
            if (prefix == null) {
                prefix = generatePrefix(namespace);
            }

            xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
    } else {
        xmlWriter.writeStartElement(parentQName.getLocalPart());
    }

    // Optionally emit an xsi:type attribute naming this schema type.
    if (serializeType){
        java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2009-10-31/");
        if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
            writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                    namespacePrefix+":EbsBlockDeviceType",
                    xmlWriter);
        } else {
            writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                    "EbsBlockDeviceType",
                    xmlWriter);
        }
    }

    // snapshotId: only serialized when the setter recorded a non-null value.
    if (localSnapshotIdTracker){
        namespace = "http://ec2.amazonaws.com/doc/2009-10-31/";
        if (! namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);

            if (prefix == null) {
                prefix = generatePrefix(namespace);

                xmlWriter.writeStartElement(prefix,"snapshotId", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace,"snapshotId");
            }
        } else {
            xmlWriter.writeStartElement("snapshotId");
        }

        if (localSnapshotId==null){
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("snapshotId cannot be null!!");
        }else{
            xmlWriter.writeCharacters(localSnapshotId);
        }

        xmlWriter.writeEndElement();
    }
    // volumeSize: only serialized when set; Integer.MIN_VALUE is the "unset" sentinel.
    if (localVolumeSizeTracker){
        namespace = "http://ec2.amazonaws.com/doc/2009-10-31/";
        if (! namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);

            if (prefix == null) {
                prefix = generatePrefix(namespace);

                xmlWriter.writeStartElement(prefix,"volumeSize", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace,"volumeSize");
            }
        } else {
            xmlWriter.writeStartElement("volumeSize");
        }

        if (localVolumeSize==java.lang.Integer.MIN_VALUE) {
            throw new org.apache.axis2.databinding.ADBException("volumeSize cannot be null!!");
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localVolumeSize));
        }

        xmlWriter.writeEndElement();
    }
    // deleteOnTermination: a primitive boolean can never be "nil", hence the
    // generated dead `if (false)` guard.
    if (localDeleteOnTerminationTracker){
        namespace = "http://ec2.amazonaws.com/doc/2009-10-31/";
        if (! namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);

            if (prefix == null) {
                prefix = generatePrefix(namespace);

                xmlWriter.writeStartElement(prefix,"deleteOnTermination", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace,"deleteOnTermination");
            }
        } else {
            xmlWriter.writeStartElement("deleteOnTermination");
        }

        if (false) {
            throw new org.apache.axis2.databinding.ADBException("deleteOnTermination cannot be null!!");
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localDeleteOnTermination));
        }

        xmlWriter.writeEndElement();
    }

    xmlWriter.writeEndElement();
}
/**
 * Util method to write an attribute with the ns prefix. Declares the namespace on the
 * writer first if the prefix has not been bound yet.
 */
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
                            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
    if (xmlWriter.getPrefix(namespace) == null) {
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }

    xmlWriter.writeAttribute(namespace,attName,attValue);
}
/**
 * Util method to write an attribute without the ns prefix
 */
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
    if (!namespace.equals("")) {
        // Qualified attribute: ensure the namespace has a registered prefix first.
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attValue);
    } else {
        // No namespace: write a plain, unqualified attribute.
        xmlWriter.writeAttribute(attName, attValue);
    }
}
/**
 * Util method to write a QName-valued attribute, resolving the QName's namespace to a
 * prefix on the writer first.
 */
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                 javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

    java.lang.String attributeNamespace = qname.getNamespaceURI();
    java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
    if (attributePrefix == null) {
        attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
    }
    java.lang.String attributeValue;
    if (attributePrefix.trim().length() > 0) {
        attributeValue = attributePrefix + ":" + qname.getLocalPart();
    } else {
        attributeValue = qname.getLocalPart();
    }

    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName, attributeValue);
    } else {
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attributeValue);
    }
}

/**
 * method to handle Qnames: writes the QName as "prefix:localPart" character data,
 * declaring the namespace on the writer if necessary.
 */
private void writeQName(javax.xml.namespace.QName qname,
                        javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    java.lang.String namespaceURI = qname.getNamespaceURI();
    if (namespaceURI != null) {
        java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
        if (prefix == null) {
            prefix = generatePrefix(namespaceURI);
            xmlWriter.writeNamespace(prefix, namespaceURI);
            xmlWriter.setPrefix(prefix,namespaceURI);
        }

        if (prefix.trim().length() > 0){
            xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        } else {
            // i.e this is the default namespace
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }

    } else {
        xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
    }
}

/** Writes an array of QNames as space-separated character data (an XML Schema list). */
private void writeQNames(javax.xml.namespace.QName[] qnames,
                         javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

    if (qnames != null) {
        // we have to store this data until last moment since it is not possible to write any
        // namespace data after writing the character data
        java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
        java.lang.String namespaceURI = null;
        java.lang.String prefix = null;

        for (int i = 0; i < qnames.length; i++) {
            if (i > 0) {
                stringToWrite.append(" ");
            }
            namespaceURI = qnames[i].getNamespaceURI();
            if (namespaceURI != null) {
                prefix = xmlWriter.getPrefix(namespaceURI);
                if ((prefix == null) || (prefix.length() == 0)) {
                    prefix = generatePrefix(namespaceURI);
                    xmlWriter.writeNamespace(prefix, namespaceURI);
                    xmlWriter.setPrefix(prefix,namespaceURI);
                }

                if (prefix.trim().length() > 0){
                    stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            } else {
                stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
            }
        }
        xmlWriter.writeCharacters(stringToWrite.toString());
    }
}

/**
 * Register a namespace prefix on the writer, generating a fresh unique prefix when the
 * preferred one is already bound to a different namespace.
 */
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
    java.lang.String prefix = xmlWriter.getPrefix(namespace);

    if (prefix == null) {
        prefix = generatePrefix(namespace);

        // Keep generating prefixes until one is found that is not already in use.
        while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
            prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
        }

        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }

    return prefix;
}
/**
 * databinding method to get an XML representation of this object. Builds a flat list of
 * alternating element QNames and string values (one pair per tracked field) and wraps it
 * in a pull-parser style reader.
 */
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
        throws org.apache.axis2.databinding.ADBException{

    java.util.ArrayList elementList = new java.util.ArrayList();
    java.util.ArrayList attribList = new java.util.ArrayList();

    if (localSnapshotIdTracker){
        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
                "snapshotId"));

        if (localSnapshotId != null){
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localSnapshotId));
        } else {
            throw new org.apache.axis2.databinding.ADBException("snapshotId cannot be null!!");
        }
    }
    if (localVolumeSizeTracker){
        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
                "volumeSize"));

        elementList.add(
                org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localVolumeSize));
    }
    if (localDeleteOnTerminationTracker){
        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
                "deleteOnTermination"));

        elementList.add(
                org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localDeleteOnTermination));
    }

    // No attributes are defined for this type; attribList stays empty.
    return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}
/**
 * Factory class that keeps the parse method
 */
public static class Factory{

    /**
     * static method to create the object
     * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
     * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
     * Postcondition: If this object is an element, the reader is positioned at its end element
     * If this object is a complex type, the reader is positioned at the end element of its outer element
     */
    public static EbsBlockDeviceType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
        EbsBlockDeviceType object =
                new EbsBlockDeviceType();

        int event;
        java.lang.String nillableValue = null;
        java.lang.String prefix ="";
        java.lang.String namespaceuri ="";
        try {

            // Advance to the first start (or end) element.
            while (!reader.isStartElement() && !reader.isEndElement())
                reader.next();

            // If an xsi:type attribute names a subtype, delegate to the extension mapper.
            if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                        "type");
                if (fullTypeName!=null){
                    java.lang.String nsPrefix = null;
                    if (fullTypeName.indexOf(":") > -1){
                        nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                    }
                    nsPrefix = nsPrefix==null?"":nsPrefix;

                    java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);

                    if (!"EbsBlockDeviceType".equals(type)){
                        //find namespace for the prefix
                        java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                        return (EbsBlockDeviceType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                                nsUri,type,reader);
                    }
                }
            }

            // Note all attributes that were handled. Used to differ normal attributes
            // from anyAttributes.
            java.util.Vector handledAttributes = new java.util.Vector();

            reader.next();

            // Each optional child element is read in schema order; a missing element
            // simply falls through to the next expected one.
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","snapshotId").equals(reader.getName())){

                java.lang.String content = reader.getElementText();

                object.setSnapshotId(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));

                reader.next();

            } // End of if for expected property start element
            else {

            }

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","volumeSize").equals(reader.getName())){

                java.lang.String content = reader.getElementText();

                object.setVolumeSize(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToInt(content));

                reader.next();

            } // End of if for expected property start element
            else {
                // Absent volumeSize is recorded with the Integer.MIN_VALUE "unset" sentinel.
                object.setVolumeSize(java.lang.Integer.MIN_VALUE);
            }

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","deleteOnTermination").equals(reader.getName())){

                java.lang.String content = reader.getElementText();

                object.setDeleteOnTermination(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToBoolean(content));

                reader.next();

            } // End of if for expected property start element
            else {

            }

            while (!reader.isStartElement() && !reader.isEndElement())
                reader.next();

            if (reader.isStartElement())
                // A start element we are not expecting indicates a trailing invalid property
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());

        } catch (javax.xml.stream.XMLStreamException e) {
            throw new java.lang.Exception(e);
        }

        return object;
    }

}//end of factory class
}
| |
/*
*
* This file was generated by LLRP Code Generator
* see http://llrp-toolkit.cvs.sourceforge.net/llrp-toolkit/
* for more information
* Generated on: Sun Apr 08 14:14:10 EDT 2012;
*
*/
/*
* Copyright 2007 ETH Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
*
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*
*/
package org.llrp.ltk.generated.messages;
import maximsblog.blogspot.com.llrpexplorer.Logger;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.llrp.ltk.exceptions.InvalidLLRPMessageException;
import org.llrp.ltk.exceptions.MissingParameterException;
import org.llrp.ltk.generated.LLRPConstants;
import org.llrp.ltk.generated.parameters.Custom;
import org.llrp.ltk.types.BitList;
import org.llrp.ltk.types.LLRPBitList;
import org.llrp.ltk.types.LLRPMessage;
import org.llrp.ltk.types.SignedShort;
import org.llrp.ltk.types.UnsignedInteger;
import org.llrp.ltk.types.UnsignedShort;
import java.util.LinkedList;
import java.util.List;
/**
* This message is issued by the Reader to the Client. This message can be used by the Client to monitor the LLRP-layer connectivity with the Reader. The Client configures the trigger at the Reader to send the Keepalive message. The configuration is done using the KeepaliveSpec parameter (section 12.2.4).
See also {@link <a href="http://www.epcglobalinc.org/standards/llrp/llrp_1_0_1-standard-20070813.pdf#page=77&view=fit">LLRP Specification Section 13.1.3</a>}
and {@link <a href="http://www.epcglobalinc.org/standards/llrp/llrp_1_0_1-standard-20070813.pdf#page=125&view=fit">LLRP Specification Section 16.1.31</a>}
*/
public class KEEPALIVE extends LLRPMessage {
    /** Message type number assigned to KEEPALIVE by the LLRP specification. */
    public static final SignedShort TYPENUM = new SignedShort(62);
    private static final Logger LOGGER = Logger.getLogger(KEEPALIVE.class);
    /** KEEPALIVE expects no direct response message type. */
    public static final String RESPONSETYPE = "";

    /**
     * empty constructor to create new message
     * with LLRP version set to 1.0 (0x1).
     */
    public KEEPALIVE() {
        setVersion(new BitList(0, 0, 1));
    }

    /**
     * Constructor to create message from binary encoded message
     * calls decodeBinary to decode message.
     * @param list to be decoded
     */
    public KEEPALIVE(final LLRPBitList list) throws InvalidLLRPMessageException {
        decodeBinary(list.toByteArray());
    }

    /**
     * Constructor to create message from binary encoded message
     * calls decodeBinary to decode message.
     * @param byteArray representing message
     */
    public KEEPALIVE(final byte[] byteArray) throws InvalidLLRPMessageException {
        decodeBinary(byteArray);
    }

    /**
     * Constructor to create message from xml encoded message
     * calls decodeXML to decode message.
     * @param document to be decoded
     */
    public KEEPALIVE(final Document document)
        throws InvalidLLRPMessageException {
        decodeXML(document);
    }

    /**
     * {@inheritDoc}
     *
     * KEEPALIVE carries no message-specific fields or parameters, so the
     * binary body is empty.
     */
    protected LLRPBitList encodeBinarySpecific()
        throws InvalidLLRPMessageException {
        LLRPBitList resultBits = new LLRPBitList();

        return resultBits;
    }

    /**
     * {@inheritDoc}
     */
    public Document encodeXML() throws InvalidLLRPMessageException {
        try {
            Namespace ns = Namespace.getNamespace("llrp",
                    LLRPConstants.LLRPNAMESPACE);

            Element root = new Element("KEEPALIVE", ns);
            root.addNamespaceDeclaration(Namespace.getNamespace("llrp",
                    LLRPConstants.LLRPNAMESPACE));

            if (version == null) {
                throw new InvalidLLRPMessageException("Version not set");
            } else {
                root.setAttribute("Version", version.toInteger().toString());
            }

            if (messageID == null) {
                throw new InvalidLLRPMessageException("MessageID not set");
            } else {
                root.setAttribute("MessageID", messageID.toString(10));
            }

            //parameters
            Document doc = new Document(root);

            // Validate against the LLRP message schema before handing it out.
            if (isValidXMLMessage(doc, LLRPConstants.LLRPMESSAGESCHEMAPATH)) {
                return doc;
            } else {
                return null;
            }
        } catch (IllegalArgumentException e) {
            throw new InvalidLLRPMessageException(e.getMessage());
        } catch (MissingParameterException e) {
            throw new InvalidLLRPMessageException(e.getMessage());
        }
    }

    /**
     * {@inheritDoc}
     *
     * KEEPALIVE carries no message-specific fields or parameters, so there
     * is nothing to decode here.
     */
    protected void decodeBinarySpecific(LLRPBitList binary)
        throws InvalidLLRPMessageException {
        // Intentionally empty: no message body for KEEPALIVE.
    }

    /**
     * {@inheritDoc}
     */
    public void decodeXML(final Document document)
        throws InvalidLLRPMessageException {
        try {
            isValidXMLMessage(document, LLRPConstants.LLRPMESSAGESCHEMAPATH);

            Element root = (Element) document.getRootElement().clone();

            // the version field is always 3 bit long
            // if the version attribute is not set in the LTK-XML message,
            // it is set to version 001
            String versionAttribute = root.getAttributeValue("Version");

            if (versionAttribute != null) {
                version = new BitList(3);
                version.setValue(Integer.valueOf(versionAttribute));
            } else {
                version = new BitList(0, 0, 1);
            }

            messageID = new UnsignedInteger(root.getAttributeValue("MessageID"));

            // KEEPALIVE defines no child parameters; any child element is invalid.
            if (root.getChildren().size() > 0) {
                String message = "KEEPALIVE has unknown element " +
                    ((Element) root.getChildren().get(0)).getName();
                throw new InvalidLLRPMessageException(message);
            }
        } catch (IllegalArgumentException e) {
            throw new InvalidLLRPMessageException(e.getMessage());
        } catch (MissingParameterException e) {
            throw new InvalidLLRPMessageException(e.getMessage());
        }
    }

    //setters
    // end setter

    //getters
    // end getters

    //add methods
    // end add

    @Override
    public String getResponseType() {
        return RESPONSETYPE;
    }

    @Override
    public String getName() {
        return "KEEPALIVE";
    }

    /**
     * return unique type number.
     * @return SignedShort holding unique type number
     */
    public SignedShort getTypeNum() {
        return TYPENUM;
    }
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.zooming;
import java.net.URL;
import java.time.temporal.ChronoUnit;
import java.util.ResourceBundle;
import javafx.application.Platform;
import javafx.beans.InvalidationListener;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.fxml.FXML;
import javafx.scene.control.*;
import javafx.util.StringConverter;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.FXMLConstructor;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.TimeLineView;
import org.sleuthkit.autopsy.timeline.VisualizationMode;
import org.sleuthkit.autopsy.timeline.actions.Back;
import org.sleuthkit.autopsy.timeline.actions.Forward;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
import org.sleuthkit.autopsy.timeline.utils.RangeDivisionInfo;
/**
* FXML Controller class for the ZoomSettingsPane.fxml
*
* has sliders to provide context/control over three axes of zooming (timescale,
* event hierarchy, and description detail).
*/
public class ZoomSettingsPane extends TitledPane implements TimeLineView {

    @FXML
    private ResourceBundle resources;

    @FXML
    private URL location;

    @FXML
    private Button backButton;

    @FXML
    private Button forwardButton;

    @FXML
    private Slider descrLODSlider;

    @FXML
    private Slider typeZoomSlider;

    @FXML
    private Slider timeUnitSlider;

    @FXML
    private Label descrLODLabel;

    @FXML
    private Label typeZoomLabel;

    @FXML
    private Label timeUnitLabel;

    @FXML
    private Label zoomLabel;

    @FXML
    private Label historyLabel;

    private TimeLineController controller;

    private FilteredEventsModel filteredEvents;

    /**
     * Initializes the controller class: configures the ranges and label
     * formatters of the three zoom sliders (time scale, event-type hierarchy,
     * description detail) and localizes the labels.
     */
    public void initialize() {
        // FOREVER is handled separately via showFullRange(), so the slider
        // covers one fewer stop than there are TimeUnits values.
        timeUnitSlider.setMax(TimeUnits.values().length - 2);
        timeUnitSlider.setLabelFormatter(new TimeUnitConverter());

        typeZoomSlider.setMin(1);
        typeZoomSlider.setMax(2);
        typeZoomSlider.setLabelFormatter(new TypeZoomConverter());

        descrLODSlider.setMax(DescriptionLOD.values().length - 1);
        descrLODSlider.setLabelFormatter(new DescrLODConverter());

        descrLODLabel.setText(
                NbBundle.getMessage(this.getClass(), "ZoomSettingsPane.descrLODLabel.text"));
        typeZoomLabel.setText(NbBundle.getMessage(this.getClass(), "ZoomSettingsPane.typeZoomLabel.text"));
        timeUnitLabel.setText(NbBundle.getMessage(this.getClass(), "ZoomSettingsPane.timeUnitLabel.text"));
        zoomLabel.setText(NbBundle.getMessage(this.getClass(), "ZoomSettingsPane.zoomLabel.text"));
        historyLabel.setText(NbBundle.getMessage(this.getClass(), "ZoomSettingsPane.historyLabel.text"));
    }

    public ZoomSettingsPane() {
        FXMLConstructor.construct(this, "ZoomSettingsPane.fxml"); // NON-NLS
    }

    /**
     * {@inheritDoc}
     *
     * Wires the back/forward history buttons to the controller's actions and
     * disables the description-LOD slider in counts mode.
     */
    @Override
    synchronized public void setController(TimeLineController controller) {
        this.controller = controller;
        setModel(controller.getEventsModel());

        // Description detail has no meaning in the counts visualization.
        descrLODSlider.disableProperty().bind(controller.getViewMode().isEqualTo(VisualizationMode.COUNTS));
        Back back = new Back(controller);
        backButton.disableProperty().bind(back.disabledProperty());
        backButton.setOnAction(back);
        backButton.setTooltip(new Tooltip(
                NbBundle.getMessage(this.getClass(), "ZoomSettingsPane.backButton.toolTip.text",
                        back.getAccelerator().getName())));
        Forward forward = new Forward(controller);
        forwardButton.disableProperty().bind(forward.disabledProperty());
        forwardButton.setOnAction(forward);
        forwardButton.setTooltip(new Tooltip(
                NbBundle.getMessage(this.getClass(), "ZoomSettingsPane.forwardButton.toolTip.text",
                        forward.getAccelerator().getName())));
    }

    /**
     * {@inheritDoc}
     *
     * Hooks each slider to the corresponding model property so that moving a
     * slider pushes a zoom change to the controller, and model changes move
     * the slider (without listener feedback loops).
     */
    @Override
    public void setModel(FilteredEventsModel filteredEvents) {
        this.filteredEvents = filteredEvents;

        initializeSlider(timeUnitSlider,
                () -> {
                    TimeUnits requestedUnit = TimeUnits.values()[(int) timeUnitSlider.getValue()];

                    if (requestedUnit == TimeUnits.FOREVER) {
                        controller.showFullRange();
                    } else {
                        controller.pushTimeRange(IntervalUtils.getIntervalAround(IntervalUtils.middleOf(ZoomSettingsPane.this.filteredEvents.timeRangeProperty().get()), requestedUnit.getPeriod()));
                    }
                },
                this.filteredEvents.timeRangeProperty(),
                () -> {
                    RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(this.filteredEvents.timeRangeProperty().get());
                    ChronoUnit chronoUnit = rangeInfo.getPeriodSize().getChronoUnit();

                    timeUnitSlider.setValue(TimeUnits.fromChronoUnit(chronoUnit).ordinal() - 1);
                });

        initializeSlider(descrLODSlider,
                () -> {
                    DescriptionLOD newLOD = DescriptionLOD.values()[Math.round(descrLODSlider.valueProperty().floatValue())];

                    // If the controller rejects the new LOD, snap the slider
                    // back to the model's current value.
                    if (controller.pushDescrLOD(newLOD) == false) {
                        descrLODSlider.setValue(new DescrLODConverter().fromString(filteredEvents.getDescriptionLOD().toString()));
                    }
                }, this.filteredEvents.descriptionLODProperty(),
                () -> {
                    descrLODSlider.setValue(this.filteredEvents.descriptionLODProperty().get().ordinal());
                });

        initializeSlider(typeZoomSlider,
                () -> {
                    EventTypeZoomLevel newZoomLevel = EventTypeZoomLevel.values()[Math.round(typeZoomSlider.valueProperty().floatValue())];
                    controller.pushEventTypeZoom(newZoomLevel);
                },
                this.filteredEvents.eventTypeZoomProperty(),
                () -> {
                    typeZoomSlider.setValue(this.filteredEvents.eventTypeZoomProperty().get().ordinal());
                });
    }

    /**
     * Set up a slider with a listener that is removed and re-added around
     * driver-property updates to avoid circular updates.
     *
     * @param <T>                 the type of the driving property
     * @param slider              the slider that will have its change handlers
     *                            set up
     * @param sliderChangeHandler the runnable that will be executed whenever
     *                            the slider value has changed and is not
     *                            currently changing
     * @param driver              the property that drives updates to this
     *                            slider
     * @param driverChangeHandler the code to update the slider based on the
     *                            value of the driving property. This will be
     *                            wrapped in a remove/add-listener pair to
     *                            prevent circular updates.
     */
    private <T> void initializeSlider(Slider slider, Runnable sliderChangeHandler, ReadOnlyObjectProperty<T> driver, Runnable driverChangeHandler) {
        // Only react once the user has finished dragging the slider.
        final InvalidationListener sliderListener = observable -> {
            if (slider.isValueChanging() == false) {
                sliderChangeHandler.run();
            }
        };

        slider.valueProperty().addListener(sliderListener);
        slider.valueChangingProperty().addListener(sliderListener);

        Platform.runLater(driverChangeHandler);
        driver.addListener(observable -> {
            // Detach the slider listener so that updating the slider from the
            // model does not push the same change back to the controller.
            slider.valueProperty().removeListener(sliderListener);
            slider.valueChangingProperty().removeListener(sliderListener);

            Platform.runLater(driverChangeHandler);

            slider.valueProperty().addListener(sliderListener);
            slider.valueChangingProperty().addListener(sliderListener);
        });
    }

    //Can these be abstracted to a sort of Enum converter for use in a potential enumslider
    private static class TimeUnitConverter extends StringConverter<Double> {

        @Override
        public String toString(Double object) {
            // +1 compensates for the FOREVER stop that the slider omits.
            return TimeUnits.values()[Math.min(TimeUnits.values().length - 1, object.intValue() + 1)].toString();
        }

        @Override
        public Double fromString(String string) {
            return (double) TimeUnits.valueOf(string).ordinal();
        }
    }

    private static class TypeZoomConverter extends StringConverter<Double> {

        @Override
        public String toString(Double object) {
            return EventTypeZoomLevel.values()[object.intValue()].getDisplayName();
        }

        @Override
        public Double fromString(String string) {
            return (double) EventTypeZoomLevel.valueOf(string).ordinal();
        }
    }

    private static class DescrLODConverter extends StringConverter<Double> {

        @Override
        public String toString(Double object) {
            return DescriptionLOD.values()[object.intValue()].getDisplayName();
        }

        @Override
        public Double fromString(String string) {
            return (double) DescriptionLOD.valueOf(string).ordinal();
        }
    }
}
| |
/**
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package parquet;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.MessageFormat;
import java.util.Date;
import java.util.logging.Formatter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import java.util.logging.StreamHandler;
/**
* Simple wrapper around java.util.logging
* Adds compile time log level.
* The compiler removes completely if statements that reference to a false constant
*
* <code>
* if (DEBUG) LOG.debug("removed by the compiler if DEBUG is a false constant")
* </code>
*
* @author Julien Le Dem
*
*/
public class Log {

  /**
   * this is the compile time log level
   */
  public static final Level LEVEL = Level.INFO; // should be INFO unless for debugging

  // Compile-time flags: `if (DEBUG) LOG.debug(...)` is removed entirely by
  // the compiler when the constant is false.
  public static final boolean DEBUG = (LEVEL.intValue() <= Level.FINE.intValue());
  public static final boolean INFO = (LEVEL.intValue() <= Level.INFO.intValue());
  public static final boolean WARN = (LEVEL.intValue() <= Level.WARNING.intValue());
  public static final boolean ERROR = (LEVEL.intValue() <= Level.SEVERE.intValue());

  static {
    // add a default handler in case there is none
    // Class.getPackage() returns null for classes in the unnamed package;
    // fall back to the root logger name in that case.
    Package pkg = Log.class.getPackage();
    Logger logger = Logger.getLogger(pkg == null ? "" : pkg.getName());
    Handler[] handlers = logger.getHandlers();
    if (handlers == null || handlers.length == 0) {
      logger.setUseParentHandlers(false);
      StreamHandler handler = new StreamHandler(System.out, new Formatter() {
        Date dat = new Date();
        private final static String format = "{0,date} {0,time}";
        private MessageFormat formatter = new MessageFormat(format);
        private Object args[] = new Object[1];

        /**
         * Format the given LogRecord.
         * @param record the log record to be formatted.
         * @return a formatted log record
         */
        public synchronized String format(LogRecord record) {
          StringBuffer sb = new StringBuffer();

          // Minimize memory allocations here.
          dat.setTime(record.getMillis());
          args[0] = dat;
          formatter.format(args, sb, null);
          sb.append(" ");
          sb.append(record.getLevel().getLocalizedName());
          sb.append(": ");
          sb.append(record.getLoggerName());
          sb.append(": ");
          sb.append(formatMessage(record));
          sb.append("\n");
          if (record.getThrown() != null) {
            try {
              StringWriter sw = new StringWriter();
              PrintWriter pw = new PrintWriter(sw);
              record.getThrown().printStackTrace(pw);
              pw.close();
              sb.append(sw.toString());
            } catch (Exception ex) {
              // best effort: never let formatting of the stack trace break logging
            }
          }
          return sb.toString();
        }
      });
      handler.setLevel(LEVEL);
      logger.addHandler(handler);
    }
    logger.setLevel(LEVEL);
  }

  /**
   *
   * @param c the current class
   * @return the corresponding logger
   */
  public static Log getLog(Class<?> c) {
    return new Log(c);
  }

  private final Logger logger;

  public Log(Class<?> c) {
    this.logger = Logger.getLogger(c.getName());
  }

  /**
   * prints a debug message
   * @param m the message, or a Throwable whose stack trace should be logged
   */
  public void debug(Object m) {
    if (m instanceof Throwable) {
      logger.log(Level.FINE, "", (Throwable)m);
    } else {
      logger.fine(String.valueOf(m));
    }
  }

  /**
   * prints a debug message
   * @param m the message
   * @param t the related exception
   */
  public void debug(Object m, Throwable t) {
    logger.log(Level.FINE, String.valueOf(m), t);
  }

  /**
   * prints an info message
   * @param m the message, or a Throwable whose stack trace should be logged
   */
  public void info(Object m) {
    if (m instanceof Throwable) {
      logger.log(Level.INFO, "", (Throwable)m);
    } else {
      logger.info(String.valueOf(m));
    }
  }

  /**
   * prints an info message
   * @param m the message
   * @param t the related exception
   */
  public void info(Object m, Throwable t) {
    logger.log(Level.INFO, String.valueOf(m), t);
  }

  /**
   * prints a warn message
   * @param m the message, or a Throwable whose stack trace should be logged
   */
  public void warn(Object m) {
    if (m instanceof Throwable) {
      logger.log(Level.WARNING, "", (Throwable)m);
    } else {
      logger.warning(String.valueOf(m));
    }
  }

  /**
   * prints a warn message
   * @param m the message
   * @param t the related exception
   */
  public void warn(Object m, Throwable t) {
    logger.log(Level.WARNING, String.valueOf(m), t);
  }

  /**
   * prints an error message
   * @param m the message, or a Throwable whose stack trace should be logged
   */
  public void error(Object m) {
    if (m instanceof Throwable) {
      logger.log(Level.SEVERE, "", (Throwable)m);
    } else {
      // Fixed: previously logged via logger.warning(), so plain error
      // messages were emitted at WARNING instead of SEVERE.
      logger.severe(String.valueOf(m));
    }
  }

  /**
   * prints an error message
   * @param m the message
   * @param t the related exception
   */
  public void error(Object m, Throwable t) {
    logger.log(Level.SEVERE, String.valueOf(m), t);
  }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi, Yahoo! Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.tasks;
import hudson.model.Build;
import hudson.model.FreeStyleProject;
import jenkins.mvn.DefaultGlobalSettingsProvider;
import jenkins.mvn.DefaultSettingsProvider;
import jenkins.mvn.FilePathGlobalSettingsProvider;
import jenkins.mvn.FilePathSettingsProvider;
import jenkins.mvn.GlobalMavenConfig;
import hudson.model.JDK;
import hudson.model.ParametersDefinitionProperty;
import hudson.model.Result;
import hudson.model.StringParameterDefinition;
import hudson.model.ParametersAction;
import hudson.model.StringParameterValue;
import hudson.model.Cause.LegacyCodeCause;
import hudson.slaves.EnvironmentVariablesNodeProperty;
import hudson.slaves.EnvironmentVariablesNodeProperty.Entry;
import hudson.tasks.Maven.MavenInstallation;
import hudson.tasks.Maven.MavenInstaller;
import hudson.tasks.Maven.MavenInstallation.DescriptorImpl;
import hudson.tools.ToolProperty;
import hudson.tools.ToolPropertyDescriptor;
import hudson.tools.InstallSourceProperty;
import hudson.util.DescribableList;
import java.util.Collections;
import com.gargoylesoftware.htmlunit.html.HtmlForm;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import com.gargoylesoftware.htmlunit.html.HtmlButton;
import hudson.EnvVars;
import hudson.model.FreeStyleBuild;
import hudson.model.PasswordParameterDefinition;
import org.jvnet.hudson.test.Issue;
import static org.junit.Assert.*;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.ExtractResourceSCM;
import org.jvnet.hudson.test.JenkinsRule;
import org.jvnet.hudson.test.ToolInstallations;
/**
* @author Kohsuke Kawaguchi
*/
public class MavenTest {

    @Rule public JenkinsRule j = new JenkinsRule();

    /**
     * Tests the round-tripping of the configuration.
     */
    @Test public void configRoundtrip() throws Exception {
        j.jenkins.getDescriptorByType(Maven.DescriptorImpl.class).setInstallations(); // reset

        FreeStyleProject p = j.createFreeStyleProject();
        p.getBuildersList().add(new Maven("a", null, "b.pom", "c=d", "-e", true));

        JenkinsRule.WebClient webClient = j.createWebClient();
        HtmlPage page = webClient.getPage(p, "configure");

        HtmlForm form = page.getFormByName("config");
        j.submit(form);

        Maven m = p.getBuildersList().get(Maven.class);
        assertNotNull(m);
        assertEquals("a", m.targets);
        assertNull("found " + m.mavenName, m.mavenName);
        assertEquals("b.pom", m.pom);
        assertEquals("c=d", m.properties);
        assertEquals("-e", m.jvmOptions);
        assertTrue(m.usesPrivateRepository());
    }

    /**
     * Verifies that MAVEN_HOME and JAVA_HOME containing node-level environment
     * variables are expanded when the build runs.
     */
    @Test public void withNodeProperty() throws Exception {
        MavenInstallation maven = ToolInstallations.configureDefaultMaven();
        String mavenHome = maven.getHome();
        String mavenHomeVar = "${VAR_MAVEN}" + mavenHome.substring(3);
        String mavenVar = mavenHome.substring(0, 3);
        MavenInstallation varMaven = new MavenInstallation("varMaven", mavenHomeVar, JenkinsRule.NO_PROPERTIES);
        j.jenkins.getDescriptorByType(Maven.DescriptorImpl.class).setInstallations(maven, varMaven);

        JDK jdk = j.jenkins.getJDK("default");
        String javaHome = jdk.getHome();
        String javaHomeVar = "${VAR_JAVA}" + javaHome.substring(3);
        String javaVar = javaHome.substring(0, 3);
        JDK varJDK = new JDK("varJDK", javaHomeVar);
        j.jenkins.getJDKs().add(varJDK);

        j.jenkins.getNodeProperties().replaceBy(
                Collections.singleton(new EnvironmentVariablesNodeProperty(
                        new Entry("VAR_MAVEN", mavenVar), new Entry("VAR_JAVA",
                                javaVar))));

        FreeStyleProject project = j.createFreeStyleProject();
        project.getBuildersList().add(new Maven("--help", varMaven.getName()));
        project.setJDK(varJDK);

        Build<?, ?> build = project.scheduleBuild2(0).get();

        assertEquals(Result.SUCCESS, build.getResult());
    }

    /**
     * Verifies that MAVEN_HOME and JAVA_HOME containing build parameters are
     * expanded when the build runs.
     */
    @Test public void withParameter() throws Exception {
        MavenInstallation maven = ToolInstallations.configureDefaultMaven();
        String mavenHome = maven.getHome();
        String mavenHomeVar = "${VAR_MAVEN}" + mavenHome.substring(3);
        String mavenVar = mavenHome.substring(0, 3);
        MavenInstallation varMaven = new MavenInstallation("varMaven",mavenHomeVar, JenkinsRule.NO_PROPERTIES);
        j.jenkins.getDescriptorByType(Maven.DescriptorImpl.class).setInstallations(maven, varMaven);

        JDK jdk = j.jenkins.getJDK("default");
        String javaHome = jdk.getHome();
        String javaHomeVar = "${VAR_JAVA}" + javaHome.substring(3);
        String javaVar = javaHome.substring(0, 3);
        JDK varJDK = new JDK("varJDK", javaHomeVar);
        j.jenkins.getJDKs().add(varJDK);

        FreeStyleProject project = j.createFreeStyleProject();
        project.addProperty(new ParametersDefinitionProperty(
                new StringParameterDefinition("VAR_MAVEN", "XXX"),
                new StringParameterDefinition("VAR_JAVA", "XXX")));
        project.getBuildersList().add(new Maven("--help", varMaven.getName()));
        project.setJDK(varJDK);

        FreeStyleBuild build = project.scheduleBuild2(0, new LegacyCodeCause(),
                new ParametersAction(
                        new StringParameterValue("VAR_MAVEN", mavenVar),
                        new StringParameterValue("VAR_JAVA", javaVar))).get();

        j.assertBuildStatusSuccess(build);
    }

    /**
     * Simulates the addition of the new Maven via UI and makes sure it works.
     */
    @Test public void globalConfigAjax() throws Exception {
        HtmlPage p = j.createWebClient().goTo("configure");
        HtmlForm f = p.getFormByName("config");
        HtmlButton b = j.getButtonByCaption(f, "Add Maven");
        b.click();
        j.findPreviousInputElement(b,"name").setValueAttribute("myMaven");
        j.findPreviousInputElement(b,"home").setValueAttribute("/tmp/foo");
        j.submit(f);
        verify();

        // another submission and verify it survives a roundtrip
        p = j.createWebClient().goTo("configure");
        f = p.getFormByName("config");
        j.submit(f);
        verify();
    }

    /** Asserts the single expected Maven installation configured by {@link #globalConfigAjax}. */
    private void verify() throws Exception {
        MavenInstallation[] l = j.get(DescriptorImpl.class).getInstallations();
        assertEquals(1,l.length);
        j.assertEqualBeans(l[0],new MavenInstallation("myMaven","/tmp/foo", JenkinsRule.NO_PROPERTIES),"name,home");

        // by default we should get the auto installer
        DescribableList<ToolProperty<?>,ToolPropertyDescriptor> props = l[0].getProperties();
        assertEquals(1,props.size());
        InstallSourceProperty isp = props.get(InstallSourceProperty.class);
        assertEquals(1,isp.installers.size());
        assertNotNull(isp.installers.get(MavenInstaller.class));
    }

    /**
     * Password parameters must not leak into the build log via the Maven
     * command line.
     */
    @Test public void sensitiveParameters() throws Exception {
        FreeStyleProject project = j.createFreeStyleProject();
        ParametersDefinitionProperty pdb = new ParametersDefinitionProperty(
                new StringParameterDefinition("string", "defaultValue", "string description"),
                new PasswordParameterDefinition("password", "12345", "password description"),
                new StringParameterDefinition("string2", "Value2", "string description")
        );
        project.addProperty(pdb);
        project.setScm(new ExtractResourceSCM(getClass().getResource("maven-empty.zip")));
        project.getBuildersList().add(new Maven("clean package",null));

        FreeStyleBuild build = project.scheduleBuild2(0).get();
        @SuppressWarnings("deprecation")
        String buildLog = build.getLog();
        assertNotNull(buildLog);
        System.out.println(buildLog);
        assertFalse(buildLog.contains("-Dpassword=12345"));
    }

    @Test
    public void parametersReferencedFromPropertiesShouldRetainBackslashes() throws Exception {
        final String properties = "global.path=$GLOBAL_PATH\nmy.path=$PATH\\\\Dir";
        final StringParameterDefinition parameter = new StringParameterDefinition("PATH", "C:\\Windows");
        final Entry envVar = new Entry("GLOBAL_PATH", "D:\\Jenkins");

        FreeStyleProject project = j.createFreeStyleProject();
        project.getBuildersList().add(new Maven("--help",null,null,properties,null));
        project.addProperty(new ParametersDefinitionProperty(parameter));
        j.jenkins.getNodeProperties().replaceBy(Collections.singleton(
                new EnvironmentVariablesNodeProperty(envVar)
        ));

        FreeStyleBuild build = project.scheduleBuild2(0).get();
        @SuppressWarnings("deprecation")
        String buildLog = build.getLog();

        assertNotNull(buildLog);
        assertTrue(
                "Parameter my.path should preserve backslashes in:\n" + buildLog,
                buildLog.contains("-Dmy.path=C:\\Windows\\Dir")
        );
        assertTrue(
                "Parameter global.path should preserve backslashes in:\n" + buildLog,
                buildLog.contains("-Dglobal.path=D:\\Jenkins")
        );
    }

    @Test public void defaultSettingsProvider() throws Exception {
        {
            FreeStyleProject p = j.createFreeStyleProject();
            p.getBuildersList().add(new Maven("a", null, "a.pom", "c=d", "-e", true));

            Maven m = p.getBuildersList().get(Maven.class);
            assertNotNull(m);
            assertEquals(DefaultSettingsProvider.class, m.getSettings().getClass());
            assertEquals(DefaultGlobalSettingsProvider.class, m.getGlobalSettings().getClass());
        }

        {
            GlobalMavenConfig globalMavenConfig = GlobalMavenConfig.get();
            assertNotNull("No global Maven Config available", globalMavenConfig);
            globalMavenConfig.setSettingsProvider(new FilePathSettingsProvider("/tmp/settigns.xml"));
            globalMavenConfig.setGlobalSettingsProvider(new FilePathGlobalSettingsProvider("/tmp/global-settigns.xml"));

            FreeStyleProject p = j.createFreeStyleProject();
            p.getBuildersList().add(new Maven("b", null, "b.pom", "c=d", "-e", true));

            Maven m = p.getBuildersList().get(Maven.class);
            assertEquals(FilePathSettingsProvider.class, m.getSettings().getClass());
            assertEquals("/tmp/settigns.xml", ((FilePathSettingsProvider)m.getSettings()).getPath());
            assertEquals("/tmp/global-settigns.xml", ((FilePathGlobalSettingsProvider)m.getGlobalSettings()).getPath());
        }
    }

    @Issue("JENKINS-18898")
    @Test public void testNullHome() {
        EnvVars env = new EnvVars();
        new MavenInstallation("_", "", Collections.<ToolProperty<?>>emptyList()).buildEnvVars(env);
        assertEquals("{}", env.toString());
    }

    @Issue("JENKINS-26684")
    @Test public void specialCharsInBuildVariablesPassedAsProperties() throws Exception {
        MavenInstallation maven = ToolInstallations.configureMaven3();

        FreeStyleProject p = j.createFreeStyleProject();
        p.getBuildersList().add(new Maven("--help", maven.getName()));
        p.addProperty(new ParametersDefinitionProperty(
                new StringParameterDefinition("tilde", "~"),
                new StringParameterDefinition("exclamation_mark", "!"),
                new StringParameterDefinition("at_sign", "@"),
                new StringParameterDefinition("sharp", "#"),
                new StringParameterDefinition("dolar", "$"),
                new StringParameterDefinition("percent", "%"),
                new StringParameterDefinition("circumflex", "^"),
                new StringParameterDefinition("ampersand", "&"),
                new StringParameterDefinition("asterix", "*"),
                new StringParameterDefinition("parentheses", "()"),
                new StringParameterDefinition("underscore", "_"),
                new StringParameterDefinition("plus", "+"),
                new StringParameterDefinition("braces", "{}"),
                new StringParameterDefinition("brackets", "[]"),
                new StringParameterDefinition("colon", ":"),
                new StringParameterDefinition("semicolon", ";"),
                new StringParameterDefinition("quote", "\""),
                new StringParameterDefinition("apostrophe", "'"),
                new StringParameterDefinition("backslash", "\\"),
                new StringParameterDefinition("pipe", "|"),
                new StringParameterDefinition("angle_brackets", "<>"),
                new StringParameterDefinition("comma", ","),
                new StringParameterDefinition("period", "."),
                new StringParameterDefinition("slash", "/"),
                new StringParameterDefinition("question_mark", "?"),
                new StringParameterDefinition("space", " ")
        ));

        // The build itself is the assertion; the returned build object was unused.
        j.buildAndAssertSuccess(p);
    }
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2021 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.greenplum.model;
import org.jkiss.dbeaver.ext.postgresql.model.*;
import org.jkiss.dbeaver.model.exec.DBCExecutionPurpose;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCSession;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCStatement;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
@RunWith(MockitoJUnitRunner.class)
public class GreenplumTableTest {

    // All collaborators of GreenplumTable are stubbed so no real database
    // connection is needed to exercise DDL generation.
    @Mock
    PostgreSchema mockSchema;
    @Mock
    PostgreDatabase mockDatabase;
    @Mock
    JDBCResultSet mockResults; // result-set row backing the table under construction
    @Mock
    JDBCExecutionContext mockContext;
    @Mock
    GreenplumDataSource mockDataSource;
    @Mock
    DBRProgressMonitor mockMonitor;
    @Mock
    PostgreSchema.TableCache mockTableCache;
    @Mock
    PostgreSchema.ConstraintCache mockConstraintCache;
    @Mock
    private PostgreServerGreenplum mockServerGreenplum;

    // Subject under test; created per-test after the distribution stubs are set.
    private GreenplumTable table;

    private final String exampleDatabaseName = "sampleDatabase";
    private final String exampleSchemaName = "sampleSchema";
    private final String exampleTableName = "sampleTable";

    /**
     * Wires the schema/database/data-source mock graph that GreenplumTable
     * navigates during construction and DDL generation.
     */
    @Before
    public void setUp() throws Exception {
        Mockito.when(mockSchema.getDatabase()).thenReturn(mockDatabase);
        Mockito.when(mockSchema.getSchema()).thenReturn(mockSchema);
        Mockito.when(mockSchema.getDataSource()).thenReturn(mockDataSource);
        Mockito.when(mockSchema.getName()).thenReturn(exampleSchemaName);
        Mockito.when(mockSchema.getTableCache()).thenReturn(mockTableCache);
        Mockito.when(mockSchema.getConstraintCache()).thenReturn(mockConstraintCache);
        Mockito.when(mockDataSource.getSQLDialect()).thenReturn(new PostgreDialect());
        // Default to an "old" server version; individual tests override this
        // to exercise the version-9 code paths.
        Mockito.when(mockDataSource.isServerVersionAtLeast(Mockito.anyInt(), Mockito.anyInt())).thenReturn(false);
        Mockito.when(mockDataSource.getDefaultInstance()).thenReturn(mockDatabase);
        Mockito.when(mockDataSource.getServerType()).thenReturn(mockServerGreenplum);
        Mockito.when(mockDatabase.getName()).thenReturn(exampleDatabaseName);
        Mockito.when(mockDatabase.getDefaultContext(Mockito.anyObject(), Mockito.anyBoolean())).thenReturn(mockContext);
        Mockito.when(mockDatabase.isInstanceConnected()).thenReturn(true);
        Mockito.when(mockResults.getString("relname")).thenReturn(exampleTableName);
        // NOTE(review): "x" appears to be a non-special persistence marker used to
        // drive the default code path — confirm against GreenplumTable's parsing.
        Mockito.when(mockResults.getString("relpersistence")).thenReturn("x");
    }

    // No distribution-policy row on a v8 server => DISTRIBUTED RANDOMLY.
    @Test
    public void appendTableModifiers_whenServerVersion8_andNoColumnSetForDistribution_resultsInRandom() throws Exception {
        StringBuilder ddl = new StringBuilder();
        JDBCResultSet mockDCResults = mockResults(mockMonitor);
        Mockito.when(mockDCResults.next()).thenReturn(false);
        List<PostgreTableConstraint> constraints = Collections.emptyList();
        table = new GreenplumTable(mockSchema, mockResults);
        Mockito.when(mockConstraintCache.getTypedObjects(mockMonitor, mockSchema, table, PostgreTableConstraint.class))
                .thenReturn(constraints);
        table.appendTableModifiers(mockMonitor, ddl);
        Assert.assertEquals("\nDISTRIBUTED RANDOMLY", ddl.toString());
    }

    // A single-column distribution key => DISTRIBUTED BY (that column).
    @Test
    public void appendTableModifiers_whenServerVersion8_andSingleColumnSetForDistribution_resultsInDistributedByThatColumn() throws Exception {
        StringBuilder ddl = new StringBuilder();
        JDBCResultSet mockDCResults = mockResults(mockMonitor);
        Mockito.when(mockDCResults.next()).thenReturn(true);
        // Ordinal position 1 maps onto the first mocked column below.
        Mockito.when(mockDCResults.getObject(1)).thenReturn(new int[]{1});
        List<PostgreTableColumn> mockColumns = createMockColumns("Column_Name");
        Mockito.when(mockSchema.getTableCache()).thenReturn(mockTableCache);
        table = new GreenplumTable(mockSchema, mockResults);
        Mockito.when(mockTableCache.getChildren(mockMonitor, mockSchema, table))
                .thenReturn(mockColumns);
        table.appendTableModifiers(mockMonitor, ddl);
        Assert.assertEquals("\nDISTRIBUTED BY (Column_Name)", ddl.toString());
    }

    // A multi-column distribution key => DISTRIBUTED BY (col1, col2).
    @Test
    public void appendTableModifiers_whenServerVersion8_andMultipleSingleColumnSetForDistribution_resultsInDistributedByThoseColumns() throws Exception {
        StringBuilder ddl = new StringBuilder();
        JDBCResultSet mockDCResults = mockResults(mockMonitor);
        Mockito.when(mockDCResults.next()).thenReturn(true);
        Mockito.when(mockDCResults.getObject(1)).thenReturn(new int[]{1, 2});
        List<PostgreTableColumn> mockColumns = createMockColumns("Column_1", "Column_2");
        Mockito.when(mockSchema.getTableCache()).thenReturn(mockTableCache);
        table = new GreenplumTable(mockSchema, mockResults);
        Mockito.when(mockTableCache.getChildren(mockMonitor, mockSchema, table))
                .thenReturn(mockColumns);
        table.appendTableModifiers(mockMonitor, ddl);
        Assert.assertEquals("\nDISTRIBUTED BY (Column_1, Column_2)", ddl.toString());
    }

    // v9+ server, policy row says not replicated ("x") => DISTRIBUTED RANDOMLY.
    @Test
    public void appendTableModifiers_whenServerVersion9_andNotReplicated_andNoColumnSetForDistribution_resultsInRandom() throws Exception {
        StringBuilder ddl = new StringBuilder();
        JDBCResultSet mockDCResults = mockResults(mockMonitor);
        // First next() is false (no distribution columns), second true (policy row).
        Mockito.when(mockDCResults.next()).thenReturn(false, true);
        Mockito.when(mockDCResults.getString(1)).thenReturn("x");
        Mockito.when(mockDataSource.isServerVersionAtLeast(Mockito.anyInt(), Mockito.anyInt())).thenReturn(true);
        table = new GreenplumTable(mockSchema, mockResults);
        table.appendTableModifiers(mockMonitor, ddl);
        Assert.assertEquals("\nDISTRIBUTED RANDOMLY", ddl.toString());
    }

    // v9+ server, policy row says replicated ("r") => DISTRIBUTED REPLICATED.
    @Test
    public void appendTableModifiers_whenServerVersion9_andIsReplicated_resultsInReplicated() throws Exception {
        StringBuilder ddl = new StringBuilder();
        JDBCResultSet mockDCResults = mockResults(mockMonitor);
        Mockito.when(mockDCResults.next()).thenReturn(false, true);
        Mockito.when(mockDCResults.getString(1)).thenReturn("r");
        Mockito.when(mockDataSource.isServerVersionAtLeast(Mockito.anyInt(), Mockito.anyInt())).thenReturn(true);
        table = new GreenplumTable(mockSchema, mockResults);
        table.appendTableModifiers(mockMonitor, ddl);
        Assert.assertEquals("\nDISTRIBUTED REPLICATED", ddl.toString());
    }

    /**
     * Stubs the session/statement chain so that any query executed through
     * {@code mockContext} returns the result set produced here.
     *
     * @param monitor the progress monitor the production code passes to openSession
     * @return the mock result set the caller can programme with expectations
     */
    private JDBCResultSet mockResults(DBRProgressMonitor monitor) throws SQLException {
        JDBCSession mockSession = Mockito.mock(JDBCSession.class);
        JDBCStatement mockStatement = Mockito.mock(JDBCStatement.class);
        JDBCResultSet mockDCResults = Mockito.mock(JDBCResultSet.class);
        Mockito.when(mockContext.openSession(Mockito.eq(monitor), Mockito.eq(DBCExecutionPurpose.META),
                Mockito.anyString())).thenReturn(mockSession);
        Mockito.when(mockSession.createStatement()).thenReturn(mockStatement);
        Mockito.when(mockStatement.executeQuery(Mockito.anyString())).thenReturn(mockDCResults);
        return mockDCResults;
    }

    /**
     * Builds mock columns for the given names with 1-based ordinal positions,
     * matching the ordinals used in the distribution-key result sets above.
     */
    private List<PostgreTableColumn> createMockColumns(String... columns) {
        return IntStream.range(0, columns.length)
                .mapToObj(i -> {
                    String columnName = columns[i];
                    PostgreTableColumn mockColumn = Mockito.mock(PostgreTableColumn.class);
                    Mockito.when(mockColumn.getOrdinalPosition()).thenReturn(i + 1);
                    Mockito.when(mockColumn.getDataSource()).thenReturn(mockDataSource);
                    Mockito.when(mockColumn.getName()).thenReturn(columnName);
                    return mockColumn;
                }).collect(Collectors.toList());
    }
}
| |
/*
* Copyright 1996-2011 Niclas Hedhman.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.qi4j.library.alarm;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.ResourceBundle;
import org.qi4j.api.entity.Identity;
import org.qi4j.api.injection.scope.Structure;
import org.qi4j.api.mixin.Mixins;
import org.qi4j.api.service.ServiceComposite;
import org.qi4j.api.value.ValueBuilder;
import org.qi4j.api.value.ValueBuilderFactory;
/**
* The Standard AlarmPoint Model is centered around the Normal, Activated, Acknowledged
* and Deactivated states, and the triggers "activate", "deactivate",
* and "acknowledge".
* <p>
* The following matrix details the resulting grid.
* </p>
* <table summary="Transitions">
* <tr><th>Initial State</th><th>Trigger</th><th>Resulting State</th><th>Event Generated</th></tr>
* <tr><td>Normal</td><td>activate</td><td>Activated</td><td>activation</td></tr>
* <tr><td>Normal</td><td>deactivate</td><td>Normal</td><td>-</td></tr>
* <tr><td>Normal</td><td>acknowledge</td><td>Normal</td><td>-</td></tr>
* <tr><td>Activated</td><td>activate</td><td>Activated</td><td>-</td></tr>
* <tr><td>Activated</td><td>deactivate</td><td>Deactivated</td><td>deactivation</td></tr>
* <tr><td>Activated</td><td>acknowledge</td><td>Acknowledged</td><td>acknowledge</td></tr>
* <tr><td>Deactivated</td><td>activate</td><td>Activated</td><td>activation</td></tr>
 * <tr><td>Deactivated</td><td>deactivate</td><td>Deactivated</td><td>-</td></tr>
* <tr><td>Deactivated</td><td>acknowledge</td><td>Normal</td><td>acknowledge</td></tr>
* <tr><td>Acknowledged</td><td>activate</td><td>Acknowledged</td><td>-</td></tr>
* <tr><td>Acknowledged</td><td>deactivate</td><td>Normal</td><td>deactivation</td></tr>
* <tr><td>Acknowledged</td><td>acknowledge</td><td>Acknowledged</td><td>-</td></tr>
* </table>
*/
@Mixins( StandardAlarmModelService.StandardAlarmModelMixin.class )
public interface StandardAlarmModelService extends AlarmModel, ServiceComposite
{
    /**
     * Mixin implementing the standard four-state alarm model described in the
     * interface documentation (Normal, Activated, Deactivated, Acknowledged).
     */
    class StandardAlarmModelMixin
        implements AlarmModel
    {
        // Immutable catalogues of the triggers and statuses this model supports.
        private static final List<String> TRIGGER_LIST;

        private static final List<String> STATUS_LIST;

        static
        {
            List<String> list1 = new ArrayList<String>();
            list1.add( AlarmPoint.STATUS_NORMAL );
            list1.add( AlarmPoint.STATUS_ACTIVATED );
            list1.add( AlarmPoint.STATUS_DEACTIVATED );
            list1.add( AlarmPoint.STATUS_ACKNOWLEDGED );
            STATUS_LIST = Collections.unmodifiableList( list1 );
            List<String> list2 = new ArrayList<String>();
            list2.add( AlarmPoint.TRIGGER_ACTIVATE );
            list2.add( AlarmPoint.TRIGGER_DEACTIVATE );
            list2.add( AlarmPoint.TRIGGER_ACKNOWLEDGE );
            TRIGGER_LIST = Collections.unmodifiableList( list2 );
        }

        @Structure
        private ValueBuilderFactory vbf;

        /**
         * Loads the resource bundle containing the localized model texts.
         *
         * @param locale the locale to load texts for; when null the default locale is used.
         *
         * @return the resource bundle for the given (or default) locale.
         */
        static ResourceBundle getResourceBundle( Locale locale )
        {
            if( locale == null )
            {
                locale = Locale.getDefault();
            }
            ClassLoader cl = StandardAlarmModelMixin.class.getClassLoader();
            return ResourceBundle.getBundle( MODEL_BUNDLE_NAME, locale, cl );
        }

        /**
         * Returns the Name of the AlarmModel.
         * This normally returns the human readable technical name of
         * the AlarmModel.
         *
         * @return the technical identifier of this model.
         */
        @Override
        public String modelName()
        {
            return "org.qi4j.library.alarm.model.standard";
        }

        /**
         * Returns a Description of the AlarmModel in the default Locale.
         * This normally returns a full Description of the AlarmModel in the
         * default Locale.
         *
         * @return the description of the ModelProvider.
         */
        @Override
        public String modelDescription()
        {
            return modelDescription( null );
        }

        /**
         * Returns a Description of the AlarmModel.
         * This normally returns a full Description of the AlarmModel in the
         * Locale. If Locale is <code><b>null</b></code>, then the
         * default Locale is used.
         */
        @Override
        public String modelDescription( Locale locale )
        {
            ResourceBundle rb = getResourceBundle( locale );
            return rb.getString( "MODEL_DESCRIPTION_STANDARD" );
        }

        /**
         * Execute the required changes upon an AlarmTrigger.
         * The AlarmSystem calls this method, for the AlarmStatus
         * in the AlarmPoint to be updated, as well as an AlarmEvent
         * to be created.
         *
         * @param alarm the AlarmPoint object to be updated.
         * @param trigger the AlarmTrigger that was used.
         *
         * @return the resulting AlarmEvent, or null if the trigger causes no state change.
         *
         * @throws IllegalArgumentException if the trigger is not one supported by this model.
         */
        @Override
        public AlarmEvent evaluate( AlarmPoint alarm, String trigger )
        {
            if( trigger.equals( AlarmPoint.TRIGGER_ACTIVATE ) )
            {
                return activation( alarm );
            }
            else if( trigger.equals( AlarmPoint.TRIGGER_DEACTIVATE ) )
            {
                return deactivation( alarm );
            }
            else if( trigger.equals( AlarmPoint.TRIGGER_ACKNOWLEDGE ) )
            {
                return acknowledge( alarm );
            }
            else
            {
                throw new IllegalArgumentException( "'" + trigger + "' is not supported by this AlarmModel." );
            }
        }

        /**
         * Returns all the supported AlarmPoint triggers.
         */
        @Override
        public List<String> alarmTriggers()
        {
            return TRIGGER_LIST;
        }

        /**
         * Returns all the statuses known to this model.
         */
        @Override
        public List<String> statusList()
        {
            return STATUS_LIST;
        }

        /**
         * Maps a boolean condition onto the trigger that would move the given
         * status towards that condition, or null if no transition is needed.
         */
        @Override
        public String computeTrigger( AlarmStatus status, boolean condition )
        {
            if( condition )
            {
                if( ( status.name( null ).equals( AlarmPoint.STATUS_DEACTIVATED ) ) ||
                    ( status.name( null ).equals( AlarmPoint.STATUS_NORMAL ) ) )
                {
                    return AlarmPoint.TRIGGER_ACTIVATE;
                }
            }
            else
            {
                if( ( status.name( null ).equals( AlarmPoint.STATUS_ACTIVATED ) ) ||
                    ( status.name( null ).equals( AlarmPoint.STATUS_ACKNOWLEDGED ) ) )
                {
                    return AlarmPoint.TRIGGER_DEACTIVATE;
                }
            }
            return null;
        }

        /**
         * Returns true when the status represents an active condition
         * (Activated or Acknowledged).
         */
        @Override
        public boolean computeCondition( AlarmStatus status )
        {
            return ( status.name( null ).equals( AlarmPoint.STATUS_ACTIVATED ) ) ||
                   ( status.name( null ).equals( AlarmPoint.STATUS_ACKNOWLEDGED ) );
        }

        /**
         * StateMachine change for activate trigger.
         *
         * @param alarm the alarm that is being triggered.
         *
         * @return The event to be fired on activation, or null if the alarm is
         *         not in a state from which activation is a transition.
         */
        private AlarmEvent activation( AlarmPoint alarm )
        {
            AlarmStatus oldStatus = alarm.currentStatus();
            if( ( oldStatus.name( null ).equals( AlarmPoint.STATUS_NORMAL ) ) ||
                ( oldStatus.name( null ).equals( AlarmPoint.STATUS_DEACTIVATED ) ) )
            {
                AlarmStatus newStatus = createStatus( AlarmPoint.STATUS_ACTIVATED );
                return createEvent( ( (Identity) alarm ), oldStatus, newStatus, AlarmPoint.EVENT_ACTIVATION );
            }
            return null;
        }

        /**
         * StateMachine change for deactivate trigger.
         *
         * @param alarm the alarm that is being triggered.
         *
         * @return The event to be fired on deactivation, or null if the alarm is
         *         not in a state from which deactivation is a transition.
         */
        private AlarmEvent deactivation( AlarmPoint alarm )
        {
            AlarmStatus oldStatus = alarm.currentStatus();
            // Acknowledged -> Normal, Activated -> Deactivated; see the transition
            // matrix in the interface javadoc.
            if( oldStatus.name( null ).equals( AlarmPoint.STATUS_ACKNOWLEDGED ) )
            {
                AlarmStatus newStatus = createStatus( AlarmPoint.STATUS_NORMAL );
                return createEvent( ( (Identity) alarm ), oldStatus, newStatus, AlarmPoint.EVENT_DEACTIVATION );
            }
            else if( oldStatus.name( null ).equals( AlarmPoint.STATUS_ACTIVATED ) )
            {
                AlarmStatus newStatus = createStatus( AlarmPoint.STATUS_DEACTIVATED );
                return createEvent( ( (Identity) alarm ), oldStatus, newStatus, AlarmPoint.EVENT_DEACTIVATION );
            }
            return null;
        }

        /**
         * StateMachine change for acknowledge trigger.
         *
         * @param alarm the alarm that is being triggered.
         *
         * @return The event to be fired on acknowledge, or null if the alarm is
         *         not in a state from which acknowledgement is a transition.
         */
        private AlarmEvent acknowledge( AlarmPoint alarm )
        {
            AlarmStatus oldStatus = alarm.currentStatus();
            // Deactivated -> Normal, Activated -> Acknowledged; see the transition
            // matrix in the interface javadoc.
            if( oldStatus.name( null ).equals( AlarmPoint.STATUS_DEACTIVATED ) )
            {
                AlarmStatus newStatus = createStatus( AlarmPoint.STATUS_NORMAL );
                return createEvent( ( (Identity) alarm ), oldStatus, newStatus, AlarmPoint.EVENT_ACKNOWLEDGEMENT );
            }
            else if( oldStatus.name( null ).equals( AlarmPoint.STATUS_ACTIVATED ) )
            {
                AlarmStatus newStatus = createStatus( AlarmPoint.STATUS_ACKNOWLEDGED );
                return createEvent( ( (Identity) alarm ), oldStatus, newStatus, AlarmPoint.EVENT_ACKNOWLEDGEMENT );
            }
            return null;
        }

        /**
         * Builds a new AlarmStatus value with the given status name and the
         * current time as creation date.
         */
        private AlarmStatus createStatus( String status )
        {
            ValueBuilder<AlarmStatus> builder = vbf.newValueBuilder( AlarmStatus.class );
            AlarmStatus.State prototype = builder.prototypeFor( AlarmStatus.State.class );
            prototype.name().set( status );
            prototype.creationDate().set( new Date() );
            return builder.newInstance();
        }

        /**
         * Builds the AlarmEvent value describing a completed state transition.
         */
        private AlarmEvent createEvent( Identity alarmId,
                                        AlarmStatus oldStatus,
                                        AlarmStatus newStatus,
                                        String eventSystemName
        )
        {
            ValueBuilder<AlarmEvent> builder = vbf.newValueBuilder( AlarmEvent.class );
            AlarmEvent prototype = builder.prototype();
            prototype.alarmIdentity().set( alarmId.identity().get() );
            prototype.eventTime().set( new Date() );
            prototype.newStatus().set( newStatus );
            prototype.oldStatus().set( oldStatus );
            prototype.systemName().set( eventSystemName );
            return builder.newInstance();
        }
    }
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.core.marketdatasnapshot.impl;
import java.util.Map;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import org.threeten.bp.Instant;
import com.google.common.collect.Maps;
import com.opengamma.core.marketdatasnapshot.CurveKey;
import com.opengamma.core.marketdatasnapshot.CurveSnapshot;
import com.opengamma.core.marketdatasnapshot.NamedSnapshot;
import com.opengamma.core.marketdatasnapshot.StructuredMarketDataSnapshot;
import com.opengamma.core.marketdatasnapshot.SurfaceKey;
import com.opengamma.core.marketdatasnapshot.SurfaceSnapshot;
import com.opengamma.core.marketdatasnapshot.UnstructuredMarketDataSnapshot;
import com.opengamma.core.marketdatasnapshot.VolatilityCubeKey;
import com.opengamma.core.marketdatasnapshot.VolatilityCubeSnapshot;
import com.opengamma.core.marketdatasnapshot.VolatilitySurfaceKey;
import com.opengamma.core.marketdatasnapshot.VolatilitySurfaceSnapshot;
import com.opengamma.core.marketdatasnapshot.YieldCurveKey;
import com.opengamma.core.marketdatasnapshot.YieldCurveSnapshot;
import com.opengamma.id.UniqueId;
import com.opengamma.util.PublicSPI;
/**
* A snapshot of market data potentially altered by hand.
*/
@BeanDefinition
@PublicSPI
public class ManageableMarketDataSnapshot extends DirectBean implements StructuredMarketDataSnapshot {
  /**
   * The unique identifier of the snapshot. This must be null when adding to a master and not null when retrieved from a master.
   */
  @PropertyDefinition(overrideGet = true)
  private UniqueId _uniqueId;
  /**
   * The name of the snapshot intended for display purposes.
   */
  @PropertyDefinition(overrideGet = true)
  private String _name;
  /**
   * The name of the view on which this snapshot was based.
   */
  @PropertyDefinition(overrideGet = true)
  private String _basisViewName;
  /**
   * The snapshot values for the global scope (see the constructors' globalValues parameter).
   */
  @PropertyDefinition(overrideGet = true)
  private ManageableUnstructuredMarketDataSnapshot _globalValues;
  /**
   * The yield curves in this snapshot.
   */
  @PropertyDefinition(overrideGet = true)
  private Map<YieldCurveKey, YieldCurveSnapshot> _yieldCurves;
  /**
   * The curves in this snapshot.
   */
  @PropertyDefinition(overrideGet = true)
  private Map<CurveKey, CurveSnapshot> _curves;
  /**
   * The vol cubes in this snapshot.
   */
  @PropertyDefinition(overrideGet = true)
  private Map<VolatilityCubeKey, VolatilityCubeSnapshot> _volatilityCubes;
  /**
   * The vol surfaces in this snapshot.
   */
  @PropertyDefinition(overrideGet = true)
  private Map<VolatilitySurfaceKey, VolatilitySurfaceSnapshot> _volatilitySurfaces = Maps.newHashMap(); // Initialize in order to handle old format
  /**
   * The surfaces in this snapshot.
   */
  @PropertyDefinition(overrideGet = true)
  private Map<SurfaceKey, SurfaceSnapshot> _surfaces;
  /**
   * The valuation time of the snapshot. May be null; getValuationTime() then infers
   * it from the curve snapshots for backwards compatibility.
   */
  @PropertyDefinition(get = "manual")
  private Instant _valuationTime;
  /**
   * Creates an empty snapshot. Required by Joda-Beans deserialization.
   */
  public ManageableMarketDataSnapshot() {
    super();
  }
  /**
   * Creates a snapshot.
   * <p>
   * Note: unlike the wider constructors, this one leaves {@code _volatilitySurfaces}
   * at its field-initialized empty map (old-format handling) rather than null.
   *
   * @param name
   *          the name of the snapshot
   * @param globalValues
   *          the snapshot for the global scope, copied defensively; may be null
   * @param yieldCurves
   *          the yield curves
   */
  public ManageableMarketDataSnapshot(final String name, final UnstructuredMarketDataSnapshot globalValues,
      final Map<YieldCurveKey, YieldCurveSnapshot> yieldCurves) {
    _name = name;
    _globalValues = globalValues != null ? new ManageableUnstructuredMarketDataSnapshot(globalValues) : null;
    _yieldCurves = yieldCurves;
  }
/**
* Creates a snapshot.
*
* @param name
* the name of the snapshot
* @param globalValues
* the snapshot for the global scope
* @param yieldCurves
* the yield curves
* @param volatilitySurfaces
* the volatility surfaces
* @param volatilityCubes
* the volatility cubes
*/
public ManageableMarketDataSnapshot(final String name, final UnstructuredMarketDataSnapshot globalValues,
final Map<YieldCurveKey, YieldCurveSnapshot> yieldCurves, final Map<VolatilitySurfaceKey, VolatilitySurfaceSnapshot> volatilitySurfaces,
final Map<VolatilityCubeKey, VolatilityCubeSnapshot> volatilityCubes) {
_name = name;
_globalValues = globalValues != null ? new ManageableUnstructuredMarketDataSnapshot(globalValues) : null;
_yieldCurves = yieldCurves;
_curves = null;
_volatilitySurfaces = volatilitySurfaces;
_volatilityCubes = volatilityCubes;
}
/**
* Creates a snapshot.
*
* @param name
* the name of the snapshot
* @param globalValues
* the snapshot for the global scope
* @param yieldCurves
* the yield curves
* @param curves
* the curves
* @param volatilitySurfaces
* the volatility surfaces
* @param volatilityCubes
* the volatility cubes
*/
public ManageableMarketDataSnapshot(final String name, final UnstructuredMarketDataSnapshot globalValues,
final Map<YieldCurveKey, YieldCurveSnapshot> yieldCurves, final Map<CurveKey, CurveSnapshot> curves,
final Map<VolatilitySurfaceKey, VolatilitySurfaceSnapshot> volatilitySurfaces, final Map<VolatilityCubeKey, VolatilityCubeSnapshot> volatilityCubes) {
_name = name;
_globalValues = globalValues != null ? new ManageableUnstructuredMarketDataSnapshot(globalValues) : null;
_yieldCurves = yieldCurves;
_curves = curves;
_volatilitySurfaces = volatilitySurfaces;
_volatilityCubes = volatilityCubes;
}
  /**
   * Creates a snapshot. This is the canonical constructor setting every structured part.
   *
   * @param name
   *          the name of the snapshot
   * @param globalValues
   *          the snapshot for the global scope, copied defensively; may be null
   * @param yieldCurves
   *          the yield curves
   * @param curves
   *          the curves
   * @param volatilitySurfaces
   *          the volatility surfaces
   * @param volatilityCubes
   *          the volatility cubes
   * @param surfaces
   *          the surfaces
   */
  public ManageableMarketDataSnapshot(final String name, final UnstructuredMarketDataSnapshot globalValues,
      final Map<YieldCurveKey, YieldCurveSnapshot> yieldCurves, final Map<CurveKey, CurveSnapshot> curves,
      final Map<VolatilitySurfaceKey, VolatilitySurfaceSnapshot> volatilitySurfaces,
      final Map<VolatilityCubeKey, VolatilityCubeSnapshot> volatilityCubes,
      final Map<SurfaceKey, SurfaceSnapshot> surfaces) {
    _name = name;
    _globalValues = globalValues != null ? new ManageableUnstructuredMarketDataSnapshot(globalValues) : null;
    _yieldCurves = yieldCurves;
    _curves = curves;
    _volatilitySurfaces = volatilitySurfaces;
    _volatilityCubes = volatilityCubes;
    _surfaces = surfaces;
  }
  /**
   * Creates a snapshot copying all structured data from another snapshot,
   * including the basis view name and valuation time.
   *
   * @param copyFrom
   *          the snapshot to copy from, not null
   */
  public ManageableMarketDataSnapshot(final StructuredMarketDataSnapshot copyFrom) {
    this(copyFrom.getName(), copyFrom.getGlobalValues(), copyFrom.getYieldCurves(), copyFrom.getCurves(), copyFrom.getVolatilitySurfaces(),
        copyFrom.getVolatilityCubes(), copyFrom.getSurfaces());
    _basisViewName = copyFrom.getBasisViewName();
    _valuationTime = copyFrom.getValuationTime();
  }
  /**
   * Returns this snapshot carrying the given unique identifier.
   * <p>
   * As this is a mutable object, the identifier is set in place and
   * {@code this} is returned rather than a copy.
   */
  @Override
  public NamedSnapshot withUniqueId(final UniqueId uniqueId) {
    // As this is a mutable object, we just update and return it
    setUniqueId(uniqueId);
    return this;
  }
/**
* Gets the valuation time of the snapshot.
*
* @return the valuation time of the snapshot, or null if no valuation time could be inferred from the snapshot
*/
@Override
public Instant getValuationTime() {
// [PLAT-1393] manual implementation provided for backwards compatibility.
// older versions took the valuation time from the yieldCurves
if (_valuationTime != null) {
return _valuationTime;
}
Instant latestTimestamp = null;
final Map<YieldCurveKey, YieldCurveSnapshot> yieldCurves = getYieldCurves();
if (yieldCurves != null) {
for (final YieldCurveSnapshot yieldCurveSnapshot : yieldCurves.values()) {
if (latestTimestamp == null || latestTimestamp.isBefore(yieldCurveSnapshot.getValuationTime())) {
latestTimestamp = yieldCurveSnapshot.getValuationTime();
}
}
}
final Map<CurveKey, CurveSnapshot> curves = getCurves();
if (curves != null) {
for (final CurveSnapshot curveSnapshot : curves.values()) {
if (latestTimestamp == null || latestTimestamp.isBefore(curveSnapshot.getValuationTime())) {
latestTimestamp = curveSnapshot.getValuationTime();
}
}
}
return latestTimestamp;
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code ManageableMarketDataSnapshot}.
* @return the meta-bean, not null
*/
public static ManageableMarketDataSnapshot.Meta meta() {
return ManageableMarketDataSnapshot.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(ManageableMarketDataSnapshot.Meta.INSTANCE);
}
@Override
public ManageableMarketDataSnapshot.Meta metaBean() {
return ManageableMarketDataSnapshot.Meta.INSTANCE;
}
//-----------------------------------------------------------------------
/**
* Gets the unique identifier of the snapshot. This must be null when adding to a master and not null when retrieved from a master.
* @return the value of the property
*/
@Override
public UniqueId getUniqueId() {
return _uniqueId;
}
/**
* Sets the unique identifier of the snapshot. This must be null when adding to a master and not null when retrieved from a master.
* @param uniqueId the new value of the property
*/
public void setUniqueId(UniqueId uniqueId) {
this._uniqueId = uniqueId;
}
/**
* Gets the the {@code uniqueId} property.
* @return the property, not null
*/
public final Property<UniqueId> uniqueId() {
return metaBean().uniqueId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the name of the snapshot intended for display purposes.
* @return the value of the property
*/
@Override
public String getName() {
return _name;
}
/**
* Sets the name of the snapshot intended for display purposes.
* @param name the new value of the property
*/
public void setName(String name) {
this._name = name;
}
/**
* Gets the the {@code name} property.
* @return the property, not null
*/
public final Property<String> name() {
return metaBean().name().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the name of the view on which this snapshot was based.
* @return the value of the property
*/
@Override
public String getBasisViewName() {
return _basisViewName;
}
/**
* Sets the name of the view on which this snapshot was based.
* @param basisViewName the new value of the property
*/
public void setBasisViewName(String basisViewName) {
this._basisViewName = basisViewName;
}
/**
* Gets the the {@code basisViewName} property.
* @return the property, not null
*/
public final Property<String> basisViewName() {
return metaBean().basisViewName().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the globalValues.
* @return the value of the property
*/
@Override
public ManageableUnstructuredMarketDataSnapshot getGlobalValues() {
return _globalValues;
}
/**
* Sets the globalValues.
* @param globalValues the new value of the property
*/
public void setGlobalValues(ManageableUnstructuredMarketDataSnapshot globalValues) {
this._globalValues = globalValues;
}
/**
* Gets the the {@code globalValues} property.
* @return the property, not null
*/
public final Property<ManageableUnstructuredMarketDataSnapshot> globalValues() {
return metaBean().globalValues().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the yield curves in this snapshot.
* @return the value of the property
*/
@Override
public Map<YieldCurveKey, YieldCurveSnapshot> getYieldCurves() {
return _yieldCurves;
}
/**
* Sets the yield curves in this snapshot.
* @param yieldCurves the new value of the property
*/
public void setYieldCurves(Map<YieldCurveKey, YieldCurveSnapshot> yieldCurves) {
this._yieldCurves = yieldCurves;
}
/**
* Gets the the {@code yieldCurves} property.
* @return the property, not null
*/
public final Property<Map<YieldCurveKey, YieldCurveSnapshot>> yieldCurves() {
return metaBean().yieldCurves().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the yield curves in this snapshot.
* @return the value of the property
*/
@Override
public Map<CurveKey, CurveSnapshot> getCurves() {
return _curves;
}
/**
* Sets the yield curves in this snapshot.
* @param curves the new value of the property
*/
public void setCurves(Map<CurveKey, CurveSnapshot> curves) {
this._curves = curves;
}
/**
* Gets the the {@code curves} property.
* @return the property, not null
*/
public final Property<Map<CurveKey, CurveSnapshot>> curves() {
return metaBean().curves().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the vol cubes in this snapshot.
* @return the value of the property
*/
@Override
public Map<VolatilityCubeKey, VolatilityCubeSnapshot> getVolatilityCubes() {
return _volatilityCubes;
}
/**
* Sets the vol cubes in this snapshot.
* @param volatilityCubes the new value of the property
*/
public void setVolatilityCubes(Map<VolatilityCubeKey, VolatilityCubeSnapshot> volatilityCubes) {
this._volatilityCubes = volatilityCubes;
}
/**
* Gets the the {@code volatilityCubes} property.
* @return the property, not null
*/
public final Property<Map<VolatilityCubeKey, VolatilityCubeSnapshot>> volatilityCubes() {
return metaBean().volatilityCubes().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the volatility surfaces in this snapshot.
* @return the value of the property
*/
@Override
public Map<VolatilitySurfaceKey, VolatilitySurfaceSnapshot> getVolatilitySurfaces() {
return _volatilitySurfaces; // returns the backing map itself, not a defensive copy
}
/**
* Sets the volatility surfaces in this snapshot.
* @param volatilitySurfaces the new value of the property
*/
public void setVolatilitySurfaces(Map<VolatilitySurfaceKey, VolatilitySurfaceSnapshot> volatilitySurfaces) {
this._volatilitySurfaces = volatilitySurfaces;
}
/**
* Gets the {@code volatilitySurfaces} property.
* @return the property, not null
*/
public final Property<Map<VolatilitySurfaceKey, VolatilitySurfaceSnapshot>> volatilitySurfaces() {
return metaBean().volatilitySurfaces().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the surfaces in this snapshot.
* @return the value of the property
*/
@Override
public Map<SurfaceKey, SurfaceSnapshot> getSurfaces() {
return _surfaces; // returns the backing map itself, not a defensive copy
}
/**
* Sets the surfaces in this snapshot.
* @param surfaces the new value of the property
*/
public void setSurfaces(Map<SurfaceKey, SurfaceSnapshot> surfaces) {
this._surfaces = surfaces;
}
/**
* Gets the {@code surfaces} property.
* @return the property, not null
*/
public final Property<Map<SurfaceKey, SurfaceSnapshot>> surfaces() {
return metaBean().surfaces().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Sets the valuation time of the snapshot.
* @param valuationTime the new value of the property
*/
public void setValuationTime(Instant valuationTime) {
this._valuationTime = valuationTime;
}
/**
* Gets the {@code valuationTime} property.
* @return the property, not null
*/
public final Property<Instant> valuationTime() {
return metaBean().valuationTime().createProperty(this);
}
//-----------------------------------------------------------------------
@Override
public ManageableMarketDataSnapshot clone() {
// delegates cloning to the Joda-Beans utility
return JodaBeanUtils.cloneAlways(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
// property-by-property comparison; JodaBeanUtils.equal is null-tolerant
ManageableMarketDataSnapshot other = (ManageableMarketDataSnapshot) obj;
return JodaBeanUtils.equal(getUniqueId(), other.getUniqueId()) &&
JodaBeanUtils.equal(getName(), other.getName()) &&
JodaBeanUtils.equal(getBasisViewName(), other.getBasisViewName()) &&
JodaBeanUtils.equal(getGlobalValues(), other.getGlobalValues()) &&
JodaBeanUtils.equal(getYieldCurves(), other.getYieldCurves()) &&
JodaBeanUtils.equal(getCurves(), other.getCurves()) &&
JodaBeanUtils.equal(getVolatilityCubes(), other.getVolatilityCubes()) &&
JodaBeanUtils.equal(getVolatilitySurfaces(), other.getVolatilitySurfaces()) &&
JodaBeanUtils.equal(getSurfaces(), other.getSurfaces()) &&
JodaBeanUtils.equal(getValuationTime(), other.getValuationTime());
}
return false;
}
@Override
public int hashCode() {
// 31-multiplier hash over the same properties compared in equals()
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(getUniqueId());
hash = hash * 31 + JodaBeanUtils.hashCode(getName());
hash = hash * 31 + JodaBeanUtils.hashCode(getBasisViewName());
hash = hash * 31 + JodaBeanUtils.hashCode(getGlobalValues());
hash = hash * 31 + JodaBeanUtils.hashCode(getYieldCurves());
hash = hash * 31 + JodaBeanUtils.hashCode(getCurves());
hash = hash * 31 + JodaBeanUtils.hashCode(getVolatilityCubes());
hash = hash * 31 + JodaBeanUtils.hashCode(getVolatilitySurfaces());
hash = hash * 31 + JodaBeanUtils.hashCode(getSurfaces());
hash = hash * 31 + JodaBeanUtils.hashCode(getValuationTime());
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(352);
buf.append("ManageableMarketDataSnapshot{");
int len = buf.length();
toString(buf);
if (buf.length() > len) {
// strip the trailing ", " appended by toString(StringBuilder)
buf.setLength(buf.length() - 2);
}
buf.append('}');
return buf.toString();
}
// Appends every property as "name=value, "; the caller trims the final separator.
protected void toString(StringBuilder buf) {
buf.append("uniqueId").append('=').append(JodaBeanUtils.toString(getUniqueId())).append(',').append(' ');
buf.append("name").append('=').append(JodaBeanUtils.toString(getName())).append(',').append(' ');
buf.append("basisViewName").append('=').append(JodaBeanUtils.toString(getBasisViewName())).append(',').append(' ');
buf.append("globalValues").append('=').append(JodaBeanUtils.toString(getGlobalValues())).append(',').append(' ');
buf.append("yieldCurves").append('=').append(JodaBeanUtils.toString(getYieldCurves())).append(',').append(' ');
buf.append("curves").append('=').append(JodaBeanUtils.toString(getCurves())).append(',').append(' ');
buf.append("volatilityCubes").append('=').append(JodaBeanUtils.toString(getVolatilityCubes())).append(',').append(' ');
buf.append("volatilitySurfaces").append('=').append(JodaBeanUtils.toString(getVolatilitySurfaces())).append(',').append(' ');
buf.append("surfaces").append('=').append(JodaBeanUtils.toString(getSurfaces())).append(',').append(' ');
buf.append("valuationTime").append('=').append(JodaBeanUtils.toString(getValuationTime())).append(',').append(' ');
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code ManageableMarketDataSnapshot}.
* NOTE: generated by the Joda-Beans code generator; do not edit by hand —
* regenerate instead. Maps property names to typed meta-properties and
* provides reflective get/set access for the bean.
*/
public static class Meta extends DirectMetaBean {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code uniqueId} property.
*/
private final MetaProperty<UniqueId> _uniqueId = DirectMetaProperty.ofReadWrite(
this, "uniqueId", ManageableMarketDataSnapshot.class, UniqueId.class);
/**
* The meta-property for the {@code name} property.
*/
private final MetaProperty<String> _name = DirectMetaProperty.ofReadWrite(
this, "name", ManageableMarketDataSnapshot.class, String.class);
/**
* The meta-property for the {@code basisViewName} property.
*/
private final MetaProperty<String> _basisViewName = DirectMetaProperty.ofReadWrite(
this, "basisViewName", ManageableMarketDataSnapshot.class, String.class);
/**
* The meta-property for the {@code globalValues} property.
*/
private final MetaProperty<ManageableUnstructuredMarketDataSnapshot> _globalValues = DirectMetaProperty.ofReadWrite(
this, "globalValues", ManageableMarketDataSnapshot.class, ManageableUnstructuredMarketDataSnapshot.class);
/**
* The meta-property for the {@code yieldCurves} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<Map<YieldCurveKey, YieldCurveSnapshot>> _yieldCurves = DirectMetaProperty.ofReadWrite(
this, "yieldCurves", ManageableMarketDataSnapshot.class, (Class) Map.class);
/**
* The meta-property for the {@code curves} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<Map<CurveKey, CurveSnapshot>> _curves = DirectMetaProperty.ofReadWrite(
this, "curves", ManageableMarketDataSnapshot.class, (Class) Map.class);
/**
* The meta-property for the {@code volatilityCubes} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<Map<VolatilityCubeKey, VolatilityCubeSnapshot>> _volatilityCubes = DirectMetaProperty.ofReadWrite(
this, "volatilityCubes", ManageableMarketDataSnapshot.class, (Class) Map.class);
/**
* The meta-property for the {@code volatilitySurfaces} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<Map<VolatilitySurfaceKey, VolatilitySurfaceSnapshot>> _volatilitySurfaces = DirectMetaProperty.ofReadWrite(
this, "volatilitySurfaces", ManageableMarketDataSnapshot.class, (Class) Map.class);
/**
* The meta-property for the {@code surfaces} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<Map<SurfaceKey, SurfaceSnapshot>> _surfaces = DirectMetaProperty.ofReadWrite(
this, "surfaces", ManageableMarketDataSnapshot.class, (Class) Map.class);
/**
* The meta-property for the {@code valuationTime} property.
*/
private final MetaProperty<Instant> _valuationTime = DirectMetaProperty.ofReadWrite(
this, "valuationTime", ManageableMarketDataSnapshot.class, Instant.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"uniqueId",
"name",
"basisViewName",
"globalValues",
"yieldCurves",
"curves",
"volatilityCubes",
"volatilitySurfaces",
"surfaces",
"valuationTime");
/**
* Restricted constructor.
*/
protected Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
// the case labels are String.hashCode() values precomputed by the generator
switch (propertyName.hashCode()) {
case -294460212: // uniqueId
return _uniqueId;
case 3373707: // name
return _name;
case 858810670: // basisViewName
return _basisViewName;
case -591591771: // globalValues
return _globalValues;
case 119589713: // yieldCurves
return _yieldCurves;
case -1349116572: // curves
return _curves;
case -2137883207: // volatilityCubes
return _volatilityCubes;
case -1329840981: // volatilitySurfaces
return _volatilitySurfaces;
case -1615615642: // surfaces
return _surfaces;
case 113591406: // valuationTime
return _valuationTime;
}
return super.metaPropertyGet(propertyName);
}
@Override
public BeanBuilder<? extends ManageableMarketDataSnapshot> builder() {
return new DirectBeanBuilder<ManageableMarketDataSnapshot>(new ManageableMarketDataSnapshot());
}
@Override
public Class<? extends ManageableMarketDataSnapshot> beanType() {
return ManageableMarketDataSnapshot.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return _metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code uniqueId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueId> uniqueId() {
return _uniqueId;
}
/**
* The meta-property for the {@code name} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> name() {
return _name;
}
/**
* The meta-property for the {@code basisViewName} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> basisViewName() {
return _basisViewName;
}
/**
* The meta-property for the {@code globalValues} property.
* @return the meta-property, not null
*/
public final MetaProperty<ManageableUnstructuredMarketDataSnapshot> globalValues() {
return _globalValues;
}
/**
* The meta-property for the {@code yieldCurves} property.
* @return the meta-property, not null
*/
public final MetaProperty<Map<YieldCurveKey, YieldCurveSnapshot>> yieldCurves() {
return _yieldCurves;
}
/**
* The meta-property for the {@code curves} property.
* @return the meta-property, not null
*/
public final MetaProperty<Map<CurveKey, CurveSnapshot>> curves() {
return _curves;
}
/**
* The meta-property for the {@code volatilityCubes} property.
* @return the meta-property, not null
*/
public final MetaProperty<Map<VolatilityCubeKey, VolatilityCubeSnapshot>> volatilityCubes() {
return _volatilityCubes;
}
/**
* The meta-property for the {@code volatilitySurfaces} property.
* @return the meta-property, not null
*/
public final MetaProperty<Map<VolatilitySurfaceKey, VolatilitySurfaceSnapshot>> volatilitySurfaces() {
return _volatilitySurfaces;
}
/**
* The meta-property for the {@code surfaces} property.
* @return the meta-property, not null
*/
public final MetaProperty<Map<SurfaceKey, SurfaceSnapshot>> surfaces() {
return _surfaces;
}
/**
* The meta-property for the {@code valuationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<Instant> valuationTime() {
return _valuationTime;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case -294460212: // uniqueId
return ((ManageableMarketDataSnapshot) bean).getUniqueId();
case 3373707: // name
return ((ManageableMarketDataSnapshot) bean).getName();
case 858810670: // basisViewName
return ((ManageableMarketDataSnapshot) bean).getBasisViewName();
case -591591771: // globalValues
return ((ManageableMarketDataSnapshot) bean).getGlobalValues();
case 119589713: // yieldCurves
return ((ManageableMarketDataSnapshot) bean).getYieldCurves();
case -1349116572: // curves
return ((ManageableMarketDataSnapshot) bean).getCurves();
case -2137883207: // volatilityCubes
return ((ManageableMarketDataSnapshot) bean).getVolatilityCubes();
case -1329840981: // volatilitySurfaces
return ((ManageableMarketDataSnapshot) bean).getVolatilitySurfaces();
case -1615615642: // surfaces
return ((ManageableMarketDataSnapshot) bean).getSurfaces();
case 113591406: // valuationTime
return ((ManageableMarketDataSnapshot) bean).getValuationTime();
}
return super.propertyGet(bean, propertyName, quiet);
}
@SuppressWarnings("unchecked")
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
switch (propertyName.hashCode()) {
case -294460212: // uniqueId
((ManageableMarketDataSnapshot) bean).setUniqueId((UniqueId) newValue);
return;
case 3373707: // name
((ManageableMarketDataSnapshot) bean).setName((String) newValue);
return;
case 858810670: // basisViewName
((ManageableMarketDataSnapshot) bean).setBasisViewName((String) newValue);
return;
case -591591771: // globalValues
((ManageableMarketDataSnapshot) bean).setGlobalValues((ManageableUnstructuredMarketDataSnapshot) newValue);
return;
case 119589713: // yieldCurves
((ManageableMarketDataSnapshot) bean).setYieldCurves((Map<YieldCurveKey, YieldCurveSnapshot>) newValue);
return;
case -1349116572: // curves
((ManageableMarketDataSnapshot) bean).setCurves((Map<CurveKey, CurveSnapshot>) newValue);
return;
case -2137883207: // volatilityCubes
((ManageableMarketDataSnapshot) bean).setVolatilityCubes((Map<VolatilityCubeKey, VolatilityCubeSnapshot>) newValue);
return;
case -1329840981: // volatilitySurfaces
((ManageableMarketDataSnapshot) bean).setVolatilitySurfaces((Map<VolatilitySurfaceKey, VolatilitySurfaceSnapshot>) newValue);
return;
case -1615615642: // surfaces
((ManageableMarketDataSnapshot) bean).setSurfaces((Map<SurfaceKey, SurfaceSnapshot>) newValue);
return;
case 113591406: // valuationTime
((ManageableMarketDataSnapshot) bean).setValuationTime((Instant) newValue);
return;
}
super.propertySet(bean, propertyName, newValue, quiet);
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
package org.paninij.systemgraph;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map.Entry;
import java.util.Set;
import javax.tools.JavaFileObject;
import com.sun.tools.javac.util.Assert;
import com.sun.tools.javac.util.List;
import boa.types.Ast.Modifier.Visibility;
import boa.types.Ast.*;
/**
*
* @author ganeshau, yuheng
*
*/
/**
 * Graph of a capsule system: nodes are capsule instances, edges are
 * procedure calls from one capsule's procedure to another's.
 */
public class SystemGraph {

    /** A capsule instance: its declaration, procedures, and named connections. */
    public static class Node {
        public Set<Method> procedures = new HashSet<Method>();
        public HashMap<String, Node> connections = new HashMap<String, Node>();
        public Declaration capsule;
        public String name; // name of the capsule instance

        public Node(String name, Declaration capsuleDecl) {
            this.capsule = capsuleDecl;
            this.name = name;
            for (Method method : capsuleDecl.getMethodsList()) {
                addProc(method);
            }
        }

        private void addProc(Method method) {
            procedures.add(method);
        }

        /** Wires the connection called {@code name} to {@code node} (may be null). */
        public void addConnection(String name, Node node) {
            connections.put(name, node);
        }

        @Override
        public String toString() {
            // Fix: build with StringBuilder instead of repeated String
            // concatenation in the loop; output format is unchanged.
            StringBuilder sb = new StringBuilder();
            sb.append(capsule.getName()).append(" ").append(name).append(" {");
            for (Method m : procedures) {
                sb.append(m.toString()).append(",");
            }
            sb.append("}");
            return sb.toString();
        }
    }

    /** A single call edge between two capsule procedures. */
    public static class Edge {
        public final Node fromNode, toNode;
        public final Method fromProcedure, toProcedure;
        // the source code position of this call edge
        public final int pos, line;
        // the source code statement of this call edge
        public final Expression tree;
        // the source file that contains this effect
        public final JavaFileObject src;

        public Edge(Node fromNode, Method fromProcedure, Node toNode,
                Method toProcedure, int pos, int line, Expression tree,
                JavaFileObject src) {
            this.fromNode = fromNode;
            this.fromProcedure = fromProcedure;
            this.toNode = toNode;
            this.toProcedure = toProcedure;
            this.pos = pos;
            this.line = line;
            this.tree = tree;
            this.src = src;
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            toString(sb);
            return sb.toString();
        }

        /** Appends "from.proc --> to.proc\n" to {@code sb}. */
        public void toString(StringBuilder sb) {
            sb.append(fromNode.name);
            sb.append(".");
            sb.append(fromProcedure);
            sb.append(" --> ");
            sb.append(toNode.name);
            sb.append(".");
            sb.append(toProcedure);
            sb.append("\n");
        }

        public final int hashCode() {
            // consistent with equals: endpoints, procedures and position
            return fromNode.hashCode() + toNode.hashCode()
                    + fromProcedure.hashCode() + toProcedure.hashCode() + pos;
        }

        public final boolean equals(Object obj) {
            if (obj instanceof Edge) {
                Edge other = (Edge) obj;
                return fromNode.equals(other.fromNode)
                        && toNode.equals(other.toNode)
                        && fromProcedure.equals(other.fromProcedure)
                        && toProcedure.equals(other.toProcedure)
                        && pos == other.pos;
            }
            return false;
        }
    }

    /** An ordered sequence of nodes, e.g. a call chain through the system. */
    public static class Path {
        public List<Node> nodes;

        public Path() {
            nodes = List.<Node> nil();
        }

        public Path(Node node) {
            nodes = List.<Node> nil();
            nodes = nodes.append(node);
        }

        /** Copy constructor: snapshots another path's node list. */
        public Path(Path path) {
            nodes = List.<Node> nil();
            nodes = nodes.appendList(path.nodes);
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            toString(sb);
            return sb.toString();
        }

        /** Appends "n0 --> n1 --> ..." to {@code sb}; assumes at least one node. */
        public void toString(StringBuilder sb) {
            sb.append(nodes.get(0).name);
            final int size = nodes.size();
            for (int i = 1; i < size; i++) {
                sb.append(" --> ");
                sb.append(nodes.get(i).name);
            }
        }
    }

    public HashMap<String, Node> nodes = new HashMap<String, Node>();
    public Set<Edge> edges = new HashSet<Edge>();
    // this is to save size of arrays. maybe view arrays as an whole instead.
    public HashMap<String, Integer> capsuleArrays = new HashMap<String, Integer>();

    /**
     * Registers a new capsule instance node; fails if {@code name} is taken.
     */
    public void addNode(String name, Declaration sym) {
        Assert.check(!nodes.containsKey(name),
                "Graph already contains node for " + name);
        nodes.put(name, new Node(name, sym));
    }

    /**
     * Wires {@code alias} on {@code fromNode} to the node named {@code toNode}.
     * The literal string "null" records an unconnected alias.
     */
    public void setConnection(String fromNode, String alias, String toNode) {
        // Fix: drop the redundant toString() on a String, and tolerate a null
        // reference (previously a NullPointerException) by treating it like
        // the literal "null".
        if (toNode == null || toNode.equals("null")) {
            nodes.get(fromNode).addConnection(alias, null);
        } else {
            nodes.get(fromNode).addConnection(alias, nodes.get(toNode));
        }
    }

    /** Records a call edge between two capsule procedures. */
    public void setEdge(Node fromNode, Method fromProc, Node toNode, Method toProc,
            int pos, int line, Expression tree, JavaFileObject src) {
        edges.add(new Edge(fromNode, fromProc, toNode, toProc, pos, line, tree,
                src));
    }

    @Override
    public String toString() {
        StringBuilder s = new StringBuilder();
        s.append("Nodes: \n");
        for (Node node : nodes.values()) {
            s.append("\t");
            s.append(node);
            s.append("\n");
        }
        s.append("Connections: \n");
        for (Node node : nodes.values()) {
            s.append("\tNode ");
            s.append(node.name);
            s.append(":\n");
            for (Entry<String, Node> c : node.connections.entrySet()) {
                s.append("\t\t");
                s.append(c.getKey());
                s.append(" --> ");
                if (c.getValue() != null)
                    s.append(c.getValue().name);
                else
                    s.append("null");
                s.append("\n");
            }
        }
        s.append("Edges: \n");
        for (Edge edge : edges) {
            // skip compiler-synthesized "$Original" procedures and non-public calls
            if (edge.fromProcedure.toString().contains("$Original")
                    || edge.toProcedure.toString().contains("$Original")
                    || !isPublic(edge.fromProcedure, edge.toProcedure))
                continue;
            edge.toString(s);
        }
        return s.toString();
    }

    /**
     * Returns the edges leaving {@code head} via {@code fromSym} that arrive
     * at the first node of {@code tail}.
     */
    public List<Edge> getEdges(Node head, Method fromSym, List<Node> tail) {
        // Fix: renamed the local so it no longer shadows the 'edges' field.
        List<Edge> result = List.<Edge> nil();
        for (Edge e : this.edges) {
            if (e.fromNode == head
                    && e.fromProcedure.toString().equals(fromSym.toString())
                    && e.toNode == tail.head) {
                result = result.append(e);
            }
        }
        return result;
    }

    //TODO: detectCyclicReferences is not implemented for now.

    /** True when both procedures carry the PUBLIC visibility modifier. */
    private boolean isPublic(Method fromProcedure, Method toProcedure) {
        return isPublic(fromProcedure) && isPublic(toProcedure);
    }

    /** True when any modifier on the procedure is PUBLIC. */
    private boolean isPublic(Method procedure) {
        for (Modifier modifier : procedure.getModifiersList()) {
            if (modifier.getVisibility() == Visibility.PUBLIC)
                return true;
        }
        return false;
    }
}
| |
// ----------------------------------------------------------------------------
// Copyright 2007-2014, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2013/12/26 Martin D. Flynn
// - Initial release
// ----------------------------------------------------------------------------
package org.opengts.util;
import java.io.*;
import java.util.*;
/**
*** Temperature container
**/
/**
 *** Temperature container: a Celsius reading paired with the epoch
 *** timestamp (in seconds) at which it was taken.
 **/
public class Temperature
{

    // ------------------------------------------------------------------------

    /** Minimum epoch timestamp (seconds) considered valid. */
    public static final long    MINIMUM_TIMESTAMP       = 1L;

    // ------------------------------------------------------------------------

    /** Sentinel value representing an invalid/unset temperature. */
    public static final double  INVALID_TEMPERATURE     = -9999.0;
    public static final double  TEMPERATURE_LIMIT_LO    = -273.15;    // degrees C (Kelvin)
    public static final double  TEMPERATURE_LIMIT_HI    = 1000.0;     // degrees C

    /**
     *** Returns true if the specified Celsius temperature is within a valid range
     **/
    public static boolean isValidTemperature(double C)
    {
        return ((C >= TEMPERATURE_LIMIT_LO) && (C <= TEMPERATURE_LIMIT_HI));
    }

    /**
     *** Returns true if the specified Celsius temperature is within a valid range
     *** (null-safe wrapper)
     **/
    public static boolean isValidTemperature(Temperature T)
    {
        if (T == null) {
            return false;
        } else {
            return T.isValidTemperature();
        }
    }

    /**
     *** Returns true if both the timestamp and the temperature are valid
     *** (null-safe wrapper)
     **/
    public static boolean isValid(Temperature T)
    {
        if (T == null) {
            return false;
        } else {
            return T.isValid();
        }
    }

    // ------------------------------------------------------------------------

    /**
     *** Fahrenheit to Celsius
     *** @param F  Fahrenheit temperature
     *** @return Celsius temperature
     **/
    public static double F2C(double F)
    {
        return (F - 32.0) * 5.0 / 9.0;
    }

    /**
     *** Celsius to Fahrenheit
     *** @param C  Celsius temperature
     *** @return Fahrenheit temperature
     **/
    public static double C2F(double C)
    {
        return (C * 9.0 / 5.0) + 32.0;
    }

    // ------------------------------------------------------------------------

    /**
     *** Enum: Temperature Units
     **/
    public enum TemperatureUnits implements EnumTools.StringLocale, EnumTools.IntValue {
        F (0, I18N.getString(Temperature.class,"Temperature.f","F")), // Fahrenheit
        C (1, I18N.getString(Temperature.class,"Temperature.c","C")); // Celsius (default)
        // ---
        private int       vv = 0;
        private I18N.Text aa = null;
        TemperatureUnits(int v, I18N.Text a)  { vv=v; aa=a; }
        public int     getIntValue()          { return vv; }
        public String  toString()             { return aa.toString(); }
        public String  toString(Locale loc)   { return aa.toString(loc); }
        public double  convertFromC(double c) { return this.equals(F)? C2F(c) : c; }
        public double  convertToC(double c)   { return this.equals(F)? F2C(c) : c; }
    };

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    private long   timestamp = 0L;
    // Fix: previously initialized to -999.0; use the named invalid sentinel for
    // consistency (the constructor always overwrites this, so behavior is the same).
    private double tempC     = INVALID_TEMPERATURE;

    /**
     *** Temperature constructor
     *** @param ts  The timestamp (in seconds)
     *** @param C   The temperature (in Celsius)
     **/
    public Temperature(long ts, double C)
    {
        this.timestamp = ts;
        this.tempC     = C;
    }

    // ------------------------------------------------------------------------

    /**
     *** Gets the temperature in Celsius units
     **/
    public double getTemperatureC()
    {
        return this.tempC;
    }

    /**
     *** Gets the temperature in Fahrenheit units
     **/
    public double getTemperatureF()
    {
        return C2F(this.tempC);
    }

    /**
     *** Gets the temperature
     *** @param F  true for Fahrenheit, false for Celsius
     *** @return The temperature
     **/
    public double getTemperature(boolean F)
    {
        return F? this.getTemperatureF() : this.getTemperatureC();
    }

    /**
     *** Returns true if the temperature is valid
     **/
    public boolean isValidTemperature()
    {
        return Temperature.isValidTemperature(this.getTemperatureC());
    }

    // ------------------------------------------------------------------------

    /**
     *** Gets the timestamp (in seconds)
     **/
    public long getTimestamp()
    {
        return this.timestamp;
    }

    /**
     *** Gets the timestamp (in milliseconds)
     **/
    public long getTimestampMillis()
    {
        return this.timestamp * 1000L;
    }

    /**
     *** Returns true if the timestamp is valid
     **/
    public boolean isValidTimestamp()
    {
        long ts = this.getTimestamp();
        return (ts >= MINIMUM_TIMESTAMP);
    }

    // ------------------------------------------------------------------------

    /**
     *** Returns true if the timestamp and temperature are valid
     **/
    public boolean isValid()
    {
        return (this.isValidTimestamp() && this.isValidTemperature());
    }

    // ------------------------------------------------------------------------

    /**
     *** Gets the String representation of this instance: "timestamp,tempC"
     **/
    public String toString()
    {
        // Fix: StringBuilder instead of StringBuffer (no synchronization needed
        // for a method-local buffer); output is unchanged.
        StringBuilder sb = new StringBuilder();
        sb.append(this.getTimestamp());
        sb.append(",");
        sb.append(this.getTemperatureC());
        return sb.toString();
    }

}
| |
import java.util.Random;
public class BinaryMatrixHelper {
/**
 * Multiplies a boolean matrix by a boolean vector over GF(2): entry i of the
 * result is the XOR over j of (matrix[i][j] AND vector[j]).
 *
 * @param matrix m-by-n boolean matrix
 * @param vector length-n boolean vector
 * @return the length-m product, or null if either argument is null, the
 *         matrix is empty, or the dimensions do not match
 */
public static boolean[] multiply(boolean[][] matrix, boolean[] vector) {
    // Fix: also reject an empty matrix — matrix[0] below previously threw
    // ArrayIndexOutOfBoundsException for matrix.length == 0.
    if (matrix == null || vector == null || matrix.length == 0) {
        return null;
    }
    if (matrix[0].length != vector.length) {
        return null;
    }
    int n = matrix[0].length;
    int m = matrix.length;
    boolean[] result = new boolean[m];
    for (int i = 0; i < m; i++) {
        for (int j = 0; j < n; j++) {
            result[i] = result[i] ^ (matrix[i][j] & vector[j]);
        }
    }
    return result;
}
/**
 * Deep-copies a boolean matrix.
 *
 * @param old the matrix to copy
 * @return a deep copy, or null if {@code old} is null or empty
 */
public static boolean[][] copyMatrix(boolean[][] old) {
    // Fix: null guard — old.length previously threw NullPointerException.
    if (old == null) {
        return null;
    }
    return copyMatrix(old, old.length);
}
/**
 * Deep-copies the first {@code upToRow} rows of a boolean matrix.
 *
 * @param old     the matrix to copy
 * @param upToRow copy at most this many rows
 * @return a deep copy of min(old.length, upToRow) rows, or null if
 *         {@code old} is null or no rows would be copied
 */
public static boolean[][] copyMatrix(boolean[][] old, int upToRow) {
    // Fix: null guard (old.length previously threw NullPointerException);
    // rows are copied with Arrays.copyOf instead of a manual element loop,
    // which also tolerates ragged rows.
    if (old == null) {
        return null;
    }
    int m = Math.min(old.length, upToRow);
    if (m == 0) {
        return null;
    }
    boolean[][] duplicate = new boolean[m][];
    for (int i = 0; i < m; i++) {
        duplicate[i] = java.util.Arrays.copyOf(old[i], old[i].length);
    }
    return duplicate;
}
/**
 * Draws random "regular" square matrices (fixed row weight) until one of
 * full rank is found, then returns it.
 *
 * @param size the matrix dimension
 * @return a full-rank size-by-size matrix, or null when size == 0
 */
public static boolean[][] getFullRankRegularMatrix(int size) {
    if (size == 0) {
        return null;
    }
    while (true) {
        boolean[][] candidate = getRandomRegularMatrix(size, size);
        // rank check runs on a copy so the candidate itself stays untouched
        if (gaussJordanElimination(copyMatrix(candidate), size) >= size) {
            return candidate;
        }
    }
}
/**
 * Draws uniformly random square matrices until one of full rank is found,
 * then returns it.
 *
 * @param size the matrix dimension
 * @return a full-rank size-by-size matrix, or null when size == 0
 */
public static boolean[][] getFullRankMatrix(int size) {
    if (size == 0) {
        return null;
    }
    while (true) {
        boolean[][] candidate = getRandomMatrix(size, size);
        // rank check runs on a copy so the candidate itself stays untouched
        if (gaussJordanElimination(copyMatrix(candidate), size) >= size) {
            return candidate;
        }
    }
}
/**
* given a vector x, "project" x into the span of the first reducedDim columns of matrix
* NOTE: both matrix and superMatrix are row-reduced IN PLACE by this call.
* @param matrix the code's matrix; must have the same row width as x
* @param superMatrix must have exactly one more row than matrix
* @param reducedDim number of rows of matrix to reduce over
* @param x the vector to decode
* @param offset added (XOR) to x before solving; same length as x
* @return a solution vector, or null on invalid input or when
*         isXInRange(x, matrix, offset) holds
*/
public static boolean[] decode(boolean[][] matrix, boolean[][] superMatrix, int reducedDim, boolean[] x, boolean[] offset){
if(matrix==null || superMatrix==null || matrix.length==0 || x==null || offset==null
|| x.length!= offset.length || matrix[0].length!= x.length
|| superMatrix.length!=matrix.length+1){
return null;
}
// presumably rejects inputs already covered by the code — TODO confirm
// against isXInRange (defined elsewhere in this file)
if(isXInRange(x, matrix, offset)){
return null;
}
boolean[] xPlusOffset = addVector(x,offset);
boolean[] temp=null;
boolean[] solution=null;
// reduce both systems in place before solving
gaussJordanElimination(matrix, reducedDim);
gaussJordanElimination(superMatrix, reducedDim+1);
// printMatrix(superMatrix);
// retry with randomly chosen basis rows until a solvable system is hit;
// NOTE(review): this loop does not terminate if no basis row ever yields
// a solution
while(solution ==null){
boolean[] basis = superMatrix[(int) ((reducedDim+1)*Math.random())];
temp = addVector(xPlusOffset, basis);
solution = findSolution(temp, matrix);
}
// side effect: prints the decoded vector to stdout (debug output)
BinaryMatrixHelper.printVector(addVector(temp,offset));
return solution;
}
/**
 * Component-wise XOR of two boolean vectors (vector addition over GF(2)).
 *
 * @param x      first vector
 * @param offset second vector
 * @return the XOR of the two vectors, or null if either is null or the
 *         lengths differ
 */
public static boolean[] addVector(boolean[] x, boolean[] offset) {
    if (x == null || offset == null || x.length != offset.length) {
        return null;
    }
    boolean[] sum = new boolean[x.length];
    for (int i = 0; i < sum.length; i++) {
        sum[i] = x[i] ^ offset[i];
    }
    return sum;
}
/**
 * Randomly permutes the columns of the matrix in place (Fisher-Yates over
 * column indices). Null or empty matrices are left untouched.
 *
 * @param matrix the matrix whose columns are shuffled
 */
public static void shuffleColumns(boolean[][] matrix) {
    if (matrix == null || matrix.length == 0) {
        return;
    }
    for (int i = matrix[0].length - 1; i > 0; i--) {
        int j = (int) (Math.random() * (i + 1));
        swapColumns(i, j, matrix);
    }
}
/**
* Builds a generator matrix from a parity-check matrix: the rows of the
* returned matrix span the null space of h_. The input is not modified
* (a copy is row-reduced instead).
* @param h_ the parity-check matrix (d rows, n columns)
* @return an (n - rank) x n generator matrix, or null if h_ is null/empty
*         or has full column rank (trivial null space)
*/
public static boolean[][] parityToGenerator(boolean[][] h_){
// work on a copy; gaussJordanElimination reduces its argument in place
boolean[][] h = BinaryMatrixHelper.copyMatrix(h_);
if(h==null){
return null;
}
int d=h.length;
int n=h[0].length;
int rank=BinaryMatrixHelper.gaussJordanElimination(h);
if(rank>=n){
// full column rank: null space is {0}, no generator to return
return null;
}
//dimension of generator matrix = n - rank(h)
boolean[][] g=new boolean[n-rank][n];
//find all pivots
//pivotsOf[i]=j => the pivot of the ith row is variable j
int[] pivotsOf= new int[d];
boolean[] pivots = new boolean[n];
for(int i=0;i<d;i++){
boolean pivotFound=false;
for(int j=0;j<n;j++){
if(h[i][j] && !pivotFound){
//variable j is a pivot of the ith row
pivots[j]=true;
pivotsOf[i]=j;
pivotFound=true;
}
}
}
// each free (non-pivot) variable yields one basis vector of the null space:
// set that variable to 1 and back-substitute the pivot variables
int basisCounter=0;
for(int j=0;j<n;j++){
if(pivots[j]){
continue;
}
g[basisCounter][j]=true;
for(int i=0;i<d;i++){
if(h[i][j])
g[basisCounter][pivotsOf[i]]=true;
}
basisCounter++;
}
return g;
}
/**
 * Swaps columns i and j of the matrix in place. Out-of-range indices,
 * i == j, or an empty matrix are silently ignored.
 */
private static void swapColumns(int i, int j, boolean[][] matrix) {
    int rows = matrix.length;
    if (rows == 0) {
        return; // nothing to swap
    }
    int cols = matrix[0].length;
    if (i >= cols || j >= cols || i == j) {
        return; // invalid or trivial request
    }
    for (int r = 0; r < rows; r++) {
        boolean held = matrix[r][i];
        matrix[r][i] = matrix[r][j];
        matrix[r][j] = held;
    }
}
/**
 * Swaps rows i and j of the matrix in place. Out-of-range indices,
 * i == j, or an empty matrix are silently ignored.
 */
private static void swapRows(int i, int j, boolean[][] matrix) {
    int rows = matrix.length;
    if (rows == 0 || i >= rows || j >= rows || i == j) {
        return; // invalid or trivial request
    }
    int cols = matrix[0].length;
    for (int c = 0; c < cols; c++) {
        boolean held = matrix[i][c];
        matrix[i][c] = matrix[j][c];
        matrix[j][c] = held;
    }
}
// Convenience overload: each row gets n/2 randomly placed true entries.
public static boolean[][] getRandomRegularMatrix(int m, int n){
return getRandomRegularMatrix(m,n,n/2);
}
/**
 * Builds an m-by-n random "regular" matrix: every row has exactly k true
 * entries at uniformly shuffled positions.
 *
 * @param m row count
 * @param n column count
 * @param k true entries per row
 * @return the matrix, or null when m &lt;= 0, n &lt;= 0, or k &gt; n
 */
public static boolean[][] getRandomRegularMatrix(int m, int n, int k) {
    if (m <= 0 || n <= 0 || k > n) {
        return null;
    }
    boolean[][] out = new boolean[m][n];
    int[] order = new int[n];
    for (int c = 0; c < n; c++) {
        order[c] = c;
    }
    for (int r = 0; r < m; r++) {
        // reshuffle the column order and take the first k positions
        shuffle(order);
        for (int c = 0; c < k; c++) {
            out[r][order[c]] = true;
        }
    }
    return out;
}
/**
 * Shuffles the array in place (Fisher-Yates). A null array is a no-op.
 *
 * @param indexes the array to permute
 */
public static void shuffle(int[] indexes) {
    if (indexes == null) {
        return;
    }
    for (int i = indexes.length - 1; i > 0; i--) {
        int j = (int) (Math.random() * (i + 1));
        int held = indexes[i];
        indexes[i] = indexes[j];
        indexes[j] = held;
    }
}
// Convenience overload: each cell is true with probability 0.5.
public static boolean[][] getRandomMatrix(int m, int n){
return getRandomMatrix(m,n,0.5);
}
/**
 * Builds an m-by-n matrix in which each cell is independently true with
 * probability f.
 *
 * @param m row count
 * @param n column count
 * @param f probability of a true cell
 * @return the matrix, or null when m &lt;= 0 or n &lt;= 0
 */
public static boolean[][] getRandomMatrix(int m, int n, double f) {
    if (m <= 0 || n <= 0) {
        return null;
    }
    Random rng = new Random();
    boolean[][] out = new boolean[m][n];
    for (int r = 0; r < m; r++) {
        for (int c = 0; c < n; c++) {
            // nextDouble() in [0,1): true with probability f
            out[r][c] = rng.nextDouble() > 1 - f;
        }
    }
    return out;
}
/**
 * Component-wise OR of two boolean vectors.
 *
 * @return the OR of the two vectors, or null if either is null or the
 *         lengths differ
 */
public static boolean[] or(boolean[] x, boolean[] y) {
    if (x == null || y == null || x.length != y.length) {
        return null;
    }
    boolean[] out = new boolean[x.length];
    for (int i = 0; i < out.length; i++) {
        out[i] = x[i] | y[i];
    }
    return out;
}
/**
 * Component-wise AND of two boolean vectors.
 *
 * @return the AND of the two vectors, or null if either is null or the
 *         lengths differ
 */
public static boolean[] and(boolean[] x, boolean[] y) {
    if (x == null || y == null || x.length != y.length) {
        return null;
    }
    boolean[] out = new boolean[x.length];
    for (int i = 0; i < out.length; i++) {
        out[i] = x[i] & y[i];
    }
    return out;
}
/**
 * Builds an m-by-n matrix in which each cell is independently true with
 * probability f, using a fixed seed for reproducibility.
 *
 * @param m    row count
 * @param n    column count
 * @param f    probability of a true cell
 * @param seed RNG seed; identical seeds give identical matrices
 * @return the matrix, or null when m &lt;= 0 or n &lt;= 0
 */
public static boolean[][] getRandomMatrix(int m, int n, double f, int seed) {
    // Fix: validate the arguments before allocating the RNG (previously the
    // Random was constructed even when null was about to be returned).
    if (m <= 0 || n <= 0) {
        return null;
    }
    Random rand = new Random(seed);
    boolean[][] matrix = new boolean[m][n];
    for (int i = 0; i < m; i++) {
        for (int j = 0; j < n; j++) {
            matrix[i][j] = rand.nextDouble() > 1 - f;
        }
    }
    return matrix;
}
/**
 * Builds a length-n vector of uniformly random booleans.
 *
 * @param n the vector length
 * @return the vector, or null when n &lt;= 0
 */
public static boolean[] getRandomVector(int n) {
    if (n <= 0) {
        return null;
    }
    Random rng = new Random();
    boolean[] out = new boolean[n];
    for (int i = 0; i < n; i++) {
        out[i] = rng.nextBoolean();
    }
    return out;
}
/**
 * same as gaussJordanElimination(boolean[][] matrix, int upToRow, int upToCol) but on full matrix
 * @param matrix boolean matrix; reduced in place
 * @return rank, 0 for an empty matrix, or -1 when matrix is null
 */
public static int gaussJordanElimination(boolean[][] matrix) {
    if (matrix == null) {
        return -1;
    }
    // Fix: guard the empty matrix — matrix[0] below previously threw
    // ArrayIndexOutOfBoundsException. Rank of an empty matrix is 0.
    if (matrix.length == 0) {
        return 0;
    }
    return gaussJordanElimination(matrix, matrix.length, matrix[0].length);
}
/**
 * same as gaussJordanElimination(boolean[][] matrix, int upToRow, int upToCol) but with all the columns
 * @param matrix boolean matrix; reduced in place
 * @param upToRow ignore the rows after upToRow
 * @return rank, 0 for an empty matrix, or -1 when matrix is null
 */
public static int gaussJordanElimination(boolean[][] matrix, int upToRow) {
    if (matrix == null) {
        return -1;
    }
    // Fix: guard the empty matrix — matrix[0] below previously threw
    // ArrayIndexOutOfBoundsException. Rank of an empty matrix is 0.
    if (matrix.length == 0) {
        return 0;
    }
    return gaussJordanElimination(matrix, upToRow, matrix[0].length);
}
/**
 * Perform Gauss-Jordan elimination on matrix. Note that the matrix passed in will be changed to rref in the process so you may want to save a copy.
 * @param matrix boolean matrix
 * @param upToRow ignore the rows after upToRow (1 base)
 * @param upToCol ignore the columns after upToCol (1 base)
 * @return the rank of the matrix
 */
public static int gaussJordanElimination(boolean[][] matrix, int upToRow, int upToCol) {
    if (matrix == null) {
        return -1;
    }
    int m = Math.min(upToRow, matrix.length);
    if (m < 1) {
        return 0;
    }
    // Fix: removed the former "if (m == 1) return 1;" shortcut. It reported
    // rank 1 even for an all-zero row and skipped the reduction entirely;
    // the general loop below handles the single-row case correctly
    // (rank 0 for a zero row, 1 otherwise).
    int n = Math.min(upToCol, matrix[0].length);
    int pivot = 0;
    int startRow = 0;
    while (pivot < n && startRow < m) {
        // find a row with a leading one in the current pivot column
        int leadingRow = pickLeadingOne(startRow, pivot, m, n, matrix);
        if (leadingRow < 0) {
            pivot++; // column is all zeros below startRow: try the next column
        } else {
            swapRows(startRow, leadingRow, matrix); // bring the leading one up
            eliminate(startRow, pivot, m, matrix);  // clear the column elsewhere
            pivot++;
            startRow++;
        }
    }
    return startRow; // number of pivots found == rank
}
private static int pickLeadingOne(int startRow, int pivot, int m, int n, boolean[][] matrix){
int leadingRow=-1;
for(int i=startRow;i<m;i++){
if(matrix[i][pivot]){
leadingRow=i;
break;
}
}
return leadingRow;
}
// private static int pickSparseLeadingOne(int startRow, int pivot, int m, int n, boolean[][] matrix){
// int min=m*n;
// int minIdx=-1;
// for(int i=startRow;i<m;i++){
// if(matrix[i][pivot]){
// if(minIdx<0){
// minIdx=i;
// continue;
// }
//
// int sum=0;
// for(int ri=0;ri<m;ri++){
// for(int rj=pivot;rj<n;rj++){
// if(matrix[ri][pivot]){
// if(matrix[i][rj]^matrix[ri][rj]){
// sum ++;
// }
// }else if(matrix[ri][rj]){
// sum ++;
// }
// }
// }
// System.out.println("sum"+sum);
// System.out.println("min"+min);
// if(sum<min){
// min=sum;
// minIdx=i;
// }
// }
// }
// System.out.println("sparse:"+minIdx);
// return minIdx;
// }
//
private static void eliminate(int pr, int pc, int m, boolean[][] matrix){
int n;
n=matrix[0].length;
for(int i=0;i<m;i++){
//skip those rows that we don't care
if(i==pr || !matrix[i][pc]){
continue;
}
matrix[i][pc]=false;
for(int j=pc+1;j<n;j++){
matrix[i][j]= matrix[pr][j]^matrix[i][j];
}
}
}
/**
* given a vector x, find a y such that matrix*y = x
* @param x
* @param matrix
* @return return y if y exists. o.w. return null
*/
public static boolean[] findFullSolution(boolean[] x, boolean[][] matrix){
System.out.println(matrix.length);
System.out.println(matrix[0].length);
System.out.println(x.length);
if(x==null || matrix==null){
return null;
}
if(x.length!=matrix[0].length){
return null;
}
//transpose
boolean[][] mt = transpose(matrix);
//augmented (add x as the lsat column)
boolean[][] augMt = augmentedMatrix(mt, x);
if(augMt==null){
return null;
}
int numCol=mt[0].length;
//GJ-eliminatation
gaussJordanElimination(augMt, augMt.length, numCol);
boolean[] solution=new boolean[numCol];
for(int i=0;i<augMt.length;i++){
boolean pivotFound =false;
for(int j=0;j<numCol;j++){
if(!pivotFound && augMt[i][j]){
pivotFound=true;
solution[j]=augMt[i][numCol];
}
}
}
return solution;
}
    /**
     * given a vector x, find a y such that matrix*y = x
     * @param x target vector; its length must equal the column count of matrix
     * @param matrix boolean matrix over GF(2); its transpose is row-reduced internally
     * @return return y if y exists. o.w. return null
     */
    public static boolean[] findSolution(boolean[] x, boolean[][] matrix){
        if(x==null || matrix==null){
            return null;
        }
        if(x.length!=matrix[0].length){
            return null;
        }
        //transpose so the system becomes mt*y = x with x rows aligned to mt rows
        boolean[][] mt = transpose(matrix);
        //augmented (add x as the last column)
        boolean[][] augMt = augmentedMatrix(mt, x);
        if(augMt==null){
            return null;
        }
        int numCol=mt[0].length;
        //GJ-elimination; augMt is reduced in place and rank counts the pivots placed
        int rank=gaussJordanElimination(augMt, augMt.length, numCol);
        boolean[] solution=new boolean[rank];
        for(int i=0;i<augMt.length;i++){
            if(augMt[i][numCol]){
                if(i<rank){
                    // NOTE(review): this assumes the pivot of row i is variable i
                    // (identity-like rref); confirm for systems whose pivots are
                    // not in the leading columns.
                    solution[i]=true;
                }else{
                    //a zero row with a true right-hand side: system is inconsistent
                    return null;
                }
            }
        }
        return solution;
    }
/**
* given a vector x, test if there exists a y such that matrix*y + offset = x
* @param x
* @param matrix
* @param offset
* @return true if such a y vector exists
*/
public static boolean isXInRange(boolean[] x, boolean[][] matrix, boolean[] offset){
if(x==null || offset==null){
return false;
}
if( x.length!=offset.length){
return false;
}
//compute x'=x+offset and then test if there exists a solution y that matrix*y=x'
boolean[] xPlusOffset = new boolean[x.length];
for(int i=0;i<x.length;i++){
xPlusOffset[i]=x[i]^offset[i];
}
return findSolution(xPlusOffset,matrix)!=null;
}
// public static boolean isXInRange(boolean[] x, boolean[][] matrix, boolean[] offset){
// if(x==null || matrix==null || offset==null){
// return false;
// }
// if(x.length!=matrix[0].length || x.length!=offset.length){
// return false;
// }
// //compute x'=x+offset and then test if there exists a solution y that matrix*y=x'
// boolean[] xPlusOffset = new boolean[x.length];
// for(int i=0;i<x.length;i++){
// xPlusOffset[i]=x[i]^offset[i];
// }
// //transpose
// boolean[][] mt = transpose(matrix);
// //augmented
// boolean[][] augMt = augmentedMatrix(mt, xPlusOffset);
// if(augMt==null){
// return false;
// }
// int numCol=mt[0].length;
// //GJ-eliminatation
// int rank=gaussJordanElimination(augMt, augMt.length, numCol);
// for(int i=rank;i<augMt.length;i++){
// if(augMt[i][numCol]){
// boolean consistent=false;
// for(int j=0;j<numCol;j++){
// if(augMt[i][j]){
// consistent=true;
// break;
// }
// }
// if(!consistent){
// return false;
// }
// }
// }
// return true;
// }
/**
* return the transpose of a boolean matrix
* @param matrix
* @return
*/
public static boolean[][] transpose(boolean[][] matrix){
if(matrix==null){
return null;
}
int m=matrix.length;
int n=matrix[0].length;
boolean[][] t = new boolean[n][m];
for(int i=0;i<m;i++){
for(int j=0;j<n;j++){
t[j][i]=matrix[i][j];
}
}
return t;
}
/**
* augmented matrix [m | offset], i.e., append offset to the matrix
* @param matrix
* @return
*/
public static boolean[][] augmentedMatrix(boolean[][] matrix, boolean[] offset){
if(matrix==null){
return null;
}
int m=matrix.length;
int n=matrix[0].length;
if(m!=offset.length){
return null;
}
//copy the original matrix
boolean[][] augMatrix = new boolean[m][n+1];
for(int i=0;i<m;i++){
for(int j=0;j<n;j++){
augMatrix[i][j]=matrix[i][j];
}
}
//append offset
for(int i=0;i<m;i++){
augMatrix[i][n]=offset[i];
}
return augMatrix;
}
public static void printVector(boolean[] vec){
if(vec==null){
System.out.println("null vector");
return;
}
for(int i=0;i<vec.length;i++){
System.out.print(vec[i]?1:0);
// System.out.println("x"+i+":"+vec[i]);
}
System.out.print("\n");
}
public static void printMatrix(boolean[][] matrix){
if(matrix==null){
return;
}
int m = matrix.length;
if(m==0){
return;
}
int n = matrix[0].length;
for(int i=0;i<m;i++){
for(int j=0;j<n;j++){
System.out.print(" "+(matrix[i][j]?1:0));
}
System.out.print('\n');
}
}
public static int getDensity(boolean[] vec){
int n = vec.length;
if(n==0){
return 0;
}
int count=0;
for(int i=0;i<n;i++){
if(vec[i])
count++;
}
return count;
}
public static int getDensity(boolean[][] matrix){
int m = matrix.length;
if(m==0){
return 0;
}
int n = matrix[0].length;
int count=0;
for(int i=0;i<m;i++){
for(int j=0;j<n;j++){
if(matrix[i][j]){
count++;
}
}
}
return count;
}
/**
* transform a long value into a boolean array
* @param value
* @param length
* @return
*/
public static boolean[] toBits(long value, int length){
boolean[] bits = new boolean[length];
for (int i = length-1; i >= 0; i--) {
bits[i] = (value & (1 << i)) != 0;
}
return bits;
}
}
| |
package com.thinkaurelius.titan.graphdb.transaction;
import com.carrotsearch.hppc.LongArrayList;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.Weigher;
import com.google.common.collect.*;
import com.thinkaurelius.titan.core.*;
import com.thinkaurelius.titan.core.attribute.Cmp;
import com.thinkaurelius.titan.core.attribute.Duration;
import com.thinkaurelius.titan.core.schema.*;
import com.thinkaurelius.titan.core.schema.SchemaInspector;
import com.thinkaurelius.titan.diskstorage.BackendException;
import com.thinkaurelius.titan.diskstorage.util.time.StandardDuration;
import com.thinkaurelius.titan.diskstorage.util.time.TimestampProvider;
import com.thinkaurelius.titan.diskstorage.BackendTransaction;
import com.thinkaurelius.titan.diskstorage.EntryList;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.SliceQuery;
import com.thinkaurelius.titan.graphdb.blueprints.TitanBlueprintsTransaction;
import com.thinkaurelius.titan.graphdb.database.EdgeSerializer;
import com.thinkaurelius.titan.graphdb.database.IndexSerializer;
import com.thinkaurelius.titan.graphdb.database.StandardTitanGraph;
import com.thinkaurelius.titan.graphdb.database.idassigner.IDPool;
import com.thinkaurelius.titan.graphdb.database.serialize.AttributeHandling;
import com.thinkaurelius.titan.graphdb.idmanagement.IDInspector;
import com.thinkaurelius.titan.graphdb.idmanagement.IDManager;
import com.thinkaurelius.titan.graphdb.internal.*;
import com.thinkaurelius.titan.graphdb.query.*;
import com.thinkaurelius.titan.graphdb.query.condition.*;
import com.thinkaurelius.titan.graphdb.query.graph.GraphCentricQuery;
import com.thinkaurelius.titan.graphdb.query.graph.GraphCentricQueryBuilder;
import com.thinkaurelius.titan.graphdb.query.graph.IndexQueryBuilder;
import com.thinkaurelius.titan.graphdb.query.graph.JointIndexQuery;
import com.thinkaurelius.titan.graphdb.query.vertex.MultiVertexCentricQueryBuilder;
import com.thinkaurelius.titan.graphdb.query.vertex.VertexCentricQuery;
import com.thinkaurelius.titan.graphdb.query.vertex.VertexCentricQueryBuilder;
import com.thinkaurelius.titan.graphdb.relations.RelationComparator;
import com.thinkaurelius.titan.graphdb.relations.RelationIdentifier;
import com.thinkaurelius.titan.graphdb.relations.StandardEdge;
import com.thinkaurelius.titan.graphdb.relations.StandardProperty;
import com.thinkaurelius.titan.graphdb.transaction.addedrelations.AddedRelationsContainer;
import com.thinkaurelius.titan.graphdb.transaction.addedrelations.ConcurrentBufferAddedRelations;
import com.thinkaurelius.titan.graphdb.transaction.addedrelations.SimpleBufferAddedRelations;
import com.thinkaurelius.titan.graphdb.transaction.indexcache.ConcurrentIndexCache;
import com.thinkaurelius.titan.graphdb.transaction.indexcache.IndexCache;
import com.thinkaurelius.titan.graphdb.transaction.indexcache.SimpleIndexCache;
import com.thinkaurelius.titan.graphdb.transaction.lock.*;
import com.thinkaurelius.titan.graphdb.transaction.vertexcache.GuavaVertexCache;
import com.thinkaurelius.titan.graphdb.transaction.vertexcache.VertexCache;
import com.thinkaurelius.titan.graphdb.types.*;
import com.thinkaurelius.titan.graphdb.types.system.*;
import com.thinkaurelius.titan.graphdb.types.vertices.EdgeLabelVertex;
import com.thinkaurelius.titan.graphdb.types.vertices.PropertyKeyVertex;
import com.thinkaurelius.titan.graphdb.types.vertices.TitanSchemaVertex;
import com.thinkaurelius.titan.graphdb.util.IndexHelper;
import com.thinkaurelius.titan.graphdb.util.VertexCentricEdgeIterable;
import com.thinkaurelius.titan.graphdb.vertices.CacheVertex;
import com.thinkaurelius.titan.graphdb.vertices.StandardVertex;
import com.thinkaurelius.titan.util.datastructures.Retriever;
import com.thinkaurelius.titan.util.stats.MetricManager;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.lang.StringUtils;
import org.cliffc.high_scale_lib.NonBlockingHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;
/**
* @author Matthias Broecheler (me@matthiasb.com)
*/
public class StandardTitanTx extends TitanBlueprintsTransaction implements TypeInspector, SchemaInspector, VertexFactory {
    private static final Logger log = LoggerFactory.getLogger(StandardTitanTx.class);
    // Sentinel: shared immutable empty map used until the first relation deletion (see removeRelation).
    private static final Map<Long, InternalRelation> EMPTY_DELETED_RELATIONS = ImmutableMap.of();
    // Sentinel: null until the first unique-lock request lazily initializes the map (see getLock).
    private static final ConcurrentMap<LockTuple, TransactionLock> UNINITIALIZED_LOCKS = null;
    // Maximum time a uniqueness lock acquisition may block (used by addEdge).
    private static final Duration LOCK_TIMEOUT = new StandardDuration(5000L, TimeUnit.MILLISECONDS);
    /**
     * This is a workaround for #893. Cache sizes small relative to the level
     * of thread parallelism can lead to Titan generating multiple copies of
     * a single vertex in a single transaction.
     */
    private static final long MIN_VERTEX_CACHE_SIZE = 100L;
    private final StandardTitanGraph graph;
    private final TransactionConfiguration config;
    private final IDManager idManager;
    private final IDInspector idInspector;
    private final AttributeHandling attributeHandler;
    // Set exactly once after construction via setBackendTransaction.
    private BackendTransaction txHandle;
    private final EdgeSerializer edgeSerializer;
    private final IndexSerializer indexSerializer;
    /* ###############################################
            Internal Data Structures
       ############################################### */
    //####### Vertex Cache
    /**
     * Keeps track of vertices already loaded in memory. Cannot release vertices with added relations.
     */
    private final VertexCache vertexCache;
    //######## Data structures that keep track of new and deleted elements
    //These data structures cannot release elements, since we would loose track of what was added or deleted
    /**
     * Keeps track of all added relations in this transaction
     */
    private final AddedRelationsContainer addedRelations;
    /**
     * Keeps track of all deleted relations in this transaction.
     * Starts as the EMPTY_DELETED_RELATIONS sentinel and is replaced by a
     * mutable map on the first deletion (see removeRelation).
     */
    private Map<Long, InternalRelation> deletedRelations;
    //######## Index Caches
    /**
     * Caches the result of index calls so that repeated index queries don't need
     * to be passed to the IndexProvider. This cache will drop entries when it overflows
     * since the result set can always be retrieved from the IndexProvider
     */
    private final Cache<JointIndexQuery.Subquery, List<Object>> indexCache;
    /**
     * Builds an inverted index for newly added properties so they can be considered in index queries.
     * This cache my not release elements since that would entail an expensive linear scan over addedRelations
     */
    private final IndexCache newVertexIndexEntries;
    //######## Lock applications
    /**
     * Transaction-local data structure for unique lock applications so that conflicting applications can be discovered
     * at the transactional level. Lazily initialized from the UNINITIALIZED_LOCKS sentinel (see getLock).
     */
    private ConcurrentMap<LockTuple, TransactionLock> uniqueLocks;
    //####### Other Data structures
    /**
     * Caches Titan types by name so that they can be quickly retrieved once they are loaded in the transaction.
     * Since type retrieval by name is common and there are only a few types, since cache is a simple map (i.e. no release)
     */
    private final Map<String, Long> newTypeCache;
    /**
     * Used to assign temporary ids to new vertices and relations added in this transaction.
     * If ids are assigned immediately, this is not used. This IDPool is shared across all elements.
     */
    private final IDPool temporaryIds;
    /**
     * This belongs in TitanConfig.
     */
    private final TimestampProvider times;
    /**
     * Whether or not this transaction is open
     */
    private boolean isOpen;
    // Vertex-cache retrievers; they differ only in whether vertex existence is
    // verified against the backend on a cache miss (see VertexConstructor).
    private final Retriever<Long, InternalVertex> existingVertexRetriever = new VertexConstructor(false);
    private final Retriever<Long, InternalVertex> externalVertexRetriever;
    private final Retriever<Long, InternalVertex> internalVertexRetriever;
    /**
     * Creates a new transaction against the given (open) graph with the given
     * configuration. Chooses single-threaded vs. concurrent data structures
     * based on the configuration, wires up the vertex/index caches, and
     * installs metrics-wrapping query processors when a metrics group name
     * is configured.
     * @param graph the graph this transaction operates on; must be open
     * @param config the transaction configuration; must not be null
     */
    public StandardTitanTx(StandardTitanGraph graph, TransactionConfiguration config) {
        Preconditions.checkNotNull(graph);
        Preconditions.checkArgument(graph.isOpen());
        Preconditions.checkNotNull(config);
        this.graph = graph;
        this.times = graph.getConfiguration().getTimestampProvider();
        this.config = config;
        this.idManager = graph.getIDManager();
        this.idInspector = idManager.getIdInspector();
        this.attributeHandler = graph.getDataSerializer();
        this.edgeSerializer = graph.getEdgeSerializer();
        this.indexSerializer = graph.getIndexSerializer();
        // Simple monotone counter starting at 1; hands out transaction-local temporary ids.
        temporaryIds = new IDPool() {
            private final AtomicLong counter = new AtomicLong(1);
            @Override
            public long nextID() {
                return counter.getAndIncrement();
            }
            @Override
            public void close() {
                //Do nothing
            }
        };
        int concurrencyLevel;
        // Pick unsynchronized or concurrent containers depending on threading mode.
        if (config.isSingleThreaded()) {
            addedRelations = new SimpleBufferAddedRelations();
            concurrencyLevel = 1;
            newTypeCache = new HashMap<String, Long>();
            newVertexIndexEntries = new SimpleIndexCache();
        } else {
            addedRelations = new ConcurrentBufferAddedRelations();
            concurrencyLevel = 1; //TODO: should we increase this?
            newTypeCache = new NonBlockingHashMap<String, Long>();
            newVertexIndexEntries = new ConcurrentIndexCache();
        }
        externalVertexRetriever = new VertexConstructor(config.hasVerifyExternalVertexExistence());
        internalVertexRetriever = new VertexConstructor(config.hasVerifyInternalVertexExistence());
        long effectiveVertexCacheSize = config.getVertexCacheSize();
        // Enforce a minimum cache size for writable transactions (workaround for #893,
        // see MIN_VERTEX_CACHE_SIZE).
        if (!config.isReadOnly()) {
            effectiveVertexCacheSize = Math.max(MIN_VERTEX_CACHE_SIZE, effectiveVertexCacheSize);
            log.debug("Guava vertex cache size: requested={} effective={} (min={})",
                    config.getVertexCacheSize(), effectiveVertexCacheSize, MIN_VERTEX_CACHE_SIZE);
        }
        vertexCache = new GuavaVertexCache(effectiveVertexCacheSize,concurrencyLevel,config.getDirtyVertexSize());
        // Weight index-cache entries by result-set size (plus a small fixed overhead).
        indexCache = CacheBuilder.newBuilder().weigher(new Weigher<JointIndexQuery.Subquery, List<Object>>() {
            @Override
            public int weigh(JointIndexQuery.Subquery q, List<Object> r) {
                return 2 + r.size();
            }
        }).concurrencyLevel(concurrencyLevel).maximumWeight(config.getIndexCacheWeight()).build();
        uniqueLocks = UNINITIALIZED_LOCKS;
        deletedRelations = EMPTY_DELETED_RELATIONS;
        this.isOpen = true;
        // Wrap the query processors with metrics collectors when a group name is set.
        if (null != config.getGroupName()) {
            MetricManager.INSTANCE.getCounter(config.getGroupName(), "tx", "begin").inc();
            elementProcessor = new MetricsQueryExecutor<GraphCentricQuery, TitanElement, JointIndexQuery>(config.getGroupName(), "graph", elementProcessorImpl);
            edgeProcessor = new MetricsQueryExecutor<VertexCentricQuery, TitanRelation, SliceQuery>(config.getGroupName(), "vertex", edgeProcessorImpl);
        } else {
            elementProcessor = elementProcessorImpl;
            edgeProcessor = edgeProcessorImpl;
        }
    }
public void setBackendTransaction(BackendTransaction txHandle) {
Preconditions.checkArgument(this.txHandle==null && txHandle!=null);
this.txHandle = txHandle;
}
/*
* ------------------------------------ Utility Access Verification methods ------------------------------------
*/
    /**
     * Verifies that the given vertices may be modified in this transaction:
     * the transaction must not be read-only and no pre-existing vertex may be
     * marked unmodifiable by the id inspector. Delegates the remaining checks
     * to verifyAccess.
     * @throws UnsupportedOperationException if the transaction is read-only
     * @throws SchemaViolationException if a non-new unmodifiable vertex is passed
     */
    private void verifyWriteAccess(TitanVertex... vertices) {
        if (config.isReadOnly())
            throw new UnsupportedOperationException("Cannot create new entities in read-only transaction");
        for (TitanVertex v : vertices) {
            // New vertices are exempt: only pre-existing unmodifiable vertices are protected.
            if (v.hasId() && idInspector.isUnmodifiableVertex(v.getLongId()) && !v.isNew())
                throw new SchemaViolationException("Cannot modify unmodifiable vertex: "+v);
        }
        verifyAccess(vertices);
    }
    /**
     * Verifies that this transaction is open and that each vertex is an
     * InternalVertex belonging to this transaction (system relation types are
     * exempt from the ownership check) and has not been removed.
     * @throws IllegalStateException if a vertex belongs to another transaction or was removed
     */
    public final void verifyAccess(TitanVertex... vertices) {
        verifyOpen();
        for (TitanVertex v : vertices) {
            Preconditions.checkArgument(v instanceof InternalVertex, "Invalid vertex: %s", v);
            // System relation types are shared across transactions, so skip the tx check for them.
            if (!(v instanceof SystemRelationType) && this != ((InternalVertex) v).tx())
                throw new IllegalStateException("The vertex or type is not associated with this transaction [" + v + "]");
            if (v.isRemoved())
                throw new IllegalStateException("The vertex or type has been removed [" + v + "]");
        }
    }
private void verifyOpen() {
if (isClosed())
throw new IllegalStateException("Operation cannot be executed because the enclosing transaction is closed");
}
/*
* ------------------------------------ External Access ------------------------------------
*/
    /**
     * Returns the successor transaction for this (closed) transaction.
     * Only valid for thread-bound transactions; resolves to the calling
     * thread's current transaction on the graph.
     * @throws IllegalStateException if this transaction is not thread-bound
     */
    public StandardTitanTx getNextTx() {
        Preconditions.checkArgument(isClosed());
        if (!config.isThreadBound())
            throw new IllegalStateException("Cannot access element because its enclosing transaction is closed and unbound");
        else return (StandardTitanTx) graph.getCurrentThreadTx();
    }
    /** Returns the configuration this transaction was opened with. */
    public TransactionConfiguration getConfiguration() {
        return config;
    }
    /** Returns the graph this transaction belongs to. */
    @Override
    public StandardTitanGraph getGraph() {
        return graph;
    }
    /** Returns the backend transaction handle (set via setBackendTransaction). */
    public BackendTransaction getTxHandle() {
        return txHandle;
    }
    /** Returns the graph's edge serializer. */
    public EdgeSerializer getEdgeSerializer() {
        return edgeSerializer;
    }
    /** Returns the inspector used to classify vertex and relation ids. */
    public IDInspector getIdInspector() {
        return idInspector;
    }
    /** Returns true if the vertex has an id and that id denotes a partitioned vertex. */
    public boolean isPartitionedVertex(TitanVertex vertex) {
        return vertex.hasId() && idInspector.isPartitionedVertex(vertex.getLongId());
    }
    /**
     * Resolves a partitioned vertex to its canonical representative.
     * Returns the vertex itself if it already carries the canonical id.
     */
    public InternalVertex getCanonicalVertex(InternalVertex partitionedVertex) {
        Preconditions.checkArgument(isPartitionedVertex(partitionedVertex));
        long canonicalId = idManager.getCanonicalVertexId(partitionedVertex.getLongId());
        if (canonicalId==partitionedVertex.getLongId()) return partitionedVertex;
        else return getExistingVertex(canonicalId);
    }
    /**
     * Returns the representative of the given partitioned vertex that lives
     * on the specified other partition.
     */
    public InternalVertex getOtherPartitionVertex(TitanVertex partitionedVertex, long otherPartition) {
        Preconditions.checkArgument(isPartitionedVertex(partitionedVertex));
        return getExistingVertex(idManager.getPartitionedVertexId(partitionedVertex.getLongId(), otherPartition));
    }
    /**
     * Returns all partition representatives of a partitioned vertex.
     * When restrict2Partitions is true and the configuration restricts the
     * partitions, only representatives on the restricted partitions are returned;
     * otherwise all representatives are returned.
     * @param partitionedVertex a vertex for which isPartitionedVertex holds
     * @param restrict2Partitions whether to honor the configured partition restriction
     * @return a non-empty array of representatives, one per relevant partition
     */
    public InternalVertex[] getAllRepresentatives(TitanVertex partitionedVertex, boolean restrict2Partitions) {
        Preconditions.checkArgument(isPartitionedVertex(partitionedVertex));
        long[] ids;
        if (!restrict2Partitions || !config.hasRestrictedPartitions()) {
            ids = idManager.getPartitionedVertexRepresentatives(partitionedVertex.getLongId());
        } else {
            // Build one representative id per restricted partition.
            int[] restrictedParititions = config.getRestrictedPartitions();
            ids = new long[restrictedParititions.length];
            for (int i=0;i<ids.length;i++) {
                ids[i]=idManager.getPartitionedVertexId(partitionedVertex.getLongId(),restrictedParititions[i]);
            }
        }
        Preconditions.checkArgument(ids.length>0);
        InternalVertex[] vertices = new InternalVertex[ids.length];
        for (int i=0;i<ids.length;i++) vertices[i]=getExistingVertex(ids[i]);
        return vertices;
    }
/*
* ------------------------------------ Vertex Handling ------------------------------------
*/
    /** Returns true if a (non-removed) vertex with the given id exists in this transaction. */
    @Override
    public boolean containsVertex(final long vertexid) {
        return getVertex(vertexid) != null;
    }
    // A valid id is positive and is either a schema-vertex id or a user-vertex id.
    private boolean isValidVertexId(long id) {
        return id>0 && (idInspector.isSchemaVertexId(id) || idInspector.isUserVertexId(id));
    }
@Override
public TitanVertex getVertex(long vertexid) {
verifyOpen();
if (null != config.getGroupName()) {
MetricManager.INSTANCE.getCounter(config.getGroupName(), "db", "getVertexByID").inc();
}
if (!isValidVertexId(vertexid)) return null;
//Make canonical partitioned vertex id
if (idInspector.isPartitionedVertex(vertexid)) vertexid=idManager.getCanonicalVertexId(vertexid);
InternalVertex v = null;
v = vertexCache.get(vertexid, externalVertexRetriever);
return (null == v || v.isRemoved()) ? null : v;
}
    /**
     * Batch vertex lookup. Valid ids already present in the vertex cache are
     * answered from the cache; the remaining ids are checked for existence in
     * the backend with a single multi-query. Removed vertices are filtered out
     * of the result.
     * @param ids vertex ids to resolve; must not be null
     * @return a map from id to vertex containing only the vertices that exist
     */
    @Override
    public Map<Long,TitanVertex> getVertices(long... ids) {
        verifyOpen();
        Preconditions.checkArgument(ids != null, "Need to provide valid ids");
        if (null != config.getGroupName()) {
            MetricManager.INSTANCE.getCounter(config.getGroupName(), "db", "getVerticesByID").inc();
        }
        Map<Long,TitanVertex> result = new HashMap<Long,TitanVertex>(ids.length);
        LongArrayList vids = new LongArrayList(ids.length);
        for (long id : ids) {
            if (isValidVertexId(id)) {
                // Resolve partitioned ids to the canonical representative first.
                if (idInspector.isPartitionedVertex(id)) id=idManager.getCanonicalVertexId(id);
                if (vertexCache.contains(id))
                    result.put(id,vertexCache.get(id, existingVertexRetriever));
                else
                    vids.add(id);
            }
        }
        if (!vids.isEmpty()) {
            // One backend round-trip verifies existence of all uncached ids.
            List<EntryList> existence = graph.edgeMultiQuery(vids,graph.vertexExistenceQuery,txHandle);
            for (int i = 0; i < vids.size(); i++) {
                if (!existence.get(i).isEmpty()) {
                    long id = vids.get(i);
                    result.put(id,vertexCache.get(id, existingVertexRetriever));
                }
            }
        }
        //Filter out potentially removed vertices
        for (Iterator<Map.Entry<Long, TitanVertex>> iterator = result.entrySet().iterator(); iterator.hasNext(); ) {
            if (iterator.next().getValue().isRemoved()) iterator.remove();
        }
        return result;
    }
    private InternalVertex getExistingVertex(long vertexid) {
        //return vertex no matter what, even if deleted, and assume the id has the correct format
        return vertexCache.get(vertexid, existingVertexRetriever);
    }
    /** Resolves a vertex by id, verifying existence per the configured internal-verification setting. */
    public InternalVertex getInternalVertex(long vertexid) {
        //return vertex but potentially check for existence
        return vertexCache.get(vertexid, internalVertexRetriever);
    }
    /**
     * Vertex-cache retriever that materializes an InternalVertex for an id on a
     * cache miss. Depending on the id's classification it builds the matching
     * vertex type (property key, edge label, vertex label, generic schema vertex,
     * or user vertex), optionally verifying existence against the backend.
     */
    private class VertexConstructor implements Retriever<Long, InternalVertex> {
        // When true, a backend existence query decides between Loaded and Removed lifecycle.
        private final boolean verifyExistence;
        private VertexConstructor(boolean verifyExistence) {
            this.verifyExistence = verifyExistence;
        }
        @Override
        public InternalVertex get(Long vertexid) {
            Preconditions.checkArgument(vertexid!=null && vertexid > 0, "Invalid vertex id: %s",vertexid);
            Preconditions.checkArgument(idInspector.isSchemaVertexId(vertexid) || idInspector.isUserVertexId(vertexid), "Not a valid vertex id: %s", vertexid);
            byte lifecycle = ElementLifeCycle.Loaded;
            long canonicalVertexId = idInspector.isPartitionedVertex(vertexid)?idManager.getCanonicalVertexId(vertexid):vertexid;
            if (verifyExistence) {
                // An empty existence query means the vertex does not exist in the backend.
                if (graph.edgeQuery(canonicalVertexId, graph.vertexExistenceQuery, txHandle).isEmpty())
                    lifecycle = ElementLifeCycle.Removed;
            }
            if (canonicalVertexId!=vertexid) {
                //Take lifecycle from canonical representative
                lifecycle = getExistingVertex(canonicalVertexId).getLifeCycle();
            }
            // Dispatch on the id classification to build the right vertex type.
            InternalVertex vertex = null;
            if (idInspector.isRelationTypeId(vertexid)) {
                if (idInspector.isPropertyKeyId(vertexid)) {
                    if (idInspector.isSystemRelationTypeId(vertexid)) {
                        vertex = SystemTypeManager.getSystemType(vertexid);
                    } else {
                        vertex = new PropertyKeyVertex(StandardTitanTx.this, vertexid, lifecycle);
                    }
                } else {
                    assert idInspector.isEdgeLabelId(vertexid);
                    if (idInspector.isSystemRelationTypeId(vertexid)) {
                        vertex = SystemTypeManager.getSystemType(vertexid);
                    } else {
                        vertex = new EdgeLabelVertex(StandardTitanTx.this, vertexid, lifecycle);
                    }
                }
            } else if (idInspector.isVertexLabelVertexId(vertexid)) {
                vertex = new VertexLabelVertex(StandardTitanTx.this,vertexid, lifecycle);
            } else if (idInspector.isGenericSchemaVertexId(vertexid)) {
                vertex = new TitanSchemaVertex(StandardTitanTx.this,vertexid, lifecycle);
            } else if (idInspector.isUserVertexId(vertexid)) {
                vertex = new CacheVertex(StandardTitanTx.this, vertexid, lifecycle);
            } else throw new IllegalArgumentException("ID could not be recognized");
            return vertex;
        }
    }
    /**
     * Adds a new vertex, optionally with a user-supplied id and a label.
     * A provided id is ignored (with an info log) when vertex id setting is
     * disabled in the graph configuration. The new vertex receives the
     * VertexExists property and, for non-default labels, a label edge.
     * @param vertexId user-provided vertex id, or null to auto-assign
     * @param label vertex label; null defaults to BaseVertexLabel.DEFAULT_VERTEXLABEL
     * @return the newly created vertex, already registered in the vertex cache
     */
    @Override
    public TitanVertex addVertex(Long vertexId, VertexLabel label) {
        verifyWriteAccess();
        if (label==null) label=BaseVertexLabel.DEFAULT_VERTEXLABEL;
        if (vertexId != null && !graph.getConfiguration().allowVertexIdSetting()) {
            log.info("Provided vertex id [{}] is ignored because vertex id setting is not enabled", vertexId);
            vertexId = null;
        }
        // NOTE(review): this precondition requires an explicit id whenever vertex id
        // setting is enabled, which makes the no-argument addVertex() overload fail
        // in that configuration — confirm this is the intended contract.
        Preconditions.checkArgument(vertexId != null || !graph.getConfiguration().allowVertexIdSetting(), "Must provide vertex id");
        Preconditions.checkArgument(vertexId == null || IDManager.VertexIDType.NormalVertex.is(vertexId), "Not a valid vertex id: %s", vertexId);
        Preconditions.checkArgument(vertexId == null || ((InternalVertexLabel)label).hasDefaultConfiguration(), "Cannot only use default vertex labels: %s",label);
        Preconditions.checkArgument(vertexId == null || !config.hasVerifyExternalVertexExistence() || !containsVertex(vertexId), "Vertex with given id already exists: %s", vertexId);
        StandardVertex vertex = new StandardVertex(this, IDManager.getTemporaryVertexID(IDManager.VertexIDType.NormalVertex, temporaryIds.nextID()), ElementLifeCycle.New);
        if (vertexId != null) {
            vertex.setId(vertexId);
        } else if (config.hasAssignIDsImmediately() || label.isPartitioned()) {
            graph.assignID(vertex,label);
        }
        // Every vertex carries the existence marker property.
        addProperty(vertex, BaseKey.VertexExists, Boolean.TRUE);
        if (label!=BaseVertexLabel.DEFAULT_VERTEXLABEL) { //Add label
            Preconditions.checkArgument(label instanceof VertexLabelVertex);
            addEdge(vertex, (VertexLabelVertex) label, BaseLabel.VertexLabelEdge);
        }
        vertexCache.add(vertex, vertex.getLongId());
        return vertex;
    }
    /** Adds a vertex with the default label and an auto-assigned id. */
    @Override
    public TitanVertex addVertex() {
        return addVertexWithLabel(BaseVertexLabel.DEFAULT_VERTEXLABEL);
    }
    /** Adds a vertex with the label resolved from the given label name. */
    @Override
    public TitanVertex addVertexWithLabel(String vertexLabel) {
        return addVertexWithLabel(getVertexLabel(vertexLabel));
    }
    /** Adds a vertex with the given label and an auto-assigned id. */
    @Override
    public TitanVertex addVertexWithLabel(VertexLabel vertexLabel) {
        return addVertex(null,vertexLabel);
    }
    /**
     * Returns all vertices visible to this transaction: newly created vertices
     * (excluding schema elements) concatenated with the vertices iterated from
     * the graph, keeping only one representative per partitioned vertex.
     */
    private Iterable<InternalVertex> getInternalVertices() {
        Iterable<InternalVertex> allVertices;
        if (!addedRelations.isEmpty()) {
            //There are possible new vertices
            List<InternalVertex> newVs = vertexCache.getAllNew();
            Iterator<InternalVertex> viter = newVs.iterator();
            while (viter.hasNext()) {
                // Schema elements are not user vertices; drop them from the iteration.
                if (viter.next() instanceof TitanSchemaElement) viter.remove();
            }
            allVertices = Iterables.concat(newVs, new VertexIterable(graph, this));
        } else {
            allVertices = new VertexIterable(graph, this);
        }
        //Filter out all but one PartitionVertex representative
        return Iterables.filter(allVertices,new Predicate<InternalVertex>() {
            @Override
            public boolean apply(@Nullable InternalVertex internalVertex) {
                return !isPartitionedVertex(internalVertex) || internalVertex.getLongId()==idInspector.getCanonicalVertexId(internalVertex.getLongId());
            }
        });
    }
    /** Blueprints API view over getInternalVertices() (raw cast to the Blueprints Vertex type). */
    @Override
    public Iterable<Vertex> getVertices() {
        return (Iterable)getInternalVertices();
    }
/*
* ------------------------------------ Adding and Removing Relations ------------------------------------
*/
    /**
     * Validates (and if necessary converts) a property value against the key's
     * declared data type. Values of the exact declared type pass through; other
     * values are run through the attribute handler's converter. An Object-typed
     * key accepts any non-null value.
     * @param key the property key whose data type constrains the value
     * @param attribute the candidate value; must not be null
     * @return the validated value, possibly a converted instance of the declared type
     * @throws SchemaViolationException if the value is null or cannot be converted
     */
    public final Object verifyAttribute(PropertyKey key, Object attribute) {
        if (attribute==null) throw new SchemaViolationException("Property value cannot be null");
        Class<?> datatype = key.getDataType();
        if (datatype.equals(Object.class)) {
            return attribute;
        } else {
            if (!attribute.getClass().equals(datatype)) {
                Object converted = null;
                try {
                    converted = attributeHandler.convert(datatype, attribute);
                } catch (IllegalArgumentException e) {
                    //Just means that data could not be converted; deliberately
                    //swallowed — a null `converted` triggers the schema violation below.
                }
                if (converted == null) throw new SchemaViolationException(
                        "Value [%s] is not an instance of the expected data type for property key [%s] and cannot be converted. Expected: %s, found: %s", attribute,
                        key.getName(), datatype, attribute.getClass());
                attribute = converted;
            }
            Preconditions.checkState(attribute.getClass().equals(datatype));
            attributeHandler.verifyAttribute(datatype, attribute);
            return attribute;
        }
    }
    /**
     * Removes a relation from this transaction: detaches it from all incident
     * vertices and records the removal. A relation created in this transaction
     * is simply dropped from the added-relations buffer (and the new-vertex
     * index cache when applicable); a loaded relation is recorded in the
     * deleted-relations map, which is lazily initialized on first use.
     * @param relation the relation to remove; must not already be removed
     */
    public void removeRelation(InternalRelation relation) {
        Preconditions.checkArgument(!relation.isRemoved());
        relation = relation.it();
        for (int i = 0; i < relation.getLen(); i++)
            verifyWriteAccess(relation.getVertex(i));
        //Delete from Vertex
        for (int i = 0; i < relation.getLen(); i++) {
            relation.getVertex(i).removeRelation(relation);
        }
        //Update transaction data structures
        if (relation.isNew()) {
            addedRelations.remove(relation);
            if (TypeUtil.hasSimpleInternalVertexKeyIndex(relation)) newVertexIndexEntries.remove((TitanProperty) relation);
        } else {
            Preconditions.checkArgument(relation.isLoaded());
            // Lazily replace the shared empty-map sentinel with a mutable map;
            // the threaded branch re-checks under the monitor before swapping.
            if (deletedRelations == EMPTY_DELETED_RELATIONS) {
                if (config.isSingleThreaded()) {
                    deletedRelations = new HashMap<Long, InternalRelation>();
                } else {
                    synchronized (this) {
                        if (deletedRelations == EMPTY_DELETED_RELATIONS)
                            deletedRelations = new ConcurrentHashMap<Long, InternalRelation>();
                    }
                }
            }
            deletedRelations.put(relation.getLongId(), relation);
        }
    }
    /** Returns true if the relation with the given id was deleted in this transaction. */
    public boolean isRemovedRelation(Long relationId) {
        return deletedRelations.containsKey(relationId);
    }
    /** Convenience overload: wraps the tuple elements into a LockTuple. */
    private TransactionLock getLock(final Object... tuple) {
        return getLock(new LockTuple(tuple));
    }
    /**
     * Returns the transaction-local lock for the given tuple, creating it on
     * first request. Single-threaded transactions get a no-op FakeLock. The
     * uniqueLocks map itself is lazily initialized from the
     * UNINITIALIZED_LOCKS sentinel, with a re-check under the monitor.
     */
    private TransactionLock getLock(final LockTuple la) {
        if (config.isSingleThreaded()) return FakeLock.INSTANCE;
        if (uniqueLocks == UNINITIALIZED_LOCKS) {
            Preconditions.checkArgument(!config.isSingleThreaded());
            synchronized (this) {
                if (uniqueLocks == UNINITIALIZED_LOCKS)
                    uniqueLocks = new ConcurrentHashMap<LockTuple, TransactionLock>();
            }
        }
        //TODO: clean out no longer used locks from uniqueLocks when it grows to large (use ReadWriteLock to protect against race conditions)
        // putIfAbsent keeps exactly one lock per tuple across racing threads.
        TransactionLock lock = new ReentrantTransactionLock();
        TransactionLock existingLock = uniqueLocks.putIfAbsent(la, lock);
        return (existingLock == null)?lock:existingLock;
    }
    /**
     * Builds the lock that guards uniqueness checks for a relation of the given
     * type between out and in. With uniqueness verification disabled or an
     * unconstrained multiplicity this is a no-op FakeLock; SIMPLE multiplicity
     * locks the (out, type, in) triple; directional uniqueness locks each
     * constrained endpoint, combining multiple locks with CombinerLock.
     */
    private TransactionLock getUniquenessLock(final TitanVertex out, final InternalRelationType type, final Object in) {
        Multiplicity multiplicity = type.getMultiplicity();
        TransactionLock uniqueLock = FakeLock.INSTANCE;
        if (config.hasVerifyUniqueness() && multiplicity.isConstrained()) {
            uniqueLock = null;
            if (multiplicity==Multiplicity.SIMPLE) {
                uniqueLock = getLock(out, type, in);
            } else {
                for (Direction dir : Direction.proper) {
                    if (multiplicity.isUnique(dir)) {
                        TransactionLock lock = getLock(dir == Direction.OUT ? out : in, type, dir);
                        if (uniqueLock==null) uniqueLock=lock;
                        else uniqueLock=new CombinerLock(uniqueLock,lock,times);
                    }
                }
            }
        }
        // A constrained multiplicity is unique in at least one direction, so a lock was assigned.
        assert uniqueLock!=null;
        return uniqueLock;
    }
    /**
     * Adds an edge of the given label between outVertex and inVertex. Acquires
     * the uniqueness lock for the label's multiplicity (bounded by
     * LOCK_TIMEOUT), performs the configured uniqueness checks while holding
     * it, then creates and connects a new StandardEdge with a temporary id
     * (assigned immediately if so configured).
     * @throws SchemaViolationException if a uniqueness constraint of the label is violated
     */
    @Override
    public TitanEdge addEdge(TitanVertex outVertex, TitanVertex inVertex, EdgeLabel label) {
        verifyWriteAccess(outVertex, inVertex);
        outVertex = ((InternalVertex) outVertex).it();
        inVertex = ((InternalVertex) inVertex).it();
        Preconditions.checkNotNull(label);
        Multiplicity multiplicity = label.getMultiplicity();
        TransactionLock uniqueLock = getUniquenessLock(outVertex, (InternalRelationType) label,inVertex);
        uniqueLock.lock(LOCK_TIMEOUT);
        try {
            //Check uniqueness while holding the lock so concurrent adders cannot race past the checks
            if (config.hasVerifyUniqueness()) {
                if (multiplicity==Multiplicity.SIMPLE) {
                    if (!Iterables.isEmpty(query(outVertex).type(label).direction(Direction.OUT).adjacent(inVertex).titanEdges()))
                        throw new SchemaViolationException("An edge with the given label already exists between the pair of vertices and the label [%s] is simple", label.getName());
                }
                if (multiplicity.isUnique(Direction.OUT)) {
                    if (!Iterables.isEmpty(query(outVertex).type(label).direction(Direction.OUT).titanEdges()))
                        throw new SchemaViolationException("An edge with the given label already exists on the out-vertex and the label [%s] is out-unique", label.getName());
                }
                if (multiplicity.isUnique(Direction.IN)) {
                    if (!Iterables.isEmpty(query(inVertex).type(label).direction(Direction.IN).titanEdges()))
                        throw new SchemaViolationException("An edge with the given label already exists on the in-vertex and the label [%s] is in-unique", label.getName());
                }
            }
            StandardEdge edge = new StandardEdge(IDManager.getTemporaryRelationID(temporaryIds.nextID()), label, (InternalVertex) outVertex, (InternalVertex) inVertex, ElementLifeCycle.New);
            if (config.hasAssignIDsImmediately()) graph.assignID(edge);
            connectRelation(edge);
            return edge;
        } finally {
            uniqueLock.unlock();
        }
    }
/**
 * Registers a newly created relation with its incident vertices and with this
 * transaction's bookkeeping structures (added-relations set, vertex cache, and
 * the new-vertex index entries when applicable).
 */
private void connectRelation(InternalRelation r) {
    // Attach the relation to every incident vertex; a failure here is an internal error.
    for (int pos = 0; pos < r.getLen(); pos++) {
        if (!r.getVertex(pos).addRelation(r)) {
            throw new AssertionError("Could not connect relation: " + r);
        }
    }
    addedRelations.add(r);
    // Make sure every incident vertex is reachable through the vertex cache.
    for (int pos = 0; pos < r.getLen(); pos++) {
        InternalVertex incident = r.getVertex(pos);
        vertexCache.add(incident, incident.getLongId());
    }
    if (TypeUtil.hasSimpleInternalVertexKeyIndex(r)) {
        newVertexIndexEntries.add((TitanProperty) r);
    }
}
/**
 * Adds a property to the vertex. SINGLE-cardinality keys overwrite any existing
 * value; all other cardinalities append a new property instance.
 */
@Override
public TitanProperty addProperty(TitanVertex vertex, PropertyKey key, Object value) {
    return key.getCardinality() == Cardinality.SINGLE
            ? setProperty(vertex, key, value)
            : addPropertyInternal(vertex, key, value, true);
}
/**
 * Creates a new property on the vertex, optionally enforcing the key's cardinality
 * constraint, and always enforcing any unique composite indexes on the key.
 *
 * @param verifyCardinalityConstraint whether SINGLE/SET cardinality must be checked
 *        (false when the caller, e.g. setProperty, has already removed conflicting values)
 * @throws SchemaViolationException on a cardinality or unique-index violation
 */
public TitanProperty addPropertyInternal(TitanVertex vertex, final PropertyKey key, Object value, boolean verifyCardinalityConstraint) {
    verifyWriteAccess(vertex);
    Preconditions.checkArgument(!(key instanceof ImplicitKey),"Cannot create a property of implicit type: %s",key.getName());
    vertex = ((InternalVertex) vertex).it();
    Preconditions.checkNotNull(key);
    // Normalize/validate the value against the key's data type before any locking.
    final Object normalizedValue = verifyAttribute(key, value);
    Cardinality cardinality = key.getCardinality();
    //Determine unique indexes
    List<IndexLockTuple> uniqueIndexTuples = new ArrayList<IndexLockTuple>();
    for (CompositeIndexType index : TypeUtil.getUniqueIndexes(key)) {
        IndexSerializer.IndexRecords matches = IndexSerializer.indexMatches(vertex, index, key, normalizedValue);
        for (Object[] match : matches.getRecordValues()) uniqueIndexTuples.add(new IndexLockTuple(index,match));
    }
    TransactionLock uniqueLock = getUniquenessLock(vertex, (InternalRelationType) key, normalizedValue);
    //Add locks for unique indexes
    for (IndexLockTuple lockTuple : uniqueIndexTuples) uniqueLock = new CombinerLock(uniqueLock,getLock(lockTuple),times);
    uniqueLock.lock(LOCK_TIMEOUT);
    try {
        //Check uniqueness
        if (config.hasVerifyUniqueness()) {
            if (verifyCardinalityConstraint) {
                // SINGLE: no other value may exist for this key on the vertex.
                if (cardinality == Cardinality.SINGLE) {
                    if (!Iterables.isEmpty(query(vertex).type(key).properties()))
                        throw new SchemaViolationException("A property with the given key [%s] already exists on the vertex [%s] and the property key is defined as single-valued", key.getName(), vertex);
                }
                // SET: the same value may not be added twice for this key.
                if (cardinality == Cardinality.SET) {
                    if (!Iterables.isEmpty(Iterables.filter(query(vertex).type(key).properties(), new Predicate<TitanProperty>() {
                        @Override
                        public boolean apply(@Nullable TitanProperty titanProperty) {
                            return normalizedValue.equals(titanProperty.getValue());
                        }
                    })))
                        throw new SchemaViolationException("A property with the given key [%s] and value [%s] already exists on the vertex and the property key is defined as set-valued", key.getName(), normalizedValue);
                }
            }
            //Check all unique indexes
            for (IndexLockTuple lockTuple : uniqueIndexTuples) {
                if (!Iterables.isEmpty(IndexHelper.getQueryResults(lockTuple.getIndex(), lockTuple.getAll(), this)))
                    throw new SchemaViolationException("Adding this property for key [%s] and value [%s] violates a uniqueness constraint [%s]", key.getName(), normalizedValue, lockTuple.getIndex());
            }
        }
        // Create the property with a temporary id; a permanent id may be assigned immediately.
        StandardProperty prop = new StandardProperty(IDManager.getTemporaryRelationID(temporaryIds.nextID()), key, (InternalVertex) vertex, normalizedValue, ElementLifeCycle.New);
        if (config.hasAssignIDsImmediately()) graph.assignID(prop);
        connectRelation(prop);
        return prop;
    } finally {
        uniqueLock.unlock();
    }
}
/**
 * Sets the value of a SINGLE-cardinality property on the vertex, replacing any
 * existing value for the key.
 *
 * @throws UnsupportedOperationException if the key is not single-cardinality
 */
public TitanProperty setProperty(TitanVertex vertex, final PropertyKey key, Object value) {
    verifyWriteAccess(vertex);
    Preconditions.checkNotNull(key);
    if (key.getCardinality() != Cardinality.SINGLE)
        throw new UnsupportedOperationException("Not a single key: " + key + ". Use addProperty instead");
    TransactionLock uniqueLock = FakeLock.INSTANCE;
    if (config.hasVerifyUniqueness()) {
        //Acquire uniqueness lock, remove and add
        uniqueLock = getLock(vertex, key, Direction.OUT);
    }
    // Fix: acquire the lock BEFORE entering the try block. Previously lock() was invoked
    // inside try, so a failed/timed-out acquisition still reached the finally clause and
    // called unlock() on a lock that was never held, potentially masking the original
    // failure. This also matches the lock discipline of addEdge/addPropertyInternal.
    uniqueLock.lock(LOCK_TIMEOUT);
    try {
        /* If we are simply overwriting a vertex property, then we don't have to explicitly remove it thereby saving a read operation
           However, this only applies if
           1) we don't lock on the property key or consistency checks are disabled and
           2) there are no indexes for this property key
        */
        if ((config.hasVerifyUniqueness() && ((InternalRelationType) key).getConsistencyModifier() == ConsistencyModifier.LOCK) ||
                TypeUtil.hasAnyIndex(key)) {
            // Persisted removal so stale index entries / locked values are cleaned up.
            vertex.removeProperty(key);
        } else {
            //Only delete in-memory
            InternalVertex v = (InternalVertex) vertex;
            for (InternalRelation r : v.it().getAddedRelations(new Predicate<InternalRelation>() {
                @Override
                public boolean apply(@Nullable InternalRelation p) {
                    return p.getType().equals(key);
                }
            })) {
                r.remove();
            }
        }
        // Cardinality was already enforced above, so skip the constraint re-check.
        return addPropertyInternal(vertex, key, value, false);
    } finally {
        uniqueLock.unlock();
    }
}
/**
 * Returns all edges in this transaction, iterated vertex-centrically over the
 * transaction's internal vertices.
 */
@Override
public Iterable<Edge> getEdges() {
    return new VertexCentricEdgeIterable(getInternalVertices(),RelationCategory.EDGE);
}
/*
* ------------------------------------ Schema Handling ------------------------------------
*/
/**
 * Creates and fully initializes a new schema vertex (property key, edge label, vertex
 * label, or generic schema type) from the given definition, assigning its id, standard
 * schema properties, and registering it in the transaction caches.
 *
 * @param schemaCategory the kind of schema element to create
 * @param name           required (non-blank) iff the category is named
 * @param definition     validated against the category before creation
 */
public final TitanSchemaVertex makeSchemaVertex(TitanSchemaCategory schemaCategory, String name, TypeDefinitionMap definition) {
    verifyOpen();
    Preconditions.checkArgument(!schemaCategory.hasName() || StringUtils.isNotBlank(name), "Need to provide a valid name for type [%s]", schemaCategory);
    schemaCategory.verifyValidDefinition(definition);
    TitanSchemaVertex schemaVertex;
    // Pick the concrete vertex class and temporary-id type per schema category.
    if (schemaCategory.isRelationType()) {
        if (schemaCategory == TitanSchemaCategory.PROPERTYKEY) {
            schemaVertex = new PropertyKeyVertex(this, IDManager.getTemporaryVertexID(IDManager.VertexIDType.UserPropertyKey, temporaryIds.nextID()), ElementLifeCycle.New);
        } else {
            assert schemaCategory == TitanSchemaCategory.EDGELABEL;
            schemaVertex = new EdgeLabelVertex(this, IDManager.getTemporaryVertexID(IDManager.VertexIDType.UserEdgeLabel,temporaryIds.nextID()), ElementLifeCycle.New);
        }
    } else if (schemaCategory==TitanSchemaCategory.VERTEXLABEL) {
        schemaVertex = new VertexLabelVertex(this, IDManager.getTemporaryVertexID(IDManager.VertexIDType.GenericSchemaType,temporaryIds.nextID()), ElementLifeCycle.New);
    } else {
        schemaVertex = new TitanSchemaVertex(this, IDManager.getTemporaryVertexID(IDManager.VertexIDType.GenericSchemaType,temporaryIds.nextID()), ElementLifeCycle.New);
    }
    // Schema vertices get permanent ids immediately (required before property writes).
    graph.assignID(schemaVertex, BaseVertexLabel.DEFAULT_VERTEXLABEL);
    Preconditions.checkArgument(schemaVertex.getLongId() > 0);
    if (schemaCategory.hasName()) addProperty(schemaVertex, BaseKey.SchemaName, schemaCategory.getSchemaName(name));
    addProperty(schemaVertex, BaseKey.VertexExists, Boolean.TRUE);
    addProperty(schemaVertex, BaseKey.SchemaCategory, schemaCategory);
    updateSchemaVertex(schemaVertex);
    addProperty(schemaVertex, BaseKey.SchemaUpdateTime, times.getTime().getNativeTimestamp());
    // Persist each definition entry as a property annotated with its category descriptor.
    for (Map.Entry<TypeDefinitionCategory,Object> def : definition.entrySet()) {
        TitanProperty p = addProperty(schemaVertex, BaseKey.SchemaDefinitionProperty,def.getValue());
        p.setProperty(BaseKey.SchemaDefinitionDesc,TypeDefinitionDescription.of(def.getKey()));
    }
    vertexCache.add(schemaVertex, schemaVertex.getLongId());
    // Register named types so lookups within this transaction resolve immediately.
    if (schemaCategory.hasName()) newTypeCache.put(schemaCategory.getSchemaName(name), schemaVertex.getLongId());
    return schemaVertex;
}
/**
 * Stamps the schema vertex with the current time so dependent caches can detect
 * that the schema element was modified.
 */
public void updateSchemaVertex(TitanSchemaVertex schemaVertex) {
    Object now = times.getTime().getNativeTimestamp();
    setProperty(schemaVertex, BaseKey.SchemaUpdateTime, now);
}
/**
 * Creates a new property key schema vertex from the given definition.
 */
public PropertyKey makePropertyKey(String name, TypeDefinitionMap definition) {
    TitanSchemaVertex schemaVertex = makeSchemaVertex(TitanSchemaCategory.PROPERTYKEY, name, definition);
    return (PropertyKey) schemaVertex;
}
/**
 * Creates a new edge label schema vertex from the given definition.
 */
public EdgeLabel makeEdgeLabel(String name, TypeDefinitionMap definition) {
    TitanSchemaVertex schemaVertex = makeSchemaVertex(TitanSchemaCategory.EDGELABEL, name, definition);
    return (EdgeLabel) schemaVertex;
}
/**
 * Resolves a schema vertex by its (category-qualified) schema name, or returns null
 * if no such schema element exists.
 */
public TitanSchemaVertex getSchemaVertex(String schemaName) {
    // Types created inside this transaction take precedence over the shared schema cache.
    Long schemaId = newTypeCache.get(schemaName);
    if (schemaId == null) {
        schemaId = graph.getSchemaCache().getSchemaId(schemaName);
    }
    if (schemaId == null) {
        return null;
    }
    InternalVertex typeVertex = vertexCache.get(schemaId, existingVertexRetriever);
    assert typeVertex != null;
    return (TitanSchemaVertex) typeVertex;
}
/**
 * Returns true if a relation type (property key or edge label) with the given name exists.
 */
@Override
public boolean containsRelationType(String name) {
    return null != getRelationType(name);
}
/**
 * Looks up a relation type by name. Built-in system types shadow user-defined schema;
 * returns null when neither exists.
 */
@Override
public RelationType getRelationType(String name) {
    verifyOpen();
    RelationType systemType = SystemTypeManager.getSystemType(name);
    if (systemType == null) {
        return (RelationType) getSchemaVertex(TitanSchemaCategory.getRelationTypeName(name));
    }
    return systemType;
}
/**
 * Returns true if a relation type with the given name exists and is a property key.
 */
@Override
public boolean containsPropertyKey(String name) {
    RelationType type = getRelationType(name);
    if (type == null) {
        return false;
    }
    return type.isPropertyKey();
}
/**
 * Returns true if a relation type with the given name exists and is an edge label.
 */
@Override
public boolean containsEdgeLabel(String name) {
    RelationType type = getRelationType(name);
    if (type == null) {
        return false;
    }
    return type.isEdgeLabel();
}
// this is critical path; we can't allow anything heavier than an assertion in here
/**
 * Resolves an existing relation type by id. System relation types are served from
 * the static type manager; user types are resolved through the vertex cache.
 */
@Override
public RelationType getExistingRelationType(long typeid) {
    assert idInspector.isRelationTypeId(typeid);
    if (idInspector.isSystemRelationTypeId(typeid)) {
        return SystemTypeManager.getSystemType(typeid);
    } else {
        InternalVertex v = getInternalVertex(typeid);
        return (RelationType) v;
    }
}
/**
 * Looks up a property key by name; returns null if no relation type with that name
 * exists, and fails if the name refers to an edge label instead.
 */
@Override
public PropertyKey getPropertyKey(String name) {
    RelationType found = getRelationType(name);
    Preconditions.checkArgument(found == null || found.isPropertyKey(), "The relation type with name [%s] is not a property key", name);
    return (PropertyKey) found;
}
/**
 * Returns the property key with the given name, creating it through the configured
 * automatic schema maker if it does not exist yet.
 *
 * @throws IllegalArgumentException if the name refers to an edge label
 */
@Override
public PropertyKey getOrCreatePropertyKey(String name) {
    RelationType existing = getRelationType(name);
    if (existing != null) {
        if (!existing.isPropertyKey()) {
            throw new IllegalArgumentException("The type of given name is not a key: " + name);
        }
        return (PropertyKey) existing;
    }
    return config.getAutoSchemaMaker().makePropertyKey(makePropertyKey(name));
}
/**
 * Looks up an edge label by name; returns null if no relation type with that name
 * exists, and fails if the name refers to a property key instead.
 */
@Override
public EdgeLabel getEdgeLabel(String name) {
    RelationType found = getRelationType(name);
    Preconditions.checkArgument(found == null || found.isEdgeLabel(), "The relation type with name [%s] is not an edge label", name);
    return (EdgeLabel) found;
}
/**
 * Returns the edge label with the given name, creating it through the configured
 * automatic schema maker if it does not exist yet.
 *
 * @throws IllegalArgumentException if the name refers to a property key
 */
@Override
public EdgeLabel getOrCreateEdgeLabel(String name) {
    RelationType existing = getRelationType(name);
    if (existing != null) {
        if (!existing.isEdgeLabel()) {
            throw new IllegalArgumentException("The type of given name is not a label: " + name);
        }
        return (EdgeLabel) existing;
    }
    return config.getAutoSchemaMaker().makeEdgeLabel(makeEdgeLabel(name));
}
/**
 * Returns a builder for defining a new property key with the given name.
 */
@Override
public PropertyKeyMaker makePropertyKey(String name) {
    StandardPropertyKeyMaker builder = new StandardPropertyKeyMaker(this, indexSerializer, attributeHandler);
    builder.name(name);
    return builder;
}
/**
 * Returns a builder for defining a new edge label with the given name.
 */
@Override
public EdgeLabelMaker makeEdgeLabel(String name) {
    StandardEdgeLabelMaker builder = new StandardEdgeLabelMaker(this, indexSerializer, attributeHandler);
    builder.name(name);
    return builder;
}
//-------- Vertex Labels -----------------
/**
 * Resolves an existing vertex label by its vertex id.
 */
@Override
public VertexLabel getExistingVertexLabel(long id) {
    assert idInspector.isVertexLabelVertexId(id);
    return (VertexLabelVertex) getInternalVertex(id);
}
/**
 * Returns true if a vertex label with the given name exists. The default label
 * always exists without a backing schema vertex.
 */
@Override
public boolean containsVertexLabel(String name) {
    verifyOpen();
    if (BaseVertexLabel.DEFAULT_VERTEXLABEL.getName().equals(name)) {
        return true;
    }
    return null != getSchemaVertex(TitanSchemaCategory.VERTEXLABEL.getSchemaName(name));
}
/**
 * Returns the vertex label with the given name, creating it through the automatic
 * schema maker if it does not exist. The default label is served directly.
 */
@Override
public VertexLabel getVertexLabel(String name) {
    verifyOpen();
    if (BaseVertexLabel.DEFAULT_VERTEXLABEL.getName().equals(name)) {
        return BaseVertexLabel.DEFAULT_VERTEXLABEL;
    }
    VertexLabel existing = (VertexLabel) getSchemaVertex(TitanSchemaCategory.VERTEXLABEL.getSchemaName(name));
    if (existing != null) {
        return existing;
    }
    return config.getAutoSchemaMaker().makeVertexLabel(makeVertexLabel(name));
}
/**
 * Returns a builder for defining a new vertex label with the given name.
 */
@Override
public VertexLabelMaker makeVertexLabel(String name) {
    StandardVertexLabelMaker builder = new StandardVertexLabelMaker(this);
    builder.name(name);
    return builder;
}
/*
* ------------------------------------ Query Answering ------------------------------------
*/
/**
 * Returns a vertex-centric query builder bound to the canonical in-transaction
 * representative of the given vertex.
 */
public VertexCentricQueryBuilder query(TitanVertex vertex) {
    InternalVertex canonical = ((InternalVertex) vertex).it();
    return new VertexCentricQueryBuilder(canonical);
}
/**
 * Returns a multi-vertex query builder pre-populated with the given vertices.
 */
@Override
public TitanMultiVertexQuery multiQuery(TitanVertex... vertices) {
    MultiVertexCentricQueryBuilder builder = new MultiVertexCentricQueryBuilder(this);
    for (int i = 0; i < vertices.length; i++) {
        builder.addVertex(vertices[i]);
    }
    return builder;
}
/**
 * Returns a multi-vertex query builder pre-populated with the given vertex collection.
 */
@Override
public TitanMultiVertexQuery multiQuery(Collection<TitanVertex> vertices) {
    MultiVertexCentricQueryBuilder multiQueryBuilder = new MultiVertexCentricQueryBuilder(this);
    multiQueryBuilder.addAllVertices(vertices);
    return multiQueryBuilder;
}
/**
 * Executes one slice query against the backend for many vertices at once and pre-loads
 * the results into each vertex's relation cache. Only persisted, cacheable vertices
 * that have not already loaded this slice are queried.
 *
 * NOTE(review): the result-matching loop below assumes {@code vertices} iterates in the
 * same order on both passes so that positions in {@code vids} and {@code results} line
 * up with the vertices they belong to — confirm callers pass an order-stable collection.
 */
public void executeMultiQuery(final Collection<InternalVertex> vertices, final SliceQuery sq) {
    LongArrayList vids = new LongArrayList(vertices.size());
    for (InternalVertex v : vertices) {
        // Skip new vertices (nothing persisted), non-cache vertices, and already-loaded slices.
        if (!v.isNew() && v.hasId() && (v instanceof CacheVertex) && !v.hasLoadedRelations(sq)) vids.add(v.getLongId());
    }
    if (!vids.isEmpty()) {
        List<EntryList> results = graph.edgeMultiQuery(vids, sq, txHandle);
        int pos = 0;
        for (TitanVertex v : vertices) {
            // Advance through the result list in lock-step with the qualifying vertices.
            if (pos<vids.size() && vids.get(pos) == v.getLongId()) {
                final EntryList vresults = results.get(pos);
                ((CacheVertex) v).loadRelations(sq, new Retriever<SliceQuery, EntryList>() {
                    @Override
                    public EntryList get(SliceQuery query) {
                        return vresults;
                    }
                });
                pos++;
            }
        }
    }
}
// Executor used to answer vertex-centric queries. Final field; initialized in the
// constructor, which is outside this chunk — presumably to edgeProcessorImpl,
// possibly wrapped — TODO confirm against the constructor.
public final QueryExecutor<VertexCentricQuery, TitanRelation, SliceQuery> edgeProcessor;
/**
 * Default vertex-centric query executor: merges relations added in this transaction
 * with relations loaded from the backend, filtering out deleted/replaced ones.
 */
public final QueryExecutor<VertexCentricQuery, TitanRelation, SliceQuery> edgeProcessorImpl = new QueryExecutor<VertexCentricQuery, TitanRelation, SliceQuery>() {
    /** Returns the in-transaction (not yet persisted) relations matching the query. */
    @Override
    public Iterator<TitanRelation> getNew(final VertexCentricQuery query) {
        InternalVertex vertex = query.getVertex();
        if (vertex.isNew() || vertex.hasAddedRelations()) {
            return (Iterator) vertex.getAddedRelations(new Predicate<InternalRelation>() {
                //Need to filter out self-loops if query only asks for one direction
                private TitanRelation previous = null;
                @Override
                public boolean apply(@Nullable InternalRelation relation) {
                    // A loop edge appears once per incident direction; when the query is
                    // direction-restricted, suppress the immediate duplicate occurrence.
                    if ((relation instanceof TitanEdge) && relation.isLoop()
                            && query.getDirection() != Direction.BOTH) {
                        if (relation.equals(previous))
                            return false;
                        previous = relation;
                    }
                    return query.matches(relation);
                }
            }).iterator();
        } else {
            return Iterators.emptyIterator();
        }
    }
    /** True if backend results for this vertex might be stale due to local changes. */
    @Override
    public boolean hasDeletions(VertexCentricQuery query) {
        InternalVertex vertex = query.getVertex();
        if (vertex.isNew()) return false;
        //In addition to deleted, we need to also check for added relations since those can potentially
        //replace existing ones due to a multiplicity constraint
        if (vertex.hasRemovedRelations() || vertex.hasAddedRelations()) return true;
        return false;
    }
    /** True if a backend-loaded relation was deleted or replaced within this transaction. */
    @Override
    public boolean isDeleted(final VertexCentricQuery query, final TitanRelation result) {
        if (deletedRelations.containsKey(result.getLongId()) || result != ((InternalRelation) result).it()) return true;
        //Check if this relation is replaced by an added one due to a multiplicity constraint
        InternalRelationType type = (InternalRelationType)result.getType();
        InternalVertex vertex = query.getVertex();
        if (type.getMultiplicity().isConstrained() && vertex.hasAddedRelations()) {
            final RelationComparator comparator = new RelationComparator(vertex);
            // An added relation that compares equal occupies the same constrained slot.
            if (!Iterables.isEmpty(vertex.getAddedRelations(new Predicate<InternalRelation>() {
                @Override
                public boolean apply(@Nullable InternalRelation internalRelation) {
                    return comparator.compare((InternalRelation) result, internalRelation) == 0;
                }
            }))) return true;
        }
        return false;
    }
    /** Loads matching relations from the backend (via the vertex's relation cache). */
    @Override
    public Iterator<TitanRelation> execute(final VertexCentricQuery query, final SliceQuery sq, final Object exeInfo) {
        assert exeInfo==null;
        // New vertices have nothing persisted, so skip the backend entirely.
        if (query.getVertex().isNew())
            return Iterators.emptyIterator();
        final InternalVertex v = query.getVertex();
        EntryList iter = v.loadRelations(sq, new Retriever<SliceQuery, EntryList>() {
            @Override
            public EntryList get(SliceQuery query) {
                return graph.edgeQuery(v.getLongId(), query, txHandle);
            }
        });
        return RelationConstructor.readRelation(v, iter, StandardTitanTx.this).iterator();
    }
};
// Executor used to answer graph-centric (index-backed) queries. Final field; initialized
// in the constructor, which is outside this chunk — presumably to elementProcessorImpl,
// possibly wrapped — TODO confirm against the constructor.
public final QueryExecutor<GraphCentricQuery, TitanElement, JointIndexQuery> elementProcessor;
/**
 * Default graph-centric query executor: merges elements created/modified in this
 * transaction with index query results from the backend.
 */
public final QueryExecutor<GraphCentricQuery, TitanElement, JointIndexQuery> elementProcessorImpl = new QueryExecutor<GraphCentricQuery, TitanElement, JointIndexQuery>() {
    /**
     * Finds an equality predicate on a key with a simple internal vertex index, either
     * at the top level or inside an AND; returns null if none exists.
     */
    private PredicateCondition<PropertyKey, TitanElement> getEqualityCondition(Condition<TitanElement> condition) {
        if (condition instanceof PredicateCondition) {
            PredicateCondition<PropertyKey, TitanElement> pc = (PredicateCondition) condition;
            if (pc.getPredicate() == Cmp.EQUAL && TypeUtil.hasSimpleInternalVertexKeyIndex(pc.getKey())) return pc;
        } else if (condition instanceof And) {
            for (Condition<TitanElement> child : ((And<TitanElement>) condition).getChildren()) {
                PredicateCondition<PropertyKey, TitanElement> p = getEqualityCondition(child);
                if (p != null) return p;
            }
        }
        return null;
    }
    /** Returns elements created/changed in this transaction that match the query. */
    @Override
    public Iterator<TitanElement> getNew(final GraphCentricQuery query) {
        //If the query is unconstrained then we don't need to add new elements, so they will be picked up by getVertices()/getEdges() below
        if (query.numSubQueries()==1 && query.getSubQuery(0).getBackendQuery().isEmpty()) return Iterators.emptyIterator();
        Preconditions.checkArgument(query.getCondition().hasChildren(),"If the query is non-empty it needs to have a condition");
        if (query.getResultType() == ElementCategory.VERTEX && hasModifications()) {
            Preconditions.checkArgument(QueryUtil.isQueryNormalForm(query.getCondition()));
            PredicateCondition<PropertyKey, TitanElement> standardIndexKey = getEqualityCondition(query.getCondition());
            Iterator<TitanVertex> vertices;
            if (standardIndexKey == null) {
                // No indexed equality condition: collect every key referenced by the
                // condition, then gather vertices touched by added/deleted properties
                // on any of those keys.
                final Set<PropertyKey> keys = Sets.newHashSet();
                ConditionUtil.traversal(query.getCondition(), new Predicate<Condition<TitanElement>>() {
                    @Override
                    public boolean apply(@Nullable Condition<TitanElement> cond) {
                        Preconditions.checkArgument(cond.getType() != Condition.Type.LITERAL || cond instanceof PredicateCondition);
                        if (cond instanceof PredicateCondition)
                            keys.add(((PredicateCondition<PropertyKey, TitanElement>) cond).getKey());
                        return true;
                    }
                });
                Preconditions.checkArgument(!keys.isEmpty(), "Invalid query condition: %s", query.getCondition());
                Set<TitanVertex> vertexSet = Sets.newHashSet();
                for (TitanRelation r : addedRelations.getView(new Predicate<InternalRelation>() {
                    @Override
                    public boolean apply(@Nullable InternalRelation relation) {
                        return keys.contains(relation.getType());
                    }
                })) {
                    vertexSet.add(((TitanProperty) r).getVertex());
                }
                for (TitanRelation r : deletedRelations.values()) {
                    if (keys.contains(r.getType())) {
                        TitanVertex v = ((TitanProperty) r).getVertex();
                        if (!v.isRemoved()) vertexSet.add(v);
                    }
                }
                vertices = vertexSet.iterator();
            } else {
                // Indexed equality condition: candidates come straight from the
                // transaction's in-memory vertex index entries.
                vertices = Iterators.transform(newVertexIndexEntries.get(standardIndexKey.getValue(), standardIndexKey.getKey()).iterator(), new Function<TitanProperty, TitanVertex>() {
                    @Nullable
                    @Override
                    public TitanVertex apply(@Nullable TitanProperty o) {
                        return o.getVertex();
                    }
                });
            }
            // Candidates are only potential matches; re-check the full condition.
            return (Iterator) Iterators.filter(vertices, new Predicate<TitanVertex>() {
                @Override
                public boolean apply(@Nullable TitanVertex vertex) {
                    return query.matches(vertex);
                }
            });
        } else if ( (query.getResultType() == ElementCategory.EDGE || query.getResultType()==ElementCategory.PROPERTY)
                && !addedRelations.isEmpty()) {
            // Edges/properties added in this transaction, filtered by the query condition.
            return (Iterator) addedRelations.getView(new Predicate<InternalRelation>() {
                @Override
                public boolean apply(@Nullable InternalRelation relation) {
                    return query.getResultType().isInstance(relation) && !relation.isHidden() && query.matches(relation);
                }
            }).iterator();
        } else return Iterators.emptyIterator();
    }
    /** Any modification in the transaction can invalidate backend index results. */
    @Override
    public boolean hasDeletions(GraphCentricQuery query) {
        return hasModifications();
    }
    /** True if a backend-returned element no longer matches under this transaction's state. */
    @Override
    public boolean isDeleted(GraphCentricQuery query, TitanElement result) {
        if (result == null || result.isRemoved()) return true;
        else if (query.getResultType() == ElementCategory.VERTEX) {
            Preconditions.checkArgument(result instanceof InternalVertex);
            InternalVertex v = ((InternalVertex) result).it();
            if (v.hasAddedRelations() || v.hasRemovedRelations()) {
                return !query.matches(result);
            } else return false;
        } else if (query.getResultType() == ElementCategory.EDGE || query.getResultType()==ElementCategory.PROPERTY) {
            Preconditions.checkArgument(result.isLoaded() || result.isNew());
            //Loaded relations are immutable so we don't need to check those
            //New relations could be modified in this transaction to no longer match the query, hence we need to
            //check for this case and consider the relations deleted
            return result.isNew() && !query.matches(result);
        } else throw new IllegalArgumentException("Unexpected type: " + query.getResultType());
    }
    /** Answers the query from the backend indexes, or by full scan when no index applies. */
    @Override
    public Iterator<TitanElement> execute(final GraphCentricQuery query, final JointIndexQuery indexQuery, final Object exeInfo) {
        Iterator<TitanElement> iter;
        if (!indexQuery.isEmpty()) {
            // One retrieval per subquery; results are intersected with limit handling
            // and cached per (subquery, limit) in indexCache.
            List<QueryUtil.IndexCall<Object>> retrievals = new ArrayList<QueryUtil.IndexCall<Object>>();
            for (int i = 0; i < indexQuery.size(); i++) {
                final JointIndexQuery.Subquery subquery = indexQuery.getQuery(i);
                retrievals.add(new QueryUtil.IndexCall<Object>() {
                    @Override
                    public Collection<Object> call(int limit) {
                        final JointIndexQuery.Subquery adjustedQuery = subquery.updateLimit(limit);
                        try {
                            return indexCache.get(adjustedQuery, new Callable<List<Object>>() {
                                @Override
                                public List<Object> call() throws Exception {
                                    return indexSerializer.query(adjustedQuery, txHandle);
                                }
                            });
                        } catch (Exception e) {
                            throw new TitanException("Could not call index", e.getCause());
                        }
                    }
                });
            }
            List<Object> resultSet = QueryUtil.processIntersectingRetrievals(retrievals, indexQuery.getLimit());
            iter = Iterators.transform(resultSet.iterator(), getConversionFunction(query.getResultType()));
        } else {
            // No usable index: fall back to scanning all elements (unless forbidden).
            if (config.hasForceIndexUsage()) throw new TitanException("Could not find a suitable index to answer graph query and graph scans are disabled: " + query);
            log.warn("Query requires iterating over all vertices [{}]. For better performance, use indexes", query.getCondition());
            switch (query.getResultType()) {
                case VERTEX:
                    return (Iterator) getVertices().iterator();
                case EDGE:
                    return (Iterator) getEdges().iterator();
                case PROPERTY:
                    return new VertexCentricEdgeIterable(getInternalVertices(),RelationCategory.PROPERTY).iterator();
                default:
                    throw new IllegalArgumentException("Unexpected type: " + query.getResultType());
            }
        }
        return iter;
    }
};
/**
 * Returns the function that converts raw index-query results (vertex ids or
 * relation identifiers) into transaction-bound elements of the given category.
 *
 * @throws IllegalArgumentException for categories without a conversion function
 */
public Function<Object, ? extends TitanElement> getConversionFunction(final ElementCategory elementCategory) {
    switch (elementCategory) {
        case VERTEX:
            return vertexIDConversionFct;
        case EDGE:
            return edgeIDConversionFct;
        case PROPERTY:
            return propertyIDConversionFct;
        default:
            throw new IllegalArgumentException("Unexpected result type: " + elementCategory);
    }
}
// Converts a raw index result (expected to be a Long vertex id) into a vertex.
private final Function<Object, TitanVertex> vertexIDConversionFct = new Function<Object, TitanVertex>() {
    @Override
    public TitanVertex apply(@Nullable Object id) {
        Preconditions.checkNotNull(id);
        Preconditions.checkArgument(id instanceof Long);
        return getInternalVertex((Long) id);
    }
};
// Converts a raw index result (expected to be a RelationIdentifier) into an edge.
private final Function<Object, TitanEdge> edgeIDConversionFct = new Function<Object, TitanEdge>() {
    @Override
    public TitanEdge apply(@Nullable Object id) {
        Preconditions.checkNotNull(id);
        Preconditions.checkArgument(id instanceof RelationIdentifier);
        return ((RelationIdentifier)id).findEdge(StandardTitanTx.this);
    }
};
// Converts a raw index result (expected to be a RelationIdentifier) into a property.
private final Function<Object, TitanProperty> propertyIDConversionFct = new Function<Object, TitanProperty>() {
    @Override
    public TitanProperty apply(@Nullable Object id) {
        Preconditions.checkNotNull(id);
        Preconditions.checkArgument(id instanceof RelationIdentifier);
        return ((RelationIdentifier)id).findProperty(StandardTitanTx.this);
    }
};
/**
 * Returns a graph-centric query builder bound to this transaction.
 */
@Override
public GraphCentricQueryBuilder query() {
    return new GraphCentricQueryBuilder(this, graph.getIndexSerializer());
}
/**
 * Returns a builder for a raw (string-based) query against the named external index.
 */
@Override
public TitanIndexQuery indexQuery(String indexName, String query) {
    return new IndexQueryBuilder(this,indexSerializer).setIndex(indexName).setQuery(query);
}
/**
 * Returns all vertices that have the given property key equal to the given value.
 * Answered via the graph-centric query machinery so indexes can be leveraged.
 */
@Override
public Iterable<TitanVertex> getVertices(PropertyKey key, Object attribute) {
    Preconditions.checkNotNull(attribute);
    Preconditions.checkNotNull(key);
    return (Iterable) query().has(key, Cmp.EQUAL, attribute).vertices();
}
/**
 * Returns all edges that have the given property key equal to the given value.
 * Answered via the graph-centric query machinery so indexes can be leveraged.
 */
@Override
public Iterable<TitanEdge> getEdges(PropertyKey key, Object attribute) {
    Preconditions.checkNotNull(attribute);
    Preconditions.checkNotNull(key);
    return (Iterable) query().has(key, Cmp.EQUAL, attribute).edges();
}
/*
* ------------------------------------ Transaction State ------------------------------------
*/
/**
 * Commits this transaction: persists added/deleted relations through the graph, or
 * commits the backend handle directly when nothing changed. On failure the backend
 * transaction is rolled back and a {@link TitanException} is thrown. The transaction
 * is always closed afterwards.
 */
@Override
public synchronized void commit() {
    Preconditions.checkArgument(isOpen(), "The transaction has already been closed");
    boolean success = false;
    if (null != config.getGroupName()) {
        MetricManager.INSTANCE.getCounter(config.getGroupName(), "tx", "commit").inc();
    }
    try {
        if (hasModifications()) {
            graph.commit(addedRelations.getAll(), deletedRelations.values(), this);
        } else {
            // Read-only transaction: only the backend handle needs committing.
            txHandle.commit();
        }
        success = true;
    } catch (Exception e) {
        try {
            txHandle.rollback();
        } catch (BackendException e1) {
            // Fix: preserve both failures. The original commit exception stays the cause;
            // the rollback failure is attached as a suppressed exception instead of being
            // silently dropped.
            TitanException rollbackFailure = new TitanException("Could not rollback after a failed commit", e);
            rollbackFailure.addSuppressed(e1);
            throw rollbackFailure;
        }
        throw new TitanException("Could not commit transaction due to exception during persistence", e);
    } finally {
        // Always release resources; record a failure metric when commit did not succeed.
        close();
        if (null != config.getGroupName() && !success) {
            MetricManager.INSTANCE.getCounter(config.getGroupName(), "tx", "commit.exceptions").inc();
        }
    }
}
/**
 * Rolls back this transaction by rolling back the backend handle. The transaction is
 * always closed afterwards, and a failure metric is recorded if rollback itself failed.
 */
@Override
public synchronized void rollback() {
    Preconditions.checkArgument(isOpen(), "The transaction has already been closed");
    boolean success = false;
    if (null != config.getGroupName()) {
        MetricManager.INSTANCE.getCounter(config.getGroupName(), "tx", "rollback").inc();
    }
    try {
        txHandle.rollback();
        success = true;
    } catch (Exception e) {
        throw new TitanException("Could not rollback transaction due to exception", e);
    } finally {
        close();
        if (null != config.getGroupName() && !success) {
            MetricManager.INSTANCE.getCounter(config.getGroupName(), "tx", "rollback.exceptions").inc();
        }
    }
}
/**
 * Marks the transaction closed, deregisters it from the graph, and releases the
 * vertex cache. Called from both commit() and rollback().
 */
private void close() {
    //TODO: release non crucial data structures to preserve memory?
    isOpen = false;
    graph.closeTransaction(this);
    vertexCache.close();
}
/** Returns true while the transaction has not been committed or rolled back. */
@Override
public final boolean isOpen() {
    return isOpen;
}
/** Returns true once the transaction has been committed or rolled back. */
@Override
public final boolean isClosed() {
    return !isOpen;
}
/**
 * Returns true if any relation was added or deleted within this transaction.
 */
@Override
public boolean hasModifications() {
    return !(addedRelations.isEmpty() && deletedRelations.isEmpty());
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.dataFlow;
import com.intellij.codeInspection.dataFlow.instructions.AssignInstruction;
import com.intellij.codeInspection.dataFlow.instructions.ConditionalGotoInstruction;
import com.intellij.codeInspection.dataFlow.instructions.ExpressionPushingInstruction;
import com.intellij.codeInspection.dataFlow.instructions.Instruction;
import com.intellij.codeInspection.dataFlow.rangeSet.LongRangeSet;
import com.intellij.codeInspection.dataFlow.types.*;
import com.intellij.codeInspection.dataFlow.value.*;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiExpression;
import com.intellij.psi.PsiType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import com.siyeh.ig.psiutils.ExpressionUtils;
import one.util.streamex.EntryStream;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.function.Predicate;
public class TrackingDfaMemoryState extends DfaMemoryStateImpl {
private MemoryStateChange myHistory;
/**
 * Creates a fresh tracking state with an empty change history.
 */
protected TrackingDfaMemoryState(DfaValueFactory factory) {
    super(factory);
    myHistory = null;
}
/**
 * Copy constructor: the copy shares the source state's history node (histories are
 * immutable linked structures, so sharing is safe).
 */
protected TrackingDfaMemoryState(TrackingDfaMemoryState toCopy) {
    super(toCopy);
    myHistory = toCopy.myHistory;
}
/**
 * Creates a copy of this state that shares the current history node.
 */
@NotNull
@Override
public TrackingDfaMemoryState createCopy() {
    TrackingDfaMemoryState copy = new TrackingDfaMemoryState(this);
    return copy;
}
/**
 * After two memory states are merged, merges their change histories as well so the
 * combined state can still explain how it was reached.
 */
@Override
protected void afterMerge(DfaMemoryStateImpl other) {
    super.afterMerge(other);
    // Only tracking states are ever merged into a tracking state.
    assert other instanceof TrackingDfaMemoryState;
    myHistory = myHistory.merge(((TrackingDfaMemoryState)other).myHistory);
}
/**
 * Extracts, per variable, the set of relations currently implied by this memory state:
 * equality with a known constant, inequality with excluded ("anti") constants,
 * equalities within equivalence classes, and orderings/inequalities between distinct
 * equivalence classes.
 */
private Map<DfaVariableValue, Set<Relation>> getRelations() {
    Map<DfaVariableValue, Set<Relation>> result = new HashMap<>();
    forVariableStates((var, state) -> {
        // Constant-valued variable: var == constant.
        if (state.myDfType instanceof DfConstantType) {
            result.computeIfAbsent(var, k -> new HashSet<>()).add(new Relation(RelationType.EQ, getFactory().fromDfType(state.myDfType)));
        }
        // Anti-constant: var != each excluded value (only when the var's type is known).
        if (state.myDfType instanceof DfAntiConstantType) {
            Set<?> notValues = ((DfAntiConstantType<?>)state.myDfType).getNotValues();
            PsiType varType = var.getType();
            if (!notValues.isEmpty() && varType != null) {
                for (Object notValue : notValues) {
                    result.computeIfAbsent(var, k -> new HashSet<>()).add(
                        new Relation(RelationType.NE, getFactory().fromDfType(DfTypes.constant(notValue, varType))));
                }
            }
        }
    });
    // Every pair of distinct variables inside an equivalence class is mutually EQ.
    for (EqClass eqClass : getNonTrivialEqClasses()) {
        for (DfaVariableValue var : eqClass) {
            Set<Relation> set = result.computeIfAbsent(var, k -> new HashSet<>());
            for (DfaVariableValue eqVar : eqClass) {
                if (eqVar != var) {
                    set.add(new Relation(RelationType.EQ, eqVar));
                }
            }
        }
    }
    // Distinct class pairs yield LT (ordered) or NE (unordered), recorded in both directions.
    for (DistinctPairSet.DistinctPair classPair : getDistinctClassPairs()) {
        EqClass first = classPair.getFirst();
        EqClass second = classPair.getSecond();
        RelationType plain = classPair.isOrdered() ? RelationType.LT : RelationType.NE;
        RelationType flipped = Objects.requireNonNull(plain.getFlipped());
        for (DfaVariableValue var1 : first) {
            for (DfaVariableValue var2 : second) {
                result.computeIfAbsent(var1, k -> new HashSet<>()).add(new Relation(plain, var2));
                result.computeIfAbsent(var2, k -> new HashSet<>()).add(new Relation(flipped, var1));
            }
        }
    }
    return result;
}
/**
 * Appends a new history node describing how this state differs from {@code previous}
 * after executing {@code instruction}, together with the current top-of-stack value.
 */
void recordChange(Instruction instruction, TrackingDfaMemoryState previous) {
    Map<DfaVariableValue, Change> result = getChangeMap(previous);
    // An empty stack contributes an "unknown" value to the history node.
    DfaValue value = isEmptyStack() ? getFactory().getUnknown() : peek();
    myHistory = MemoryStateChange.create(myHistory, instruction, result, value);
}
/**
 * Computes, per variable, the difference between this state and {@code previous}:
 * type (DfType) changes plus relations that were added or removed. Variables with
 * no difference are absent from the returned map.
 */
@NotNull
private Map<DfaVariableValue, Change> getChangeMap(TrackingDfaMemoryState previous) {
    Map<DfaVariableValue, Change> changeMap = new HashMap<>();
    // Consider every variable known to either state.
    Set<DfaVariableValue> varsToCheck = new HashSet<>();
    previous.forVariableStates((value, state) -> varsToCheck.add(value));
    forVariableStates((value, state) -> varsToCheck.add(value));
    for (DfaVariableValue value : varsToCheck) {
        DfType newType = getVariableState(value).myDfType;
        DfType oldType = previous.getVariableState(value).myDfType;
        if (!newType.equals(oldType)) {
            changeMap.put(value, new Change(Collections.emptySet(), Collections.emptySet(), oldType, newType));
        }
    }
    // Diff the relation sets of both states, reusing the same scratch set.
    Map<DfaVariableValue, Set<Relation>> oldRelations = previous.getRelations();
    Map<DfaVariableValue, Set<Relation>> newRelations = getRelations();
    varsToCheck.clear();
    varsToCheck.addAll(oldRelations.keySet());
    varsToCheck.addAll(newRelations.keySet());
    for (DfaVariableValue value : varsToCheck) {
        Set<Relation> oldValueRelations = oldRelations.getOrDefault(value, Collections.emptySet());
        Set<Relation> newValueRelations = newRelations.getOrDefault(value, Collections.emptySet());
        if (!oldValueRelations.equals(newValueRelations)) {
            Set<Relation> added = new HashSet<>(newValueRelations);
            added.removeAll(oldValueRelations);
            Set<Relation> removed = new HashSet<>(oldValueRelations);
            removed.removeAll(newValueRelations);
            // Merge relation changes into any type change already recorded for this variable.
            changeMap.compute(
                value, (v, change) -> change == null
                    ? Change.create(removed, added, DfTypes.BOTTOM, DfTypes.BOTTOM)
                    : Change.create(removed, added, change.myOldType, change.myNewType));
        }
    }
    return changeMap;
}
/**
 * Returns the accumulated change history of this memory state.
 * NOTE(review): {@code MemoryStateChange.create} may return {@code null}, so this can
 * presumably be {@code null} as well — confirm against the field initializer (not in view).
 */
MemoryStateChange getHistory() {
  return myHistory;
}
/**
 * Records bridge changes. Bridge states are states which process the same input instruction,
 * but as a result jump to another place in the program (other than this state's target).
 * A bridge change is the difference between this state and all states which have a different
 * target instruction. Bridges allow tracking what else is processed in parallel with the current
 * state, including states which may never arrive at the target place. E.g. consider two states:
 *
 * <pre>
 * this_state    other_state
 *     |              |
 *     some_condition      &lt;-- bridge is recorded here
 *     |(true)        |(false)
 *     |              return
 *     |
 *     always_true_condition &lt;-- explanation is requested here
 * </pre>
 *
 * Thanks to the bridge we know that {@code some_condition} could be important for
 * the {@code always_true_condition} explanation.
 *
 * @param instruction the instruction at which this state and the bridge states diverge
 * @param bridgeStates other memory states that process the same instruction but jump elsewhere
 */
void addBridge(Instruction instruction, List<TrackingDfaMemoryState> bridgeStates) {
  Map<DfaVariableValue, Change> changeMap = null;
  for (TrackingDfaMemoryState bridge : bridgeStates) {
    Map<DfaVariableValue, Change> newChangeMap = getChangeMap(bridge);
    if (changeMap == null) {
      changeMap = newChangeMap;
    } else {
      // Keep only changes common to every bridge state: intersect keys, then unite the changes.
      changeMap.keySet().retainAll(newChangeMap.keySet());
      changeMap.replaceAll((var, old) -> old.unite(newChangeMap.get(var)));
      // unite() returns null when two changes have nothing in common; drop such entries.
      changeMap.values().removeIf(Objects::isNull);
    }
    if (changeMap.isEmpty()) {
      // Nothing is common to all bridges; no point examining the rest.
      break;
    }
  }
  if (changeMap != null && !changeMap.isEmpty()) {
    myHistory = myHistory.withBridge(instruction, changeMap);
  }
}
/**
 * An immutable relation of a variable to another value, e.g. {@code EQ someConstant}
 * or {@code LT otherVar}. Used as a set element, hence the equals/hashCode contract.
 */
static class Relation {
  final @NotNull RelationType myRelationType;
  final @NotNull DfaValue myCounterpart;

  Relation(@NotNull RelationType type, @NotNull DfaValue counterpart) {
    myRelationType = type;
    myCounterpart = counterpart;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) return true;
    if (o == null || o.getClass() != getClass()) return false;
    Relation that = (Relation)o;
    return myRelationType == that.myRelationType && myCounterpart.equals(that.myCounterpart);
  }

  @Override
  public int hashCode() {
    return Objects.hash(myRelationType, myCounterpart);
  }

  @Override
  public String toString() {
    return myRelationType + " " + myCounterpart;
  }
}
/**
 * Describes how a single variable changed between two memory states:
 * which relations were removed and added, and how its {@link DfType} changed.
 * A {@code DfTypes.BOTTOM} old/new type pair means "no type change recorded".
 */
static class Change {
  final @NotNull Set<Relation> myRemovedRelations;
  final @NotNull Set<Relation> myAddedRelations;
  final @NotNull DfType myOldType;
  final @NotNull DfType myNewType;

  private Change(@NotNull Set<Relation> removedRelations, @NotNull Set<Relation> addedRelations, @NotNull DfType oldType, @NotNull DfType newType) {
    // Normalize empty sets to the shared immutable instance.
    myRemovedRelations = removedRelations.isEmpty() ? Collections.emptySet() : removedRelations;
    myAddedRelations = addedRelations.isEmpty() ? Collections.emptySet() : addedRelations;
    myOldType = oldType;
    myNewType = newType;
  }

  /**
   * Factory that returns {@code null} for a completely empty change
   * (no relation changes and no type transition).
   */
  @Nullable
  static Change create(Set<Relation> removedRelations, Set<Relation> addedRelations, DfType oldType, DfType newType) {
    if (removedRelations.isEmpty() && addedRelations.isEmpty() && oldType == DfTypes.BOTTOM && newType == DfTypes.BOTTOM) {
      return null;
    }
    return new Change(removedRelations, addedRelations, oldType, newType);
  }

  /**
   * Creates a Change which reflects changes actual for both this and other change
   * @param other other change to unite with
   * @return new change or null if this and other change has nothing in common
   */
  @Nullable
  Change unite(Change other) {
    Set<Relation> added = new HashSet<>(ContainerUtil.intersection(myAddedRelations, other.myAddedRelations));
    Set<Relation> removed = new HashSet<>(ContainerUtil.intersection(myRemovedRelations, other.myRemovedRelations));
    DfType oldType = myOldType.join(other.myOldType);
    DfType newType = myNewType.join(other.myNewType);
    // If joining erased the distinction between old and new, record "no type change".
    if (oldType.equals(newType)) {
      oldType = newType = DfTypes.BOTTOM;
    }
    return create(removed, added, oldType, newType);
  }

  @Override
  public String toString() {
    // .without("") drops the empty rendering of DfTypes.BOTTOM appended above.
    String removed = StreamEx.of(myRemovedRelations).map(Object::toString).append(myOldType.toString())
      .without("").joining(", ");
    String added = StreamEx.of(myAddedRelations).map(Object::toString).append(myNewType.toString())
      .without("").joining(", ");
    return (removed.isEmpty() ? "" : "-{" + removed + "} ") + (added.isEmpty() ? "" : "+{" + added + "}");
  }
}
/**
 * A single node in the (possibly branching) history of memory-state changes.
 * Each node records the instruction executed, per-variable changes, the top-of-stack
 * value after the instruction, and optional "bridge" changes (see addBridge).
 * A node created by {@link #merge} has several predecessors; {@code myCursor}
 * selects which predecessor branch is currently being traversed.
 */
static final class MemoryStateChange {
  // Predecessor entries; more than one element means this node merges several histories.
  private final @NotNull List<MemoryStateChange> myPrevious;
  final @NotNull Instruction myInstruction;
  final @NotNull Map<DfaVariableValue, Change> myChanges;
  final @NotNull DfaValue myTopOfStack;
  final @NotNull Map<DfaVariableValue, Change> myBridgeChanges;
  // Index into myPrevious of the branch currently followed by getPrevious().
  int myCursor = 0;

  private MemoryStateChange(@NotNull List<MemoryStateChange> previous,
                            @NotNull Instruction instruction,
                            @NotNull Map<DfaVariableValue, Change> changes,
                            @NotNull DfaValue topOfStack,
                            @NotNull Map<DfaVariableValue, Change> bridgeChanges) {
    myPrevious = previous;
    myInstruction = instruction;
    myChanges = changes;
    myTopOfStack = topOfStack;
    myBridgeChanges = bridgeChanges;
  }

  /** Resets traversal cursors along the currently selected chain back to the first branch. */
  void reset() {
    for (MemoryStateChange change = this; change != null; change = change.getPrevious()) {
      change.myCursor = 0;
    }
  }

  /**
   * Advances to the next alternative predecessor branch (depth-first over the branch tree).
   * @return true if another branch remains to be visited
   */
  boolean advance() {
    if (myCursor < myPrevious.size() && !myPrevious.get(myCursor).advance()) {
      myCursor++;
      MemoryStateChange previous = getPrevious();
      if (previous != null) {
        previous.reset();
      }
    }
    return myCursor < myPrevious.size();
  }

  /** Finds the most recent history entry (before this one) that pushed exactly {@code expression}. */
  @Contract("null -> null")
  @Nullable
  MemoryStateChange findExpressionPush(@Nullable PsiExpression expression) {
    if (expression == null) return null;
    return findChange(change -> change.getExpression() == expression, false);
  }

  /**
   * Finds the most recent entry that pushed {@code expression} itself, or a sub-expression
   * of it that shares the same pass-through parent (e.g. a parenthesized operand).
   */
  @Contract("null -> null")
  @Nullable
  MemoryStateChange findSubExpressionPush(@Nullable PsiExpression expression) {
    if (expression == null) return null;
    PsiElement topElement = ExpressionUtils.getPassThroughParent(expression);
    return findChange(change -> {
      PsiExpression changeExpression = change.getExpression();
      if (changeExpression == null) return false;
      return changeExpression == expression ||
             (PsiTreeUtil.isAncestor(expression, changeExpression, true) &&
              ExpressionUtils.getPassThroughParent(changeExpression) == topElement);
    }, false);
  }

  /**
   * Finds the entry where a relation matching {@code relationPredicate} was added for
   * {@code value}, either directly, via a bridge, or by an assignment to {@code value}.
   */
  MemoryStateChange findRelation(DfaVariableValue value, @NotNull Predicate<Relation> relationPredicate, boolean startFromSelf) {
    return findChange(change -> {
      if (change.myInstruction instanceof AssignInstruction && change.myTopOfStack == value) return true;
      Change varChange = change.myChanges.get(value);
      if (varChange != null && varChange.myAddedRelations.stream().anyMatch(relationPredicate)) return true;
      Change bridgeVarChange = change.myBridgeChanges.get(value);
      return bridgeVarChange != null && bridgeVarChange.myAddedRelations.stream().anyMatch(relationPredicate);
    }, startFromSelf);
  }

  /**
   * Walks the history backwards to find where the fact extracted by {@code extractor}
   * about {@code value} was established. Falls back to the value's inherent type
   * (for variables) or its declared DfType when no history entry changed the fact.
   */
  @NotNull
  <T> FactDefinition<T> findFact(DfaValue value, FactExtractor<T> extractor) {
    if (value instanceof DfaVariableValue) {
      for (MemoryStateChange change = this; change != null; change = change.getPrevious()) {
        FactDefinition<T> factPair = factFromChange(extractor, change, change.myChanges.get(value));
        if (factPair != null) return factPair;
        // Bridge changes of a conditional goto are skipped: the condition itself explains them.
        if (!(change.myInstruction instanceof ConditionalGotoInstruction)) {
          factPair = factFromChange(extractor, change, change.myBridgeChanges.get(value));
          if (factPair != null) return factPair;
        }
        // An assignment to the variable: the fact originates at the assignment site.
        if (change.myInstruction instanceof AssignInstruction && change.myTopOfStack == value && change.getPrevious() != null) {
          FactDefinition<T> fact = change.getPrevious().findFact(value, extractor);
          return new FactDefinition<>(change, fact.myFact);
        }
      }
      return new FactDefinition<>(null, extractor.extract(((DfaVariableValue)value).getInherentType()));
    }
    if (value instanceof DfaBinOpValue) {
      // For a binary-op value, combine the facts about both operands.
      FactDefinition<T> left = findFact(((DfaBinOpValue)value).getLeft(), extractor);
      FactDefinition<T> right = findFact(((DfaBinOpValue)value).getRight(), extractor);
      if (left.myFact instanceof LongRangeSet && right.myFact instanceof LongRangeSet) {
        @SuppressWarnings("unchecked") T result = (T)((LongRangeSet)left.myFact).binOpFromToken(
          ((DfaBinOpValue)value).getTokenType(), ((LongRangeSet)right.myFact), PsiType.LONG.equals(value.getType()));
        return new FactDefinition<>(null, Objects.requireNonNull(result));
      }
    }
    return new FactDefinition<>(null, extractor.extract(value.getDfType()));
  }

  /** @return the predecessor on the currently selected branch, or null past the last branch */
  @Nullable
  MemoryStateChange getPrevious() {
    return myCursor == myPrevious.size() ? null : myPrevious.get(myCursor);
  }

  /** @return this entry, or its predecessor if this entry is a synthetic merge node */
  public MemoryStateChange getNonMerge() {
    MemoryStateChange change = myInstruction instanceof MergeInstruction ? getPrevious() : this;
    assert change == null || !(change.myInstruction instanceof MergeInstruction);
    return change;
  }

  /** Extracts a fact from {@code varChange} if the change actually altered that fact. */
  @Nullable
  private static <T> FactDefinition<T> factFromChange(FactExtractor<T> extractor, MemoryStateChange change, Change varChange) {
    if (varChange != null) {
      T newFact = extractor.extract(varChange.myNewType);
      T oldFact = extractor.extract(varChange.myOldType);
      if (!newFact.equals(oldFact)) {
        return new FactDefinition<>(change, newFact);
      }
    }
    return null;
  }

  /** Walks backwards (optionally including this entry) until {@code predicate} matches. */
  @Nullable
  private MemoryStateChange findChange(@NotNull Predicate<MemoryStateChange> predicate, boolean startFromSelf) {
    for (MemoryStateChange change = startFromSelf ? this : getPrevious(); change != null; change = change.getPrevious()) {
      if (predicate.test(change)) {
        return change;
      }
    }
    return null;
  }

  /** @return the PSI expression this entry pushed or branched on, if any */
  @Nullable
  PsiExpression getExpression() {
    // Only whole-expression pushes count (a non-null range means a partial push).
    if (myInstruction instanceof ExpressionPushingInstruction &&
        ((ExpressionPushingInstruction<?>)myInstruction).getExpressionRange() == null) {
      return ((ExpressionPushingInstruction<?>)myInstruction).getExpression();
    }
    if (myInstruction instanceof ConditionalGotoInstruction) {
      return ObjectUtils.tryCast(((ConditionalGotoInstruction)myInstruction).getPsiAnchor(), PsiExpression.class);
    }
    return null;
  }

  /**
   * Merges two histories into a single node. Existing merge nodes are flattened
   * so that a merge's predecessors are never merge nodes themselves.
   */
  @NotNull
  public MemoryStateChange merge(MemoryStateChange change) {
    if (change == this) return this;
    Set<MemoryStateChange> previous = new LinkedHashSet<>();
    if (myInstruction instanceof MergeInstruction) {
      previous.addAll(myPrevious);
    } else {
      previous.add(this);
    }
    if (change.myInstruction instanceof MergeInstruction) {
      previous.addAll(change.myPrevious);
    } else {
      previous.add(change);
    }
    if (previous.size() == 1) {
      return previous.iterator().next();
    }
    return new MemoryStateChange(new ArrayList<>(previous), new MergeInstruction(), Collections.emptyMap(), myTopOfStack.getFactory().getUnknown(),
                                 Collections.emptyMap());
  }

  /**
   * Attaches bridge changes for {@code instruction}. If this entry is for a different
   * instruction (except a goto anchored at the same expression), a new entry is created.
   */
  MemoryStateChange withBridge(@NotNull Instruction instruction, @NotNull Map<DfaVariableValue, Change> bridge) {
    if (myInstruction != instruction) {
      if (instruction instanceof ConditionalGotoInstruction &&
          getExpression() == ((ConditionalGotoInstruction)instruction).getPsiAnchor()) {
        // Same anchor expression: attach the bridge to this entry instead.
        instruction = myInstruction;
      } else {
        return new MemoryStateChange(
          Collections.singletonList(this), instruction, Collections.emptyMap(), myTopOfStack.getFactory().getUnknown(), bridge);
      }
    }
    assert myBridgeChanges.isEmpty();
    return new MemoryStateChange(myPrevious, instruction, myChanges, myTopOfStack, bridge);
  }

  /**
   * Creates a history entry, or returns {@code previous} unchanged when there is
   * nothing worth recording (no changes and an unknown top-of-stack).
   */
  @Nullable
  static MemoryStateChange create(@Nullable MemoryStateChange previous,
                                  @NotNull Instruction instruction,
                                  @NotNull Map<DfaVariableValue, Change> result,
                                  @NotNull DfaValue value) {
    if (result.isEmpty() && DfaTypeValue.isUnknown(value)) {
      return previous;
    }
    return new MemoryStateChange(ContainerUtil.createMaybeSingletonList(previous), instruction, result, value, Collections.emptyMap());
  }

  /** @return the currently selected chain in execution order (oldest first) */
  MemoryStateChange[] flatten() {
    List<MemoryStateChange> changes = StreamEx.iterate(this, Objects::nonNull, change -> change.getPrevious()).toList();
    Collections.reverse(changes);
    return changes.toArray(new MemoryStateChange[0]);
  }

  /** Debug dump of the currently selected chain, one entry per line. */
  String dump() {
    return StreamEx.of(flatten()).joining("\n");
  }

  @Override
  public String toString() {
    return myInstruction.getIndex() + " " + myInstruction + ": " + myTopOfStack +
           (myChanges.isEmpty() ? "" :
            "; Changes: " + EntryStream.of(myChanges).join(": ", "\n\t", "").joining()) +
           (myBridgeChanges.isEmpty() ? "" :
            "; Bridge changes: " + EntryStream.of(myBridgeChanges).join(": ", "\n\t", "").joining());
  }
}
/**
 * Synthetic instruction used to mark history nodes that merge several branches.
 * Never executed: accept() produces no successor states.
 */
private static class MergeInstruction extends Instruction {
  @Override
  public DfaInstructionState[] accept(DataFlowRunner runner, DfaMemoryState stateBefore, InstructionVisitor visitor) {
    return DfaInstructionState.EMPTY_ARRAY;
  }

  @Override
  public String toString() {
    return "STATE_MERGE";
  }
}
/**
 * A fact about a value paired with the history entry where it was established.
 * A {@code null} change means the fact did not originate from a particular entry
 * (e.g. it comes from the value's inherent type).
 */
static class FactDefinition<T> {
  final @Nullable MemoryStateChange myChange;
  final @NotNull T myFact;

  FactDefinition(@Nullable MemoryStateChange change, @NotNull T fact) {
    myFact = fact;
    myChange = change;
  }

  @Override
  public String toString() {
    return String.format("%s @ %s", myFact, myChange);
  }
}
/**
 * Strategy that extracts a particular kind of fact (nullability, type constraint,
 * numeric range) from a {@link DfType}.
 */
interface FactExtractor<T> {
  @NotNull T extract(DfType type);

  /** Extractor for nullability facts. */
  static FactExtractor<DfaNullability> nullability() {
    return type -> DfaNullability.fromDfType(type);
  }

  /** Extractor for type-constraint facts. */
  static FactExtractor<TypeConstraint> constraint() {
    return type -> TypeConstraint.fromDfType(type);
  }

  /** Extractor for long-range facts. */
  static FactExtractor<LongRangeSet> range() {
    return type -> DfLongType.extractRange(type);
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.sling.testing.clients.osgi;
import org.apache.http.Header;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.sling.commons.json.JSONArray;
import org.apache.sling.commons.json.JSONException;
import org.apache.sling.commons.json.JSONObject;
import org.apache.sling.testing.clients.ClientException;
import org.apache.sling.testing.clients.SlingHttpResponse;
import org.apache.sling.testing.clients.util.JsonUtils;
import org.apache.sling.testing.clients.util.poller.AbstractPoller;
import org.apache.sling.testing.clients.SlingClient;
import org.apache.sling.testing.clients.SlingClientConfig;
import org.apache.sling.testing.clients.util.FormEntityBuilder;
import org.apache.sling.testing.clients.util.HttpUtils;
import org.apache.sling.testing.clients.util.poller.PathPoller;
import org.codehaus.jackson.JsonNode;
import org.osgi.framework.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;
import java.util.*;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;
import static org.apache.http.HttpStatus.SC_MOVED_TEMPORARILY;
import static org.apache.http.HttpStatus.SC_OK;
/**
* A client that wraps the Felix OSGi Web Console REST API calls.
* @see <a href=http://felix.apache.org/documentation/subprojects/apache-felix-web-console/web-console-restful-api.html>
* Web Console RESTful API</a>
*/
public class OsgiConsoleClient extends SlingClient {
private static final Logger LOG = LoggerFactory.getLogger(OsgiConsoleClient.class);

/**
 * All System Console REST API calls go to /system/console and below.
 * NOTE(review): these are per-instance finals; they could be static constants —
 * confirm nothing relies on instance scope before changing.
 */
private final String CONSOLE_ROOT_URL = "/system/console";

/**
 * The URL for configuration requests (Felix config manager)
 */
private final String URL_CONFIGURATION = CONSOLE_ROOT_URL + "/configMgr";

/**
 * The URL for bundle requests
 */
private final String URL_BUNDLES = CONSOLE_ROOT_URL + "/bundles";

/**
 * The URL for components requests
 */
private final String URL_COMPONENTS = CONSOLE_ROOT_URL + "/components";

// JSON keys used in the Felix web console bundle/component responses
public static final String JSON_KEY_ID = "id";
public static final String JSON_KEY_VERSION = "version";
public static final String JSON_KEY_DATA = "data";
public static final String JSON_KEY_STATE = "state";
/**
 * Default constructor. Simply calls {@link SlingClient#SlingClient(URI, String, String)}.
 *
 * @param serverUrl the URL to the server under test
 * @param userName the user name used for authentication
 * @param password the password for this user
 * @throws ClientException if the client cannot be instantiated
 */
public OsgiConsoleClient(URI serverUrl, String userName, String password) throws ClientException {
    super(serverUrl, userName, password);
}
/**
 * Constructor used by adaptTo() and InternalBuilder classes. Should not be called directly in code.
 *
 * @param http http client to be used for requests
 * @param config sling specific configs
 * @throws ClientException if the client cannot be instantiated
 */
public OsgiConsoleClient(CloseableHttpClient http, SlingClientConfig config) throws ClientException {
    super(http, config);
}
/**
 * Returns the wrapper around the bundles info json returned by the console.
 *
 * @param expectedStatus list of accepted statuses of the response
 * @return all the bundles info
 * @throws ClientException if the response status does not match any of the expectedStatus
 */
public BundlesInfo getBundlesInfo(int... expectedStatus) throws ClientException {
    SlingHttpResponse response =
            this.doGet(URL_BUNDLES + ".json", HttpUtils.getExpectedStatus(SC_OK, expectedStatus));
    return new BundlesInfo(JsonUtils.getJsonNodeFromString(response.getContent()));
}
/**
 * Returns the wrapper around a single bundle's info json.
 *
 * @param id the id of the bundle
 * @param expectedStatus list of accepted statuses of the response
 * @return the bundle info
 * @throws ClientException if the response status does not match any of the expectedStatus
 */
public BundleInfo getBundleInfo(String id, int... expectedStatus) throws ClientException {
    String url = URL_BUNDLES + "/" + id + ".json";
    SlingHttpResponse response = this.doGet(url);
    HttpUtils.verifyHttpStatus(response, HttpUtils.getExpectedStatus(SC_OK, expectedStatus));
    return new BundleInfo(JsonUtils.getJsonNodeFromString(response.getContent()));
}
/**
 * Returns the wrapper around the components info json.
 *
 * @param expectedStatus list of accepted statuses of the response
 * @return the components info
 * @throws ClientException if the response status does not match any of the expectedStatus
 */
public ComponentsInfo getComponentsInfo(int... expectedStatus) throws ClientException {
    SlingHttpResponse response = this.doGet(URL_COMPONENTS + ".json");
    HttpUtils.verifyHttpStatus(response, HttpUtils.getExpectedStatus(SC_OK, expectedStatus));
    return new ComponentsInfo(JsonUtils.getJsonNodeFromString(response.getContent()));
}
/**
 * Returns the wrapper around a single component's info json.
 * NOTE(review): unlike the sibling methods this takes a single int, not varargs —
 * kept as-is for binary compatibility.
 *
 * @param id the id of the component
 * @param expectedStatus accepted status of the response
 * @return the component info
 * @throws ClientException if the response status does not match the expectedStatus
 */
public ComponentInfo getComponentInfo(String id, int expectedStatus) throws ClientException {
    String url = URL_COMPONENTS + "/" + id + ".json";
    SlingHttpResponse response = this.doGet(url);
    HttpUtils.verifyHttpStatus(response, HttpUtils.getExpectedStatus(SC_OK, expectedStatus));
    return new ComponentInfo(JsonUtils.getJsonNodeFromString(response.getContent()));
}
//
// OSGi configurations
//
/**
 * Returns a map of all properties set for the config referenced by the PID, where the map keys
 * are the property names. Single-valued properties map to a String, multi-valued ones to a String[].
 *
 * @param pid the pid of the configuration
 * @param expectedStatus list of accepted statuses of the response
 * @return the properties as a map (empty if the config has no properties)
 * @throws ClientException if the response status does not match any of the expectedStatus
 */
public Map<String, Object> getConfiguration(String pid, int... expectedStatus) throws ClientException {
    SlingHttpResponse response = this.doPost(URL_CONFIGURATION + "/" + pid, null);
    HttpUtils.verifyHttpStatus(response, HttpUtils.getExpectedStatus(SC_OK, expectedStatus));
    JsonNode rootNode = JsonUtils.getJsonNodeFromString(response.getContent());
    Map<String, Object> props = new HashMap<String, Object>();
    JsonNode properties = rootNode.get("properties");
    if (properties == null) {
        return props;
    }
    Iterator<String> names = properties.getFieldNames();
    while (names.hasNext()) {
        String propName = names.next();
        // Single-valued property
        JsonNode single = properties.get(propName).get("value");
        if (single != null) {
            props.put(propName, single.getValueAsText());
        } else {
            // Multi-valued property
            JsonNode multi = properties.get(propName).get("values");
            if (multi != null) {
                List<String> values = new ArrayList<String>();
                for (Iterator<JsonNode> it = multi.getElements(); it.hasNext();) {
                    values.add(it.next().getValueAsText());
                }
                props.put(propName, values.toArray(new String[values.size()]));
            }
        }
    }
    return props;
}
/**
 * Returns a map of all properties set for the config referenced by the PID, where the map keys
 * are the property names. The method waits until the configuration has been set.
 *
 * @param waitCount The number of maximum wait intervals of 500ms.
 *                  Between each wait interval, the method polls the backend to see if the configuration has been set.
 * @param pid pid
 * @param expectedStatus expected response status
 * @return the config properties
 * @throws ClientException if the response status does not match any of the expectedStatus
 * @throws InterruptedException to mark this operation as "waiting"
 */
public Map<String, Object> getConfigurationWithWait(long waitCount, String pid, int... expectedStatus)
        throws ClientException, InterruptedException {
    ConfigurationPoller poller = new ConfigurationPoller(500L, waitCount, pid, expectedStatus);
    // Fall back to a direct fetch if the poller never saw the config appear.
    return poller.callUntilCondition() ? poller.getConfig() : getConfiguration(pid, expectedStatus);
}
/**
 * Sets properties of a config referenced by its PID. The properties to be edited are passed as
 * a map of (property name, value) pairs; String and String[] values are supported, other value
 * types are silently skipped (but still listed in "propertylist").
 *
 * @param PID Persistent identity string
 * @param factoryPID Factory persistent identity string or {@code null}
 * @param configProperties map of properties (may be empty)
 * @param expectedStatus expected response status
 * @return the location of the config, or {@code null} if the response carried no single Location header
 * @throws ClientException if the response status does not match any of the expectedStatus
 */
public String editConfiguration(String PID, String factoryPID, Map<String, Object> configProperties, int... expectedStatus)
        throws ClientException {
    FormEntityBuilder builder = FormEntityBuilder.create();
    builder.addParameter("apply", "true");
    builder.addParameter("action", "ajaxConfigManager");
    // send factory PID if set
    if (factoryPID != null) {
        builder.addParameter("factoryPid", factoryPID);
    }
    // add properties to edit, collecting their names for the "propertylist" parameter
    StringBuilder propertyList = new StringBuilder();
    for (Map.Entry<String, Object> entry : configProperties.entrySet()) {
        Object value = entry.getValue();
        if (value instanceof String) {
            builder.addParameter(entry.getKey(), (String) value);
        } else if (value instanceof String[]) {
            for (String s : (String[]) value) {
                builder.addParameter(entry.getKey(), s);
            }
        }
        // BUGFIX: build the comma-separated list without a trailing comma instead of
        // stripping it afterwards; substring(0, length - 1) threw
        // StringIndexOutOfBoundsException when configProperties was empty.
        if (propertyList.length() > 0) {
            propertyList.append(',');
        }
        propertyList.append(entry.getKey());
    }
    builder.addParameter("propertylist", propertyList.toString());
    // make the request
    SlingHttpResponse resp = this.doPost(URL_CONFIGURATION + "/" + PID, builder.build());
    // check the returned status
    HttpUtils.verifyHttpStatus(resp, HttpUtils.getExpectedStatus(SC_MOVED_TEMPORARILY, expectedStatus));
    Header[] locationHeader = resp.getHeaders("Location");
    if (locationHeader != null && locationHeader.length == 1) {
        // strip "<URL_CONFIGURATION>/" to return just the (possibly factory-generated) pid
        return locationHeader[0].getValue().substring(URL_CONFIGURATION.length() + 1);
    } else {
        return null;
    }
}
/**
 * Sets properties of a config referenced by its PID. The properties to be edited are passed as
 * a map of property (name,value) pairs. The method waits until the configuration has been set.
 *
 * @param waitCount The number of maximum wait intervals of 500ms.
 *                  Between each wait interval, the method polls the backend to see if the configuration has been set.
 * @param PID Persistent identity string
 * @param factoryPID Factory persistent identity string or {@code null}
 * @param configProperties map of properties
 * @param expectedStatus expected response status
 * @return the pid
 * @throws ClientException if the response status does not match any of the expectedStatus
 * @throws InterruptedException to mark this operation as "waiting"
 */
public String editConfigurationWithWait(int waitCount, String PID, String factoryPID, Map<String, Object> configProperties,
                                        int... expectedStatus) throws ClientException, InterruptedException {
    // NOTE(review): editConfiguration may return null (no Location header); the poll below
    // would then target a literal "null" pid — confirm whether that case can occur here.
    String pid = editConfiguration(PID, factoryPID, configProperties, expectedStatus);
    getConfigurationWithWait(waitCount, pid);
    return pid;
}
/**
 * Delete the config referenced by the PID.
 *
 * @param pid pid
 * @param expectedStatus expected response status
 * @return the sling response
 * @throws ClientException if the response status does not match any of the expectedStatus
 */
public SlingHttpResponse deleteConfiguration(String pid, int... expectedStatus) throws ClientException {
    FormEntityBuilder form = FormEntityBuilder.create();
    form.addParameter("apply", "1");
    form.addParameter("delete", "1");
    SlingHttpResponse response = this.doPost(URL_CONFIGURATION + "/" + pid, form.build());
    HttpUtils.verifyHttpStatus(response, HttpUtils.getExpectedStatus(SC_OK, expectedStatus));
    return response;
}
//
// Bundles
//
/**
 * Uninstall a bundle via the Felix web console.
 *
 * @param symbolicName the symbolic name of the bundle to uninstall
 * @return the sling response
 * @throws ClientException if the bundle cannot be found or the request fails
 */
public SlingHttpResponse uninstallBundle(String symbolicName) throws ClientException {
    final long id = getBundleId(symbolicName);
    LOG.info("Uninstalling bundle {} with bundleId {}", symbolicName, id);
    FormEntityBuilder form = FormEntityBuilder.create();
    form.addParameter("action", "uninstall");
    return this.doPost(getBundlePath(symbolicName), form.build(), 200);
}
/**
 * Install a bundle using the Felix webconsole HTTP interface, at the default start level.
 *
 * @param f the bundle file
 * @param startBundle whether to start the bundle or not
 * @return the sling response
 * @throws ClientException if the request fails
 */
public SlingHttpResponse installBundle(File f, boolean startBundle) throws ClientException {
    return installBundle(f, startBundle, 0);
}
/**
 * Install a bundle using the Felix webconsole HTTP interface, with a specific start level.
 *
 * @param f the bundle file
 * @param startBundle whether to start the bundle or not
 * @param startLevel the start level; values &lt;= 0 leave the default start level
 * @return the sling response
 * @throws ClientException if the request fails
 */
public SlingHttpResponse installBundle(File f, boolean startBundle, int startLevel) throws ClientException {
    // Build the multipart request for the Felix webconsole bundle-install action
    MultipartEntityBuilder entity = MultipartEntityBuilder.create();
    entity.addTextBody("action", "install");
    entity.addBinaryBody("bundlefile", f);
    if (startBundle) {
        entity.addTextBody("bundlestart", "true");
    }
    if (startLevel > 0) {
        entity.addTextBody("bundlestartlevel", String.valueOf(startLevel));
        LOG.info("Installing bundle {} at start level {}", f.getName(), startLevel);
    } else {
        LOG.info("Installing bundle {} at default start level", f.getName());
    }
    return this.doPost(URL_BUNDLES, entity.build(), 302);
}
/**
 * Install a bundle using the Felix webconsole HTTP interface and wait for it to be installed.
 *
 * @param f the bundle file
 * @param startBundle whether to start the bundle or not
 * @param startLevel the start level of the bundle; values &lt;= 0 mean the default start level
 *                   (note: installBundle only sends a start level when it is &gt; 0)
 * @param waitTime how long to wait between retries of checking the bundle
 * @param retries how many times to check for the bundle to be installed, until giving up
 * @return true if the bundle was successfully installed, false otherwise
 * @throws ClientException if the install request fails or the symbolic name cannot be read
 * @throws InterruptedException to mark this operation as "waiting"
 */
public boolean installBundleWithRetry(File f, boolean startBundle, int startLevel, int waitTime, int retries)
        throws ClientException, InterruptedException {
    installBundle(f, startBundle, startLevel);
    try {
        // The symbolic name is read from the jar manifest to poll for the installed bundle.
        return this.checkBundleInstalled(OsgiConsoleClient.getBundleSymbolicName(f), waitTime, retries);
    } catch (IOException e) {
        throw new ClientException("Cannot get bundle symbolic name", e);
    }
}
/**
 * Checks that the specified bundle is installed, retrying every {@code waitTime}
 * milliseconds until it is installed or the number of retries is exhausted.
 *
 * @param symbolicName the name of the bundle
 * @param waitTime how many milliseconds to wait between retries
 * @param retries the number of retries
 * @return true if the bundle was installed before the retries ran out, false otherwise
 * @throws InterruptedException to mark this operation as "waiting"
 */
public boolean checkBundleInstalled(String symbolicName, int waitTime, int retries) throws InterruptedException {
    return new PathPoller(this, getBundlePath(symbolicName, ".json"), waitTime, retries).callAndWait();
}
/**
 * Get the numeric id of a bundle.
 *
 * @param symbolicName the symbolic name of the bundle
 * @return the bundle id
 * @throws ClientException if the bundle data cannot be retrieved or parsed
 */
public long getBundleId(String symbolicName) throws ClientException {
    try {
        return getBundleData(symbolicName).getLong(JSON_KEY_ID);
    } catch (JSONException e) {
        throw new ClientException("Cannot get id from json", e);
    }
}
/**
 * Get the version of a bundle.
 *
 * @param symbolicName the symbolic name of the bundle
 * @return the bundle version string
 * @throws ClientException if the bundle data cannot be retrieved or parsed
 */
public String getBundleVersion(String symbolicName) throws ClientException {
    try {
        return getBundleData(symbolicName).getString(JSON_KEY_VERSION);
    } catch (JSONException e) {
        throw new ClientException("Cannot get version from json", e);
    }
}
/**
 * Get the state of a bundle (e.g. "Active").
 *
 * @param symbolicName the symbolic name of the bundle
 * @return the bundle state string
 * @throws ClientException if the bundle data cannot be retrieved or parsed
 */
public String getBundleState(String symbolicName) throws ClientException {
    try {
        return getBundleData(symbolicName).getString(JSON_KEY_STATE);
    } catch (JSONException e) {
        throw new ClientException("Cannot get state from json", e);
    }
}
/**
 * Starts a bundle by POSTing action=start to its console URL.
 *
 * @param symbolicName the name of the bundle
 * @throws ClientException if the request fails
 */
public void startBundle(String symbolicName) throws ClientException {
    final String path = getBundlePath(symbolicName);
    LOG.info("Starting bundle {} via {}", symbolicName, path);
    FormEntityBuilder form = FormEntityBuilder.create();
    form.addParameter("action", "start");
    this.doPost(path, form.build(), SC_OK);
}
/**
 * Starts a bundle and waits for it to be started.
 * NOTE(review): the lowercase 'w' in the method name is a typo, but the name is part of
 * the public API and is kept for compatibility.
 *
 * @param symbolicName the name of the bundle
 * @param waitTime how many milliseconds to wait between retries
 * @param retries the number of retries
 * @throws ClientException if the start request fails
 * @throws InterruptedException to mark this operation as "waiting"
 */
public void startBundlewithWait(String symbolicName, int waitTime, int retries)
        throws ClientException, InterruptedException {
    // start a bundle
    startBundle(symbolicName);
    // wait for it to be in the started state
    checkBundleInstalled(symbolicName, waitTime, retries);
}
/**
* Calls PackageAdmin.refreshPackages to force re-wiring of all the bundles.
* @throws ClientException
*/
public void refreshPackages() throws ClientException {
LOG.info("Refreshing packages.");
FormEntityBuilder builder = FormEntityBuilder.create();
builder.addParameter("action", "refreshPackages");
this.doPost(URL_BUNDLES, builder.build(), 200);
}
//
// private methods
//
private String getBundlePath(String symbolicName, String extension) {
return getBundlePath(symbolicName) + extension;
}
private String getBundlePath(String symbolicName) {
return URL_BUNDLES + "/" + symbolicName;
}
private JSONObject getBundleData(String symbolicName) throws ClientException {
// This returns a data structure like
// {"status":"Bundle information: 173 bundles in total - all 173 bundles active.","s":[173,171,2,0,0],"data":
// [
// {"id":0,"name":"System Bundle","fragment":false,"stateRaw":32,"state":"Active","version":"3.0.7","symbolicName":"org.apache.felix.framework","category":""},
// ]}
final String path = getBundlePath(symbolicName, ".json");
final String content = this.doGet(path, SC_OK).getContent();
try {
final JSONObject root = new JSONObject(content);
if (!root.has(JSON_KEY_DATA)) {
throw new ClientException(path + " does not provide '" + JSON_KEY_DATA + "' element, JSON content=" + content);
}
final JSONArray data = root.getJSONArray(JSON_KEY_DATA);
if (data.length() < 1) {
throw new ClientException(path + "." + JSON_KEY_DATA + " is empty, JSON content=" + content);
}
final JSONObject bundle = data.getJSONObject(0);
if (!bundle.has(JSON_KEY_STATE)) {
throw new ClientException(path + ".data[0].state missing, JSON content=" + content);
}
return bundle;
} catch (JSONException e) {
throw new ClientException("Cannot get json", e);
}
}
//
// static methods
//
/**
* Get the symbolic name from a bundle file
* @param bundleFile
* @return
* @throws IOException
*/
public static String getBundleSymbolicName(File bundleFile) throws IOException {
String name = null;
final JarInputStream jis = new JarInputStream(new FileInputStream(bundleFile));
try {
final Manifest m = jis.getManifest();
if (m == null) {
throw new IOException("Manifest is null in " + bundleFile.getAbsolutePath());
}
name = m.getMainAttributes().getValue(Constants.BUNDLE_SYMBOLICNAME);
} finally {
jis.close();
}
return name;
}
/**
* Get the version form a bundle file
* @param bundleFile
* @return
* @throws IOException
*/
public static String getBundleVersionFromFile(File bundleFile) throws IOException {
String version = null;
final JarInputStream jis = new JarInputStream(new FileInputStream(bundleFile));
try {
final Manifest m = jis.getManifest();
if(m == null) {
throw new IOException("Manifest is null in " + bundleFile.getAbsolutePath());
}
version = m.getMainAttributes().getValue(Constants.BUNDLE_VERSION);
} finally {
jis.close();
}
return version;
}
class ConfigurationPoller extends AbstractPoller {
private final String pid;
int[] expectedStatus;
public Map<String, Object> config;
public ConfigurationPoller(long waitInterval, long waitCount, String pid, int... expectedStatus) {
super(waitInterval, waitCount);
this.pid = pid;
this.config = null;
this.expectedStatus = expectedStatus;
}
@Override
public boolean call() {
try {
config = getConfiguration(pid, expectedStatus);
} catch (ClientException e) {
LOG.warn("Couldn't get config " + pid, e);
}
return true;
}
@Override
public boolean condition() {
return null != config;
}
public Map<String, Object> getConfig() {
return config;
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.box;
import java.util.Map;
import com.box.boxjavalibv2.BoxClient;
import com.box.boxjavalibv2.resourcemanagers.IBoxResourceManager;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.component.box.internal.BoxApiCollection;
import org.apache.camel.component.box.internal.BoxApiName;
import org.apache.camel.component.box.internal.BoxClientHelper;
import org.apache.camel.component.box.internal.BoxConstants;
import org.apache.camel.component.box.internal.BoxPropertiesHelper;
import org.apache.camel.component.box.internal.CachedBoxClient;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.component.AbstractApiEndpoint;
import org.apache.camel.util.component.ApiMethod;
import org.apache.camel.util.component.ApiMethodPropertiesHelper;
/**
* For uploading, downloading and managing files, folders, groups, collaborations, etc on box DOT com.
*/
@UriEndpoint(scheme = "box", title = "Box", syntax = "box:apiName/methodName", consumerClass = BoxConsumer.class, consumerPrefix = "consumer", label = "api,file,cloud")
public class BoxEndpoint extends AbstractApiEndpoint<BoxApiName, BoxConfiguration> {

    private static final String SHARED_LINK_PROPERTY = "sharedLink";
    private static final String SHARED_PASSWORD_PROPERTY = "sharedPassword";

    @UriParam
    private BoxConfiguration configuration;

    // cached client
    private CachedBoxClient cachedBoxClient;

    // proxy manager for the API selected by this endpoint; created lazily in getApiProxy
    private IBoxResourceManager apiProxy;

    // configuration values for shared links, copied from the endpoint configuration
    private String sharedLink;
    private String sharedPassword;

    // true when this endpoint reuses the component's singleton client
    private boolean boxClientShared;

    public BoxEndpoint(String uri, BoxComponent component,
                       BoxApiName apiName, String methodName, BoxConfiguration endpointConfiguration) {
        super(uri, component, apiName, methodName, BoxApiCollection.getCollection().getHelper(apiName), endpointConfiguration);
        this.configuration = endpointConfiguration;
    }

    /**
     * Creates a producer for this endpoint. POLL_EVENTS is consumer-only and is rejected.
     */
    @Override
    public Producer createProducer() throws Exception {
        // validate producer APIs
        if (getApiName() == BoxApiName.POLL_EVENTS) {
            throw new IllegalArgumentException("Producer endpoints do not support endpoint prefix "
                + BoxApiName.POLL_EVENTS.getName());
        }
        return new BoxProducer(this);
    }

    /**
     * Creates a consumer for this endpoint. Only POLL_EVENTS supports consumers,
     * and the inBody option is not allowed for consumer endpoints.
     */
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        // make sure inBody is not set for consumers
        if (inBody != null) {
            throw new IllegalArgumentException("Option inBody is not supported for consumer endpoint");
        }
        // validate consumer APIs
        if (getApiName() != BoxApiName.POLL_EVENTS) {
            throw new IllegalArgumentException("Consumer endpoint only supports endpoint prefix "
                + BoxApiName.POLL_EVENTS.getName());
        }
        final BoxConsumer consumer = new BoxConsumer(this, processor);
        // also set consumer.* properties
        configureConsumer(consumer);
        return consumer;
    }

    @Override
    protected ApiMethodPropertiesHelper<BoxConfiguration> getPropertiesHelper() {
        return BoxPropertiesHelper.getHelper();
    }

    // NOTE: added @Override — this implements the abstract method from AbstractApiEndpoint
    @Override
    protected String getThreadProfileName() {
        return BoxConstants.THREAD_PROFILE_NAME;
    }

    @Override
    protected void afterConfigureProperties() {
        // create client eagerly, a good way to validate configuration
        createBoxClient();

        this.sharedLink = configuration.getSharedLink();
        this.sharedPassword = configuration.getSharedPassword();

        // validate shared endpoints: the shared link can also come in as a header property,
        // so a missing value here is only a warning, not an error
        switch (getApiName()) {
        case SHARED_COMMENTS:
        case SHARED_FILES:
        case SHARED_FOLDERS:
        case SHARED_ITEMS:
            if (ObjectHelper.isEmpty(sharedLink)) {
                log.warn("Header properties sharedLink and sharedPassword MUST be provided for endpoint {}",
                    getEndpointUri());
            }
            break;
        default:
            // non-shared APIs need no validation here
        }
    }

    private void createBoxClient() {
        final BoxComponent component = getComponent();
        // the client is shared when this endpoint uses the component-level configuration
        // (previously getComponent() was redundantly called a second time here)
        this.boxClientShared = configuration.equals(component.getConfiguration());
        if (boxClientShared) {
            // get shared singleton client from Component
            cachedBoxClient = component.getBoxClient();
        } else {
            cachedBoxClient = BoxClientHelper.createBoxClient(configuration);
        }
    }

    @Override
    public BoxComponent getComponent() {
        return (BoxComponent) super.getComponent();
    }

    @Override
    public void interceptProperties(Map<String, Object> properties) {
        // set shared link and password from configuration if not set as header properties
        if (!properties.containsKey(SHARED_LINK_PROPERTY) && !ObjectHelper.isEmpty(sharedLink)) {
            properties.put(SHARED_LINK_PROPERTY, sharedLink);
        }
        if (!properties.containsKey(SHARED_PASSWORD_PROPERTY) && !ObjectHelper.isEmpty(sharedPassword)) {
            properties.put(SHARED_PASSWORD_PROPERTY, sharedPassword);
        }
    }

    @Override
    public Object getApiProxy(ApiMethod method, Map<String, Object> args) {
        if (apiProxy == null) {
            // create API proxy lazily
            createApiProxy(args);
        }
        return apiProxy;
    }

    private void createApiProxy(Map<String, Object> args) {
        // get shared link and password from args (these intentionally shadow the fields:
        // interceptProperties has already merged the configured values into args)
        final String sharedLink = (String) args.get("sharedLink");
        final String sharedPassword = (String) args.get("sharedPassword");

        // shared APIs require a shared link at this point
        switch (apiName) {
        case SHARED_COMMENTS:
        case SHARED_FILES:
        case SHARED_FOLDERS:
        case SHARED_ITEMS:
            if (ObjectHelper.isEmpty(sharedLink)) {
                throw new IllegalArgumentException("Missing required property sharedLink");
            }
            break;
        default:
            // non-shared APIs need no shared link
        }

        final BoxClient boxClient = cachedBoxClient.getBoxClient();
        switch (apiName) {
        case COLLABORATIONS:
            apiProxy = boxClient.getCollaborationsManager();
            break;
        case COMMENTS:
            apiProxy = boxClient.getCommentsManager();
            break;
        case EVENTS:
            apiProxy = boxClient.getEventsManager();
            break;
        case FILES:
            apiProxy = boxClient.getFilesManager();
            break;
        case FOLDERS:
            apiProxy = boxClient.getFoldersManager();
            break;
        case GROUPS:
            apiProxy = boxClient.getGroupsManager();
            break;
        case SEARCH:
            apiProxy = boxClient.getSearchManager();
            break;
        case SHARED_FILES:
            apiProxy = boxClient.getSharedFilesManager(sharedLink, sharedPassword);
            break;
        case SHARED_FOLDERS:
            apiProxy = boxClient.getSharedFoldersManager(sharedLink, sharedPassword);
            break;
        case SHARED_COMMENTS:
            apiProxy = boxClient.getSharedCommentsManager(sharedLink, sharedPassword);
            break;
        case SHARED_ITEMS:
            apiProxy = boxClient.getSharedItemsManager(sharedLink, sharedPassword);
            break;
        case USERS:
            apiProxy = boxClient.getUsersManager();
            break;
        default:
            // POLL_EVENTS has no proxy; the consumer drives the events API directly
        }
    }

    @Override
    protected void doStart() throws Exception {
        BoxClientHelper.getOAuthToken(configuration, cachedBoxClient);
    }

    @Override
    protected void doStop() throws Exception {
        try {
            if (!boxClientShared) {
                // while there is no way to suspend BoxClient, we can close idle connections to be nice
                BoxClientHelper.closeIdleConnections(cachedBoxClient);
            }
        } finally {
            super.doStop();
        }
    }

    @Override
    public void doShutdown() throws Exception {
        try {
            // cleanup if BoxClient is not shared
            if (!boxClientShared) {
                BoxClientHelper.shutdownBoxClient(configuration, cachedBoxClient);
            }
        } finally {
            cachedBoxClient = null;
            super.doShutdown();
        }
    }

    public CachedBoxClient getBoxClient() {
        return cachedBoxClient;
    }
}
| |
/**
* Copyright (c) 2014 Samsung Electronics, Inc.,
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.samsung.sec.dexter.cppcheck.plugin;
import java.util.Map;
import org.apache.log4j.Logger;
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
import org.eclipse.cdt.core.parser.ParserLanguage;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
import com.google.common.base.Charsets;
import com.google.common.base.Strings;
import com.google.common.io.Files;
import com.samsung.sec.dexter.core.analyzer.AnalysisConfig;
import com.samsung.sec.dexter.core.analyzer.AnalysisResult;
import com.samsung.sec.dexter.core.analyzer.ResultFileConstant;
import com.samsung.sec.dexter.core.checker.Checker;
import com.samsung.sec.dexter.core.checker.CheckerConfig;
import com.samsung.sec.dexter.core.config.DexterConfig;
import com.samsung.sec.dexter.core.config.DexterConfig.LANGUAGE;
import com.samsung.sec.dexter.core.defect.PreOccurence;
import com.samsung.sec.dexter.core.exception.DexterRuntimeException;
import com.samsung.sec.dexter.core.plugin.PluginVersion;
import com.samsung.sec.dexter.core.util.DexterUtil;
import com.samsung.sec.dexter.util.CppUtil;
import com.samsung.sec.dexter.util.TranslationUnitFactory;
/**
 * SAX handler that converts a cppcheck XML result file into Dexter
 * {@link PreOccurence} defects and adds them to an {@link AnalysisResult}.
 */
public class ResultFileHandler extends DefaultHandler {
    private PreOccurence currentOccurence;
    private AnalysisResult result;
    private AnalysisConfig config;
    private CheckerConfig checkerConfig;
    private String sourcecode;
    private IASTTranslationUnit translationUnit;
    private String fileExtension;
    private final static Logger logger = Logger.getLogger(ResultFileHandler.class);

    public ResultFileHandler(final AnalysisResult result, final AnalysisConfig config, final CheckerConfig checkerConfig){
        this.config = config;
        this.checkerConfig = checkerConfig;
        this.result = result;
        sourcecode = config.getSourcecodeThatReadIfNotExist();
        // the AST is used later to resolve class/method names for defect locations
        translationUnit = TranslationUnitFactory.getASTTranslationUnit(sourcecode, ParserLanguage.CPP,
                config.getSourceFileFullPath());
        fileExtension = Files.getFileExtension(config.getFileName());//sourceFilePath.substring(sourceFilePath.indexOf('.'));
    }

    /* (non-Javadoc)
     * @see org.xml.sax.helpers.DefaultHandler#startDocument()
     */
    @Override
    public void startDocument() throws SAXException {
        super.startDocument();
    }

    /* (non-Javadoc)
     * @see org.xml.sax.helpers.DefaultHandler#endDocument()
     */
    @Override
    public void endDocument() throws SAXException {
        super.endDocument();
    }

    /**
     * Handles cppcheck's {@code <error>} elements (one defect each) and the nested
     * {@code <location>} elements (file/line information for the current defect).
     */
    @Override
    public void startElement(final String uri, final String localName, final String qName, final Attributes attributes) throws SAXException {
        super.startElement(uri, localName, qName, attributes);

        if("error".equals(qName)){
            final String checkerCode = attributes.getValue("id").toLowerCase();
            currentOccurence = new PreOccurence();
            currentOccurence.setLanguage(LANGUAGE.CPP.toString());

            // NOTE(review): replace("'", "'") is a no-op; it was presumably meant to
            // unescape "&apos;" — kept as-is to preserve behavior, verify upstream
            if(checkerCode.startsWith(DexterConfig.SECURITY_CHECK_PREFIX)){
                currentOccurence.setMessage(attributes.getValue("msg").replace("'", "'"));
            }
            else{
                currentOccurence.setMessage(attributes.getValue("verbose").replace("'", "'"));
            }

            currentOccurence.setToolName(CppcheckDexterPlugin.PLUGIN_NAME);
            currentOccurence.setFileName(config.getFileName());
            currentOccurence.setModulePath(config.getModulePath());
            currentOccurence.setCheckerCode(checkerCode);

            try{
                Checker checker = checkerConfig.getChecker(checkerCode);
                currentOccurence.setSeverityCode(checker.getSeverityCode());
                currentOccurence.setCategoryName(checker.getCategoryName());
            } catch (DexterRuntimeException e){
                // unknown checker: register it on the fly unless running CLI with
                // an explicit checker-enable option
                logger.info(e.getMessage());
                if (!(DexterConfig.getInstance().getRunMode().equals(DexterConfig.RunMode.CLI)
                        && DexterConfig.getInstance().isCheckerEnableOption())) {
                    Checker checker = new Checker(checkerCode, checkerCode,
                            PluginVersion.fromImplementationVersion(CppcheckDexterPlugin.class).getVersion(), true);
                    if ("true".equals(attributes.getValue("inconclusive"))) {
                        // inconclusive findings are kept but disabled
                        checker.setSeverityCode("ETC");
                        checker.setActive(false);
                    } else {
                        setSeverityForNewChecker(attributes, checker);
                    }
                    checkerConfig.addChecker(checker);
                }
                logger.info("Found new checker(" + checkerCode + ") in " + config.getSourceFileFullPath());
            }
        } else if("location".equals(qName)){
            final String fileName = attributes.getValue("file");

            // ignore locations that point into other files than the analyzed one
            if(!result.getSourceFileFullPath().equals(DexterUtil.refinePath(fileName))){
                logger.debug("target file and defect detected file are not same");
                logger.debug("target file: " + result.getSourceFileFullPath());
                logger.debug("detected file: " + fileName);
                return;
            }

            currentOccurence.setStartLine(Integer.parseInt(attributes.getValue("line")));
            currentOccurence.setEndLine(Integer.parseInt(attributes.getValue("line")));
            currentOccurence.setCharStart(-1);
            currentOccurence.setCharEnd(-1);

            String locationMsg = attributes.getValue("msg");
            if(Strings.isNullOrEmpty(locationMsg) == false){
                // BUGFIX: append the location message itself; previously the SAX
                // localName (the element name "location") was appended by mistake
                currentOccurence.setMessage(currentOccurence.getMessage() + " " + locationMsg);
            }

            // resolve the enclosing class/method names from the AST, when available
            Map<String, String> nameMap = CppUtil.extractModuleName(translationUnit, sourcecode, currentOccurence.getStartLine());
            if(Strings.isNullOrEmpty(nameMap.get(ResultFileConstant.CLASS_NAME)) == false){
                currentOccurence.setClassName(nameMap.get(ResultFileConstant.CLASS_NAME));
            }
            if(Strings.isNullOrEmpty(nameMap.get(ResultFileConstant.METHOD_NAME)) == false){
                currentOccurence.setMethodName(nameMap.get(ResultFileConstant.METHOD_NAME));
            }
        }
    }

    private void setSeverityForNewChecker(final Attributes attributes, Checker checker) {
        // do not use new checker before reviewing in SE
        checker.setSeverityCode("ETC");
        /*
        switch(attributes.getValue("severity")){
            case "error" :
                checker.setSeverityCode("CRI");
                break;
            case "warning" :
                checker.setSeverityCode("MAJ");
                break;
            case "style":
            case "performance":
            case "portability":
                checker.setSeverityCode("MIN");
                break;
            default :
                checker.setSeverityCode("ETC");
        }
        */
    }

    /**
     * On {@code </error>}, commits the accumulated occurrence to the result —
     * unless its checker is inactive or no valid location was seen.
     */
    @Override
    public void endElement(final String uri, final String localName, final String qName) throws SAXException {
        super.endElement(uri, localName, qName);

        if("error".equals(qName)){
            if(checkerConfig.isActiveChecker(currentOccurence.getCheckerCode()) == false){
                return;
            }
            if (currentOccurence.getStartLine() != -1) {
                result.addDefectWithPreOccurence(currentOccurence);
            } else {
                logger.warn("Not added defect(start line is -1) : " + currentOccurence.toJson());
            }
        }
    }
}
| |
package org.zeroturnaround.jenkins.reporter;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import javax.xml.parsers.SAXParser;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
import org.zeroturnaround.jenkins.reporter.model.Build;
import org.zeroturnaround.jenkins.reporter.model.JenkinsView;
import org.zeroturnaround.jenkins.reporter.model.Job;
import org.zeroturnaround.jenkins.reporter.model.TestReport;
/**
 * Reads a Jenkins view through its XML remote API and builds a
 * {@link JenkinsView} model with its jobs, their last completed builds,
 * matrix child jobs, and test reports.
 */
public class JenkinsViewAnalyser {
    private static final Logger log = LoggerFactory.getLogger(JenkinsViewAnalyser.class); // NOSONAR
    private final XPath xpath;
    private final SAXParser saxParser;
    private final JenkinsHttpClient jhc;

    private static final int SECONDS_IN_MINUTE = 60;
    private static final int SECONDS_IN_HOUR = SECONDS_IN_MINUTE * 60;
    private static final int MILLISECONDS_IN_SECOND = 1000;

    public JenkinsViewAnalyser(XPath xpath, SAXParser saxParser, JenkinsHttpClient jhc) {
        this.xpath = xpath;
        this.saxParser = saxParser;
        this.jhc = jhc;
    }

    /**
     * Fetches the complete data for the view at the given URL, including all jobs.
     */
    public JenkinsView getViewData(URI viewUrl) {
        JenkinsView viewData = new JenkinsView();
        viewData.setName(getViewName(viewUrl));
        viewData.setUrl(getViewURL(viewUrl));
        viewData.setJobsTotal(getJobCount(viewUrl));
        viewData.setJobs(readJobs(viewUrl));
        return viewData;
    }

    /** Returns the canonical URL of the view as reported by Jenkins itself. */
    private URI getViewURL(URI viewUrl) {
        try {
            Document doc = jhc.fetchAsXMLDocument(viewUrl.toASCIIString() + "/api/xml?tree=name,url");
            URI uri = new URI(doc.getElementsByTagName("url").item(0).getTextContent());
            return uri;
        }
        catch (DOMException e) {
            throw new ProcessingException(e);
        }
        catch (URISyntaxException e) {
            throw new ProcessingException(e);
        }
    }

    /** Returns the display name of the view as reported by Jenkins. */
    private String getViewName(URI viewUrl) {
        Document doc;
        doc = jhc.fetchAsXMLDocument(viewUrl.toASCIIString() + "/api/xml?tree=name,url");
        return doc.getElementsByTagName("name").item(0).getTextContent();
    }

    /** Counts all jobs in the view (before any name-prefix filtering). */
    private int getJobCount(URI uri) {
        final String fullUrl = uri.toASCIIString() + "/api/xml?wrapper=jobs&tree=jobs[name,url,color]";
        log.debug("Counting total number of jobs for '{}'", fullUrl);
        NodeList nodes;
        try {
            Document doc = jhc.fetchAsXMLDocument(fullUrl);
            final XPathExpression expr = xpath.compile("//job");
            nodes = (NodeList) expr.evaluate(doc, XPathConstants.NODESET);
        }
        catch (XPathExpressionException e) {
            throw new ProcessingException(e);
        }
        return nodes.getLength();
    }

    /**
     * Reads all jobs of the view and enriches each with its last completed build
     * and (for matrix jobs) its child jobs. Jobs without any completed build are
     * dropped from the result.
     */
    private Collection<Job> readJobs(URI uri) {
        final String fullUrl = uri.toASCIIString() + "/api/xml?xpath=//job&wrapper=jobs&tree=jobs[name,url,color]";
        log.debug("Reading information about failing jobs for '{}'", fullUrl);
        Document doc = jhc.fetchAsXMLDocument(fullUrl);
        final Collection<Job> jobs = parseJobsFromXml(doc, "job", true);

        // call jenkins after parsing xml
        log.info("Fetching last completed build info for " + jobs.size() + " jobs");
        for (final Iterator<Job> iter = jobs.iterator(); iter.hasNext();) {
            final Job job = iter.next();
            try {
                job.setLastCompletedBuild(getLastCompletedBuild(job));
                job.setChildren(readChildrenJobs(job));
            }
            // sometimes there is no last completed build
            // we can ignore the job
            catch (DocumentNotFoundException e) {
                job.setLastCompletedBuild(null);
                iter.remove();
            }
        }
        return jobs;
    }

    /** Reads the active configurations (child jobs) of a matrix job. */
    private Collection<Job> readChildrenJobs(Job parentJob) {
        final String uri = parentJob.getUrl().toASCIIString() + "/api/xml?xpath=/matrixProject/activeConfiguration&wrapper=activeConfigurations";
        // 'uri' is already a String; the redundant toString() call was removed
        log.debug("Reading child jobs of matrix job '{}' at '{}'", parentJob.getName(), uri);
        final Document doc = jhc.fetchAsXMLDocument(uri);
        final Collection<Job> jobs = parseJobsFromXml(doc, "activeConfiguration", false);
        if (!jobs.isEmpty()) {
            log.info("Fetching last completed build info for " + jobs.size() + " child jobs of " + parentJob.getName() + "...");
            for (final Iterator<Job> iter = jobs.iterator(); iter.hasNext();) {
                final Job job = iter.next();
                try {
                    Build lastCompletedBuild = getLastCompletedBuild(job);
                    job.setLastCompletedBuild(lastCompletedBuild);
                }
                // sometimes there is no last completed build
                // we can ignore the job
                catch (DocumentNotFoundException e) {
                    job.setLastCompletedBuild(null);
                    iter.remove();
                }
            }
        }
        return jobs;
    }

    /**
     * Extracts Job entries from an XML listing.
     *
     * @param doc the fetched XML document
     * @param jobNodeName element name to select ("job" or "activeConfiguration")
     * @param filterByPrefix when true, jobs not matching Main.JOB_NAME_PREFIX are skipped
     */
    private Collection<Job> parseJobsFromXml(final Document doc, final String jobNodeName, final boolean filterByPrefix) {
        NodeList nodes;
        try {
            final XPathExpression expr = xpath.compile("//" + jobNodeName);
            nodes = (NodeList) expr.evaluate(doc, XPathConstants.NODESET);
        }
        catch (XPathExpressionException e) {
            throw new ProcessingException(e);
        }
        final Collection<Job> jobs = new ArrayList<Job>();
        for (int i = 0; i < nodes.getLength(); i++) {
            final Node node = nodes.item(i);
            if (node.getNodeType() == Node.ELEMENT_NODE) {
                final Element el = (Element) node;
                final String name = el.getElementsByTagName("name").item(0).getTextContent();
                if (filterByPrefix && Main.JOB_NAME_PREFIX != null && !name.startsWith(Main.JOB_NAME_PREFIX)) {
                    continue;
                }
                // these listings also list "Groups" that are not actually jobs
                // I'll just ignore these and I happen to know they don't have the color
                // available :)
                if (el.getElementsByTagName("color").getLength() == 0) {
                    continue;
                }
                final Job job = new Job();
                job.setName(name);
                try {
                    String jobUrl = el.getElementsByTagName("url").item(0).getTextContent();
                    job.setUrl(new URI(jobUrl));
                }
                catch (DOMException e) {
                    throw new ProcessingException(e);
                }
                catch (URISyntaxException e) {
                    throw new ProcessingException(e);
                }
                job.setColor(el.getElementsByTagName("color").item(0).getTextContent());
                jobs.add(job);
            }
        }
        return jobs;
    }

    /**
     * Fetches last-completed-build details for a job: number, result, URL,
     * timestamp, duration (formatted h:mm:ss), executor node, and test report.
     */
    private Build getLastCompletedBuild(Job job) {
        log.debug("Fetching last completed build info for job {}", job.getName());
        final String uri = job.getUrl() + "lastCompletedBuild/api/xml?tree=number,url,timestamp,duration,result,builtOn";
        final Document doc = jhc.fetchAsXMLDocument(uri);
        Build build = new Build();
        build.setId(Integer.parseInt(doc.getElementsByTagName("number").item(0).getTextContent()));
        build.setResult(doc.getElementsByTagName("result").item(0).getTextContent());
        try {
            build.setUrl(new URI(doc.getElementsByTagName("url").item(0).getTextContent()));
            build.setTestReport(readTestReport(build.getUrl()));
        }
        catch (DOMException e) {
            throw new ProcessingException(e);
        }
        catch (URISyntaxException e) {
            throw new ProcessingException(e);
        }
        catch (SAXException e) {
            throw new ProcessingException(e);
        }
        catch (IOException e) {
            throw new ProcessingException(e);
        }
        build.setTimestamp(new Date(Long.parseLong(doc.getElementsByTagName("timestamp").item(0).getTextContent())));
        Calendar cal = Calendar.getInstance();
        cal.setTime(build.getTimestamp());
        // FIXME this will cause confusion in the beginning of Jan
        build.setDayOfYear(cal.get(Calendar.DAY_OF_YEAR));
        final long durationSeconds = Long.parseLong(doc.getElementsByTagName("duration").item(0).getTextContent()) / MILLISECONDS_IN_SECOND;
        build.setDuration(String.format("%d:%02d:%02d", durationSeconds / SECONDS_IN_HOUR, durationSeconds % SECONDS_IN_HOUR / SECONDS_IN_MINUTE, durationSeconds % SECONDS_IN_MINUTE));
        String builtOn = doc.getElementsByTagName("builtOn").item(0).getTextContent();
        build.setBuiltOn(builtOn.isEmpty() ? "master" : builtOn);
        return build;
    }

    /**
     * Fetches and parses a build's test report.
     *
     * @return the parsed report, or null if the build has no test report
     */
    private TestReport readTestReport(URI buildUrl) throws SAXException, IOException {
        final TestReport testReport = new TestReport();
        final DefaultHandler handler = new ReadTestReportHandler(testReport);
        InputStream is = null;
        try {
            is = jhc.fetchAsInputStream(buildUrl + "testReport/api/xml");
            saxParser.parse(is, handler);
        }
        catch (FileNotFoundException e) {
            log.debug("No test report available for {}", buildUrl);
            return null;
        }
        finally {
            // BUGFIX: close the stream; it previously leaked on every call
            if (is != null) {
                is.close();
            }
        }
        return testReport;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.resourcemanager.slotmanager;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.clusterframework.types.ResourceProfile;
import org.apache.flink.runtime.clusterframework.types.SlotID;
import org.apache.flink.runtime.instance.InstanceID;
import org.apache.flink.runtime.messages.Acknowledge;
import org.apache.flink.runtime.metrics.MetricNames;
import org.apache.flink.runtime.metrics.groups.SlotManagerMetricGroup;
import org.apache.flink.runtime.resourcemanager.ResourceManagerId;
import org.apache.flink.runtime.resourcemanager.WorkerResourceSpec;
import org.apache.flink.runtime.resourcemanager.registration.TaskExecutorConnection;
import org.apache.flink.runtime.rest.messages.taskmanager.SlotInfo;
import org.apache.flink.runtime.slots.ResourceRequirement;
import org.apache.flink.runtime.slots.ResourceRequirements;
import org.apache.flink.runtime.taskexecutor.SlotReport;
import org.apache.flink.runtime.taskexecutor.SlotStatus;
import org.apache.flink.runtime.taskexecutor.TaskExecutorGateway;
import org.apache.flink.runtime.taskexecutor.exceptions.SlotOccupiedException;
import org.apache.flink.runtime.util.ResourceCounter;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.concurrent.FutureUtils;
import org.apache.flink.util.concurrent.ScheduledExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
/**
 * Implementation of {@link SlotManager} supporting declarative slot management.
 *
 * <p>Jobs declare their total resource requirements (via {@link #processResourceRequirements});
 * this component matches those requirements against free slots of registered task executors,
 * requests new workers through {@link ResourceActions} when needed, and notifies job masters
 * when requirements cannot be fulfilled.
 *
 * <p>Not thread-safe on its own: all mutating calls are expected to happen on the
 * ResourceManager's main thread (see {@code mainThreadExecutor}).
 */
public class DeclarativeSlotManager implements SlotManager {
    private static final Logger LOG = LoggerFactory.getLogger(DeclarativeSlotManager.class);

    // Tracks the state (FREE/PENDING/ALLOCATED) of every registered slot.
    private final SlotTracker slotTracker;
    // Tracks per-job required vs. acquired resources; source of "missing resources".
    private final ResourceTracker resourceTracker;
    // Deferred construction: the manager needs the main-thread executor and resource
    // actions, which only become available in start().
    private final BiFunction<Executor, ResourceActions, TaskExecutorManager>
            taskExecutorManagerFactory;
    @Nullable private TaskExecutorManager taskExecutorManager;

    /** Timeout for slot requests to the task manager. */
    private final Time taskManagerRequestTimeout;

    private final SlotMatchingStrategy slotMatchingStrategy;
    private final SlotManagerMetricGroup slotManagerMetricGroup;

    // Latest known JobMaster RPC address per job; needed when asking a TaskExecutor
    // to offer a slot to that job.
    private final Map<JobID, String> jobMasterTargetAddresses = new HashMap<>();
    // Allocation in flight per slot; used to detect stale RPC responses in allocateSlot().
    private final Map<SlotID, AllocationID> pendingSlotAllocations;

    // Suppressed during the start-up grace period so jobs aren't failed before
    // task executors had a chance to connect; see setFailUnfulfillableRequest().
    private boolean sendNotEnoughResourceNotifications = true;

    /** ResourceManager's id. */
    @Nullable private ResourceManagerId resourceManagerId;

    /** Executor for future callbacks which have to be "synchronized". */
    @Nullable private Executor mainThreadExecutor;

    /** Callbacks for resource (de-)allocations. */
    @Nullable private ResourceActions resourceActions;

    /** True iff the component has been started. */
    private boolean started;

    public DeclarativeSlotManager(
            ScheduledExecutor scheduledExecutor,
            SlotManagerConfiguration slotManagerConfiguration,
            SlotManagerMetricGroup slotManagerMetricGroup,
            ResourceTracker resourceTracker,
            SlotTracker slotTracker) {
        Preconditions.checkNotNull(slotManagerConfiguration);
        this.taskManagerRequestTimeout = slotManagerConfiguration.getTaskManagerRequestTimeout();
        this.slotManagerMetricGroup = Preconditions.checkNotNull(slotManagerMetricGroup);
        this.resourceTracker = Preconditions.checkNotNull(resourceTracker);
        pendingSlotAllocations = new HashMap<>(16);
        this.slotTracker = Preconditions.checkNotNull(slotTracker);
        // Keep resourceTracker / taskExecutorManager in sync with slot state transitions.
        slotTracker.registerSlotStatusUpdateListener(createSlotStatusUpdateListener());
        slotMatchingStrategy = slotManagerConfiguration.getSlotMatchingStrategy();
        taskExecutorManagerFactory =
                (executor, resourceActions) ->
                        new TaskExecutorManager(
                                slotManagerConfiguration.getDefaultWorkerResourceSpec(),
                                slotManagerConfiguration.getNumSlotsPerWorker(),
                                slotManagerConfiguration.getMaxSlotNum(),
                                slotManagerConfiguration.isWaitResultConsumedBeforeRelease(),
                                slotManagerConfiguration.getRedundantTaskManagerNum(),
                                slotManagerConfiguration.getTaskManagerTimeout(),
                                scheduledExecutor,
                                executor,
                                resourceActions);
        // Start()-provided collaborators stay null until the component is started.
        resourceManagerId = null;
        resourceActions = null;
        mainThreadExecutor = null;
        taskExecutorManager = null;
        started = false;
    }

    /**
     * Listener translating slot state transitions into resource-tracker bookkeeping and
     * task-executor slot occupancy updates.
     */
    private SlotStatusUpdateListener createSlotStatusUpdateListener() {
        return (taskManagerSlot, previous, current, jobId) -> {
            if (previous == SlotState.PENDING) {
                // Allocation either completed or was aborted; the pending entry is stale.
                pendingSlotAllocations.remove(taskManagerSlot.getSlotId());
            }
            if (current == SlotState.PENDING) {
                resourceTracker.notifyAcquiredResource(jobId, taskManagerSlot.getResourceProfile());
            }
            if (current == SlotState.FREE) {
                resourceTracker.notifyLostResource(jobId, taskManagerSlot.getResourceProfile());
            }
            if (current == SlotState.ALLOCATED) {
                taskExecutorManager.occupySlot(taskManagerSlot.getInstanceId());
            }
            if (previous == SlotState.ALLOCATED && current == SlotState.FREE) {
                taskExecutorManager.freeSlot(taskManagerSlot.getInstanceId());
            }
        };
    }

    @Override
    public void setFailUnfulfillableRequest(boolean failUnfulfillableRequest) {
        // this sets up a grace period, e.g., when the cluster was started, to give task executors
        // time to connect
        sendNotEnoughResourceNotifications = failUnfulfillableRequest;

        if (failUnfulfillableRequest) {
            // Grace period ended: re-evaluate so unfulfillable jobs get notified now.
            checkResourceRequirements();
        }
    }

    // ---------------------------------------------------------------------------------------------
    // Component lifecycle methods
    // ---------------------------------------------------------------------------------------------

    /**
     * Starts the slot manager with the given leader id and resource manager actions.
     *
     * @param newResourceManagerId to use for communication with the task managers
     * @param newMainThreadExecutor to use to run code in the ResourceManager's main thread
     * @param newResourceActions to use for resource (de-)allocations
     */
    @Override
    public void start(
            ResourceManagerId newResourceManagerId,
            Executor newMainThreadExecutor,
            ResourceActions newResourceActions) {
        LOG.debug("Starting the slot manager.");

        this.resourceManagerId = Preconditions.checkNotNull(newResourceManagerId);
        mainThreadExecutor = Preconditions.checkNotNull(newMainThreadExecutor);
        resourceActions = Preconditions.checkNotNull(newResourceActions);
        taskExecutorManager =
                taskExecutorManagerFactory.apply(newMainThreadExecutor, newResourceActions);

        started = true;

        registerSlotManagerMetrics();
    }

    /** Exposes free/total slot counts as gauges on the slot manager metric group. */
    private void registerSlotManagerMetrics() {
        slotManagerMetricGroup.gauge(
                MetricNames.TASK_SLOTS_AVAILABLE, () -> (long) getNumberFreeSlots());
        slotManagerMetricGroup.gauge(
                MetricNames.TASK_SLOTS_TOTAL, () -> (long) getNumberRegisteredSlots());
    }

    /** Suspends the component. This clears the internal state of the slot manager. */
    @Override
    public void suspend() {
        if (!started) {
            return;
        }

        LOG.info("Suspending the slot manager.");

        slotManagerMetricGroup.close();

        resourceTracker.clear();
        if (taskExecutorManager != null) {
            taskExecutorManager.close();

            // Unregister every known task executor; this also removes their slots.
            for (InstanceID registeredTaskManager : taskExecutorManager.getTaskExecutors()) {
                unregisterTaskManager(
                        registeredTaskManager,
                        new SlotManagerException("The slot manager is being suspended."));
            }
        }

        taskExecutorManager = null;
        resourceManagerId = null;
        resourceActions = null;
        started = false;
    }

    /**
     * Closes the slot manager.
     *
     * @throws Exception if the close operation fails
     */
    @Override
    public void close() throws Exception {
        LOG.info("Closing the slot manager.");

        suspend();
    }

    // ---------------------------------------------------------------------------------------------
    // Public API
    // ---------------------------------------------------------------------------------------------

    @Override
    public void clearResourceRequirements(JobID jobId) {
        checkInit();
        maybeReclaimInactiveSlots(jobId);
        jobMasterTargetAddresses.remove(jobId);
        resourceTracker.notifyResourceRequirements(jobId, Collections.emptyList());
    }

    @Override
    public void processResourceRequirements(ResourceRequirements resourceRequirements) {
        checkInit();
        // Empty requirements for a job with no tracked requirements is a no-op; avoids
        // needless log noise and re-checks.
        if (resourceRequirements.getResourceRequirements().isEmpty()
                && resourceTracker.isRequirementEmpty(resourceRequirements.getJobId())) {
            return;
        } else if (resourceRequirements.getResourceRequirements().isEmpty()) {
            LOG.info("Clearing resource requirements of job {}", resourceRequirements.getJobId());
        } else {
            LOG.info(
                    "Received resource requirements from job {}: {}",
                    resourceRequirements.getJobId(),
                    resourceRequirements.getResourceRequirements());
        }

        if (!resourceRequirements.getResourceRequirements().isEmpty()) {
            jobMasterTargetAddresses.put(
                    resourceRequirements.getJobId(), resourceRequirements.getTargetAddress());
        }
        resourceTracker.notifyResourceRequirements(
                resourceRequirements.getJobId(), resourceRequirements.getResourceRequirements());
        checkResourceRequirements();
    }

    /**
     * If the job still holds slots, asks each involved task executor to free the slots that
     * are no longer actively used by the job.
     */
    private void maybeReclaimInactiveSlots(JobID jobId) {
        if (!resourceTracker.getAcquiredResources(jobId).isEmpty()) {
            final Collection<TaskExecutorConnection> taskExecutorsWithAllocatedSlots =
                    slotTracker.getTaskExecutorsWithAllocatedSlotsForJob(jobId);
            for (TaskExecutorConnection taskExecutorConnection : taskExecutorsWithAllocatedSlots) {
                final TaskExecutorGateway taskExecutorGateway =
                        taskExecutorConnection.getTaskExecutorGateway();
                // Fire-and-forget RPC; the result is not awaited here.
                taskExecutorGateway.freeInactiveSlots(jobId, taskManagerRequestTimeout);
            }
        }
    }

    /**
     * Registers a new task manager at the slot manager. This will make the task managers slots
     * known and, thus, available for allocation.
     *
     * @param taskExecutorConnection for the new task manager
     * @param initialSlotReport for the new task manager
     * @param totalResourceProfile for the new task manager
     * @param defaultSlotResourceProfile for the new task manager
     * @return True if the task manager has not been registered before and is registered
     *     successfully; otherwise false
     */
    @Override
    public boolean registerTaskManager(
            final TaskExecutorConnection taskExecutorConnection,
            SlotReport initialSlotReport,
            ResourceProfile totalResourceProfile,
            ResourceProfile defaultSlotResourceProfile) {
        checkInit();
        LOG.debug(
                "Registering task executor {} under {} at the slot manager.",
                taskExecutorConnection.getResourceID(),
                taskExecutorConnection.getInstanceID());

        // we identify task managers by their instance id
        if (taskExecutorManager.isTaskManagerRegistered(taskExecutorConnection.getInstanceID())) {
            LOG.debug(
                    "Task executor {} was already registered.",
                    taskExecutorConnection.getResourceID());
            // Treat the duplicate registration's slot report as a regular status update.
            reportSlotStatus(taskExecutorConnection.getInstanceID(), initialSlotReport);
            return false;
        } else {
            if (!taskExecutorManager.registerTaskManager(
                    taskExecutorConnection,
                    initialSlotReport,
                    totalResourceProfile,
                    defaultSlotResourceProfile)) {
                LOG.debug(
                        "Task executor {} could not be registered.",
                        taskExecutorConnection.getResourceID());
                return false;
            }

            // register the new slots
            for (SlotStatus slotStatus : initialSlotReport) {
                slotTracker.addSlot(
                        slotStatus.getSlotID(),
                        slotStatus.getResourceProfile(),
                        taskExecutorConnection,
                        slotStatus.getJobID());
            }

            // New capacity may fulfill outstanding requirements.
            checkResourceRequirements();
            return true;
        }
    }

    @Override
    public boolean unregisterTaskManager(InstanceID instanceId, Exception cause) {
        checkInit();

        LOG.debug("Unregistering task executor {} from the slot manager.", instanceId);

        if (taskExecutorManager.isTaskManagerRegistered(instanceId)) {
            // Removing slots triggers the status-update listener, which updates the
            // resource tracker before the executor itself is dropped.
            slotTracker.removeSlots(taskExecutorManager.getSlotsOf(instanceId));
            taskExecutorManager.unregisterTaskExecutor(instanceId);
            checkResourceRequirements();

            return true;
        } else {
            LOG.debug(
                    "There is no task executor registered with instance ID {}. Ignoring this message.",
                    instanceId);

            return false;
        }
    }

    /**
     * Reports the current slot allocations for a task manager identified by the given instance id.
     *
     * @param instanceId identifying the task manager for which to report the slot status
     * @param slotReport containing the status for all of its slots
     * @return true if the slot status has been updated successfully, otherwise false
     */
    @Override
    public boolean reportSlotStatus(InstanceID instanceId, SlotReport slotReport) {
        checkInit();

        LOG.debug("Received slot report from instance {}: {}.", instanceId, slotReport);

        if (taskExecutorManager.isTaskManagerRegistered(instanceId)) {
            // Only re-check requirements if the report actually changed some slot state.
            if (slotTracker.notifySlotStatus(slotReport)) {
                checkResourceRequirements();
            }
            return true;
        } else {
            LOG.debug(
                    "Received slot report for unknown task manager with instance id {}. Ignoring this report.",
                    instanceId);

            return false;
        }
    }

    /**
     * Free the given slot from the given allocation. If the slot is still allocated by the given
     * allocation id, then the slot will be marked as free and will be subject to new slot requests.
     *
     * @param slotId identifying the slot to free
     * @param allocationId with which the slot is presumably allocated
     */
    @Override
    public void freeSlot(SlotID slotId, AllocationID allocationId) {
        checkInit();
        LOG.debug("Freeing slot {}.", slotId);

        // NOTE(review): allocationId is not checked here — the slot is freed regardless of
        // which allocation currently holds it; the tracker handles stale transitions.
        slotTracker.notifyFree(slotId);
        checkResourceRequirements();
    }

    // ---------------------------------------------------------------------------------------------
    // Requirement matching
    // ---------------------------------------------------------------------------------------------

    /**
     * Matches resource requirements against available resources. In a first round requirements are
     * matched against free slot, and any match results in a slot allocation. The remaining
     * unfulfilled requirements are matched against pending slots, allocating more workers if no
     * matching pending slot could be found. If the requirements for a job could not be fulfilled
     * then a notification is sent to the job master informing it as such.
     *
     * <p>Performance notes: At it's core this method loops, for each job, over all free/pending
     * slots for each required slot, trying to find a matching slot. One should generally go in with
     * the assumption that this runs in numberOfJobsRequiringResources * numberOfRequiredSlots *
     * numberOfFreeOrPendingSlots. This is especially important when dealing with pending slots, as
     * matches between requirements and pending slots are not persisted and recomputed on each call.
     * This may required further refinements in the future; e.g., persisting the matches between
     * requirements and pending slots, or not matching against pending slots at all.
     *
     * <p>When dealing with unspecific resource profiles (i.e., {@link ResourceProfile#ANY}/{@link
     * ResourceProfile#UNKNOWN}), then the number of free/pending slots is not relevant because we
     * only need exactly 1 comparison to determine whether a slot can be fulfilled or not, since
     * they are all the same anyway.
     *
     * <p>When dealing with specific resource profiles things can be a lot worse, with the classical
     * cases where either no matches are found, or only at the very end of the iteration. In the
     * absolute worst case, with J jobs, requiring R slots each with a unique resource profile such
     * each pair of these profiles is not matching, and S free/pending slots that don't fulfill any
     * requirement, then this method does a total of J*R*S resource profile comparisons.
     */
    private void checkResourceRequirements() {
        final Map<JobID, Collection<ResourceRequirement>> missingResources =
                resourceTracker.getMissingResources();
        if (missingResources.isEmpty()) {
            return;
        }

        // Round 1: match against free slots, remembering what could not be fulfilled.
        final Map<JobID, ResourceCounter> unfulfilledRequirements = new LinkedHashMap<>();
        for (Map.Entry<JobID, Collection<ResourceRequirement>> resourceRequirements :
                missingResources.entrySet()) {
            final JobID jobId = resourceRequirements.getKey();

            final ResourceCounter unfulfilledJobRequirements =
                    tryAllocateSlotsForJob(jobId, resourceRequirements.getValue());
            if (!unfulfilledJobRequirements.isEmpty()) {
                unfulfilledRequirements.put(jobId, unfulfilledJobRequirements);
            }
        }
        if (unfulfilledRequirements.isEmpty()) {
            return;
        }

        // Round 2: match the remainder against pending (not-yet-started) worker slots,
        // allocating new workers as a last resort.
        ResourceCounter pendingSlots =
                ResourceCounter.withResources(
                        taskExecutorManager.getPendingTaskManagerSlots().stream()
                                .collect(
                                        Collectors.groupingBy(
                                                PendingTaskManagerSlot::getResourceProfile,
                                                Collectors.summingInt(x -> 1))));

        for (Map.Entry<JobID, ResourceCounter> unfulfilledRequirement :
                unfulfilledRequirements.entrySet()) {
            pendingSlots =
                    tryFulfillRequirementsWithPendingSlots(
                            unfulfilledRequirement.getKey(),
                            unfulfilledRequirement.getValue().getResourcesWithCount(),
                            pendingSlots);
        }
    }

    /**
     * Tries to allocate free slots for all missing resources of a single job.
     *
     * @return the requirements that could not be fulfilled from free slots
     */
    private ResourceCounter tryAllocateSlotsForJob(
            JobID jobId, Collection<ResourceRequirement> missingResources) {
        ResourceCounter outstandingRequirements = ResourceCounter.empty();

        for (ResourceRequirement resourceRequirement : missingResources) {
            int numMissingSlots =
                    internalTryAllocateSlots(
                            jobId, jobMasterTargetAddresses.get(jobId), resourceRequirement);
            if (numMissingSlots > 0) {
                outstandingRequirements =
                        outstandingRequirements.add(
                                resourceRequirement.getResourceProfile(), numMissingSlots);
            }
        }
        return outstandingRequirements;
    }

    /**
     * Tries to allocate slots for the given requirement. If there are not enough slots available,
     * the resource manager is informed to allocate more resources.
     *
     * @param jobId job to allocate slots for
     * @param targetAddress address of the jobmaster
     * @param resourceRequirement required slots
     * @return the number of missing slots
     */
    private int internalTryAllocateSlots(
            JobID jobId, String targetAddress, ResourceRequirement resourceRequirement) {
        final ResourceProfile requiredResource = resourceRequirement.getResourceProfile();
        Collection<TaskManagerSlotInformation> freeSlots = slotTracker.getFreeSlots();

        int numUnfulfilled = 0;
        for (int x = 0; x < resourceRequirement.getNumberOfRequiredSlots(); x++) {

            final Optional<TaskManagerSlotInformation> reservedSlot =
                    slotMatchingStrategy.findMatchingSlot(
                            requiredResource, freeSlots, this::getNumberRegisteredSlotsOf);
            if (reservedSlot.isPresent()) {
                // we do not need to modify freeSlots because it is indirectly modified by the
                // allocation
                allocateSlot(reservedSlot.get(), jobId, targetAddress, requiredResource);
            } else {
                // exit loop early; we won't find a matching slot for this requirement
                int numRemaining = resourceRequirement.getNumberOfRequiredSlots() - x;
                numUnfulfilled += numRemaining;
                break;
            }
        }
        return numUnfulfilled;
    }

    /**
     * Allocates the given slot. This entails sending a registration message to the task manager and
     * treating failures.
     *
     * @param taskManagerSlot slot to allocate
     * @param jobId job for which the slot should be allocated for
     * @param targetAddress address of the job master
     * @param resourceProfile resource profile for the requirement for which the slot is used
     */
    private void allocateSlot(
            TaskManagerSlotInformation taskManagerSlot,
            JobID jobId,
            String targetAddress,
            ResourceProfile resourceProfile) {

        final SlotID slotId = taskManagerSlot.getSlotId();
        LOG.debug(
                "Starting allocation of slot {} for job {} with resource profile {}.",
                slotId,
                jobId,
                resourceProfile);

        final InstanceID instanceId = taskManagerSlot.getInstanceId();
        if (!taskExecutorManager.isTaskManagerRegistered(instanceId)) {
            throw new IllegalStateException(
                    "Could not find a registered task manager for instance id " + instanceId + '.');
        }

        final TaskExecutorConnection taskExecutorConnection =
                taskManagerSlot.getTaskManagerConnection();
        final TaskExecutorGateway gateway = taskExecutorConnection.getTaskExecutorGateway();

        final AllocationID allocationId = new AllocationID();

        // Mark the slot PENDING before the RPC so concurrent checks don't re-use it.
        slotTracker.notifyAllocationStart(slotId, jobId);
        taskExecutorManager.markUsed(instanceId);
        pendingSlotAllocations.put(slotId, allocationId);

        // RPC call to the task manager
        CompletableFuture<Acknowledge> requestFuture =
                gateway.requestSlot(
                        slotId,
                        jobId,
                        allocationId,
                        resourceProfile,
                        targetAddress,
                        resourceManagerId,
                        taskManagerRequestTimeout);

        // Handle the response on the main-thread executor so state mutations stay
        // single-threaded.
        CompletableFuture<Void> slotAllocationResponseProcessingFuture =
                requestFuture.handleAsync(
                        (Acknowledge acknowledge, Throwable throwable) -> {
                            // Guard against stale responses: only react if this allocation is
                            // still the one pending for the slot.
                            final AllocationID currentAllocationForSlot =
                                    pendingSlotAllocations.get(slotId);
                            if (currentAllocationForSlot == null
                                    || !currentAllocationForSlot.equals(allocationId)) {
                                LOG.debug(
                                        "Ignoring slot allocation update from task executor {} for slot {} and job {}, because the allocation was already completed or cancelled.",
                                        instanceId,
                                        slotId,
                                        jobId);
                                return null;
                            }
                            if (acknowledge != null) {
                                LOG.trace(
                                        "Completed allocation of slot {} for job {}.",
                                        slotId,
                                        jobId);
                                slotTracker.notifyAllocationComplete(slotId, jobId);
                            } else {
                                if (throwable instanceof SlotOccupiedException) {
                                    SlotOccupiedException exception =
                                            (SlotOccupiedException) throwable;
                                    LOG.debug(
                                            "Tried allocating slot {} for job {}, but it was already allocated for job {}.",
                                            slotId,
                                            jobId,
                                            exception.getJobId());
                                    // report as a slot status to force the state transition
                                    // this could be a problem if we ever assume that the task
                                    // executor always reports about all slots
                                    slotTracker.notifySlotStatus(
                                            Collections.singleton(
                                                    new SlotStatus(
                                                            slotId,
                                                            taskManagerSlot.getResourceProfile(),
                                                            exception.getJobId(),
                                                            exception.getAllocationId())));
                                } else {
                                    LOG.warn(
                                            "Slot allocation for slot {} for job {} failed.",
                                            slotId,
                                            jobId,
                                            throwable);
                                    slotTracker.notifyFree(slotId);
                                }
                                // The slot became free/occupied-elsewhere; other requirements
                                // may now be satisfiable (or this one must be retried).
                                checkResourceRequirements();
                            }
                            return null;
                        },
                        mainThreadExecutor);

        FutureUtils.assertNoException(slotAllocationResponseProcessingFuture);
    }

    /**
     * Tries to cover a job's unfulfilled requirements with pending slots, requesting new
     * workers when no pending slot matches. Notifies the job master (unless suppressed by the
     * start-up grace period) when a requirement cannot be satisfied at all.
     *
     * @return the remaining pool of pending slots
     */
    private ResourceCounter tryFulfillRequirementsWithPendingSlots(
            JobID jobId,
            Collection<Map.Entry<ResourceProfile, Integer>> missingResources,
            ResourceCounter pendingSlots) {
        for (Map.Entry<ResourceProfile, Integer> missingResource : missingResources) {
            ResourceProfile profile = missingResource.getKey();
            for (int i = 0; i < missingResource.getValue(); i++) {
                final MatchingResult matchingResult =
                        tryFulfillWithPendingSlots(profile, pendingSlots);
                pendingSlots = matchingResult.getNewAvailableResources();
                if (!matchingResult.isSuccessfulMatching()) {
                    final WorkerAllocationResult allocationResult =
                            tryAllocateWorkerAndReserveSlot(profile, pendingSlots);
                    pendingSlots = allocationResult.getNewAvailableResources();
                    if (!allocationResult.isSuccessfulAllocating()
                            && sendNotEnoughResourceNotifications) {
                        LOG.warn(
                                "Could not fulfill resource requirements of job {}. Free slots: {}",
                                jobId,
                                slotTracker.getFreeSlots().size());
                        resourceActions.notifyNotEnoughResourcesAvailable(
                                jobId, resourceTracker.getAcquiredResources(jobId));
                        // Give up on this job's remaining requirements for this round.
                        return pendingSlots;
                    }
                }
            }
        }
        return pendingSlots;
    }

    /**
     * Tries to reserve one pending slot matching the given profile.
     *
     * @return a result carrying whether a match was found and the updated pending-slot pool
     */
    private MatchingResult tryFulfillWithPendingSlots(
            ResourceProfile resourceProfile, ResourceCounter pendingSlots) {
        Set<ResourceProfile> pendingSlotProfiles = pendingSlots.getResources();

        // short-cut, pretty much only applicable to fine-grained resource management
        if (pendingSlotProfiles.contains(resourceProfile)) {
            pendingSlots = pendingSlots.subtract(resourceProfile, 1);
            return new MatchingResult(true, pendingSlots);
        }

        for (ResourceProfile pendingSlotProfile : pendingSlotProfiles) {
            if (pendingSlotProfile.isMatching(resourceProfile)) {
                pendingSlots = pendingSlots.subtract(pendingSlotProfile, 1);
                return new MatchingResult(true, pendingSlots);
            }
        }

        return new MatchingResult(false, pendingSlots);
    }

    /**
     * Requests a new worker for the given profile and, if granted, reserves one of its slots;
     * the worker's remaining slots are added to the pending pool.
     */
    private WorkerAllocationResult tryAllocateWorkerAndReserveSlot(
            ResourceProfile profile, ResourceCounter pendingSlots) {
        Optional<ResourceRequirement> newlyFulfillableRequirements =
                taskExecutorManager.allocateWorker(profile);
        if (newlyFulfillableRequirements.isPresent()) {
            ResourceRequirement newSlots = newlyFulfillableRequirements.get();
            // reserve one of the new slots
            if (newSlots.getNumberOfRequiredSlots() > 1) {
                pendingSlots =
                        pendingSlots.add(
                                newSlots.getResourceProfile(),
                                newSlots.getNumberOfRequiredSlots() - 1);
            }
            return new WorkerAllocationResult(true, pendingSlots);
        } else {
            return new WorkerAllocationResult(false, pendingSlots);
        }
    }

    // ---------------------------------------------------------------------------------------------
    // Legacy APIs
    // ---------------------------------------------------------------------------------------------

    @Override
    public int getNumberRegisteredSlots() {
        return taskExecutorManager.getNumberRegisteredSlots();
    }

    @Override
    public int getNumberRegisteredSlotsOf(InstanceID instanceId) {
        return taskExecutorManager.getNumberRegisteredSlotsOf(instanceId);
    }

    @Override
    public int getNumberFreeSlots() {
        return taskExecutorManager.getNumberFreeSlots();
    }

    @Override
    public int getNumberFreeSlotsOf(InstanceID instanceId) {
        return taskExecutorManager.getNumberFreeSlotsOf(instanceId);
    }

    @Override
    public Map<WorkerResourceSpec, Integer> getRequiredResources() {
        return taskExecutorManager.getRequiredWorkers();
    }

    @Override
    public ResourceProfile getRegisteredResource() {
        return taskExecutorManager.getTotalRegisteredResources();
    }

    @Override
    public ResourceProfile getRegisteredResourceOf(InstanceID instanceID) {
        return taskExecutorManager.getTotalRegisteredResourcesOf(instanceID);
    }

    @Override
    public ResourceProfile getFreeResource() {
        return taskExecutorManager.getTotalFreeResources();
    }

    @Override
    public ResourceProfile getFreeResourceOf(InstanceID instanceID) {
        return taskExecutorManager.getTotalFreeResourcesOf(instanceID);
    }

    @Override
    public Collection<SlotInfo> getAllocatedSlotsOf(InstanceID instanceID) {
        // This information is currently not supported for this slot manager.
        return Collections.emptyList();
    }

    // ---------------------------------------------------------------------------------------------
    // Internal utility methods
    // ---------------------------------------------------------------------------------------------

    /** Fails fast if the component was not started via {@link #start}. */
    private void checkInit() {
        Preconditions.checkState(started, "The slot manager has not been started.");
    }

    /** Result of matching a requirement against pending slots. */
    private static class MatchingResult {
        private final boolean isSuccessfulMatching;
        // Pending-slot pool after the (attempted) match.
        private final ResourceCounter newAvailableResources;

        private MatchingResult(
                boolean isSuccessfulMatching, ResourceCounter newAvailableResources) {
            this.isSuccessfulMatching = isSuccessfulMatching;
            this.newAvailableResources = Preconditions.checkNotNull(newAvailableResources);
        }

        private ResourceCounter getNewAvailableResources() {
            return newAvailableResources;
        }

        private boolean isSuccessfulMatching() {
            return isSuccessfulMatching;
        }
    }

    /** Result of attempting to allocate a new worker for a requirement. */
    private static class WorkerAllocationResult {
        private final boolean isSuccessfulAllocating;
        // Pending-slot pool after the (attempted) allocation.
        private final ResourceCounter newAvailableResources;

        private WorkerAllocationResult(
                boolean isSuccessfulAllocating, ResourceCounter newAvailableResources) {
            this.isSuccessfulAllocating = isSuccessfulAllocating;
            this.newAvailableResources = Preconditions.checkNotNull(newAvailableResources);
        }

        private ResourceCounter getNewAvailableResources() {
            return newAvailableResources;
        }

        private boolean isSuccessfulAllocating() {
            return isSuccessfulAllocating;
        }
    }
}
| |
package org.apache.axis2.transport.mqtt;
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.eclipse.paho.client.mqttv3.*;
import java.io.IOException;
import java.sql.Timestamp;
/**
 * Wrapper around the Paho {@link MqttAsyncClient} that drives non-blocking connect /
 * publish / subscribe / disconnect operations through a simple state machine, blocking
 * the calling thread between steps until the corresponding asynchronous callback fires.
 *
 * <p>Thread-safety: {@code state}, {@code donext} and {@code ex} are written from Paho
 * callback threads and read from the driving thread; hand-off is synchronized on
 * {@code waiter}.
 */
public class MqttAsyncCallback implements MqttCallback {

    // State machine states; the driver loops until FINISH (or throws on ERROR).
    int state = BEGIN;
    static final int BEGIN = 0;
    static final int CONNECTED = 1;
    static final int PUBLISHED = 2;
    static final int SUBSCRIBED = 3;
    static final int DISCONNECTED = 4;
    static final int FINISH = 5;
    static final int ERROR = 6;
    static final int DISCONNECT = 7;

    private MqttConnectOptions conOpt;

    private Log log = LogFactory.getLog(MqttAsyncCallback.class);

    // Private instance variables
    private MqttAsyncClient client;
    private String brokerUrl;
    // Failure reported by an asynchronous callback; rethrown by the driving thread.
    private Throwable ex = null;
    private final Object waiter = new Object();
    // True once a callback completed and the state machine may advance.
    private boolean donext = false;

    public void setConOpt(MqttConnectOptions conOpt) {
        this.conOpt = conOpt;
    }

    public MqttAsyncCallback(MqttAsyncClient clientAsync) throws MqttException {
        client = clientAsync;
        // Set this wrapper as the callback handler
        client.setCallback(this);
    }

    /**
     * Publish / send a message to an MQTT server
     *
     * @param topicName the name of the topic to publish to
     * @param message the set of bytes to send to the MQTT server
     * @throws MqttException
     */
    public void publish(String topicName, MqttMessage message) throws Throwable {
        // Use a state machine to decide which step to do next. State change occurs
        // when a notification is received that an MQTT action has completed
        while (state != FINISH) {
            switch (state) {
                case BEGIN:
                    // Connect using a non-blocking connect
                    MqttConnector con = new MqttConnector();
                    con.doConnect();
                    break;
                case CONNECTED:
                    // Publish using a non-blocking publisher
                    Publisher pub = new Publisher();
                    pub.doPublish(topicName, message);
                    break;
                case PUBLISHED:
                    state = DISCONNECT;
                    donext = true;
                    break;
                case DISCONNECT:
                    Disconnector disc = new Disconnector();
                    disc.doDisconnect();
                    break;
                case ERROR:
                    // Rethrow the failure reported by the asynchronous callback.
                    throw ex;
                case DISCONNECTED:
                    state = FINISH;
                    donext = true;
                    break;
            }
            waitForStateChange(10000);
        }
    }

    /**
     * Wait for a maximum amount of time for a state change event to occur
     *
     * @param maxTTW maximum time to wait in milliseconds
     * @throws MqttException if an asynchronous operation reported a failure
     */
    private void waitForStateChange(int maxTTW) throws MqttException {
        synchronized (waiter) {
            if (!donext) {
                try {
                    waiter.wait(maxTTW);
                } catch (InterruptedException e) {
                    // Restore the interrupt status so callers up the stack can observe it;
                    // the previous message ("Timed out") mislabeled the interruption.
                    Thread.currentThread().interrupt();
                    log.warn("Interrupted while waiting for the sender's callback", e);
                }
                if (ex != null) {
                    // Avoid a ClassCastException when a callback reported a throwable that
                    // is not an MqttException: wrap it instead of blindly casting.
                    if (ex instanceof MqttException) {
                        throw (MqttException) ex;
                    }
                    throw new MqttException(ex);
                }
            }
            donext = false;
        }
    }

    /**
     * Subscribe to a topic on an MQTT server
     * Once subscribed this method waits for the messages to arrive from the server
     * that match the subscription. It continues listening for messages until the enter key is
     * pressed.
     *
     * @param topicName to subscribe to (can be wild carded)
     * @param qos the maximum quality of service to receive messages at for this subscription
     * @throws MqttException
     */
    public void subscribe(String topicName, int qos) throws Throwable {
        // Use a state machine to decide which step to do next. State change occurs
        // when a notification is received that an MQTT action has completed
        while (state != FINISH) {
            switch (state) {
                case BEGIN:
                    // Connect using a non-blocking connect
                    MqttConnector con = new MqttConnector();
                    con.doConnect();
                    break;
                case CONNECTED:
                    // Subscribe using a non-blocking subscribe
                    Subscriber sub = new Subscriber();
                    sub.doSubscribe(topicName, qos);
                    break;
                case SUBSCRIBED:
                    // Block until Enter is pressed allowing messages to arrive
                    log.info("Press <Enter> to exit");
                    try {
                        System.in.read();
                    } catch (IOException e) {
                        //If we can't read we'll just exit
                    }
                    state = DISCONNECT;
                    donext = true;
                    break;
                case DISCONNECT:
                    Disconnector disc = new Disconnector();
                    disc.doDisconnect();
                    break;
                case ERROR:
                    // Rethrow the failure reported by the asynchronous callback.
                    throw ex;
                case DISCONNECTED:
                    state = FINISH;
                    donext = true;
                    break;
            }
            waitForStateChange(10000);
        }
    }

    public void connectionLost(Throwable throwable) {
        //ignoring for the moment...
    }

    public void messageArrived(String s, MqttMessage mqttMessage) throws Exception {
        // This wrapper only sends; inbound messages indicate a programming error.
        throw new IllegalStateException();
    }

    public void deliveryComplete(IMqttDeliveryToken iMqttDeliveryToken) {
        if (log.isDebugEnabled()) {
            log.debug("Message delivered: " + iMqttDeliveryToken.toString());
        }
    }

    /**
     * Connect in a non-blocking way and then sit back and wait to be
     * notified that the action has completed.
     */
    public class MqttConnector {

        public MqttConnector() {
        }

        public void doConnect() {
            // Connect to the server
            // Get a token and setup an asynchronous listener on the token which
            // will be notified once the connect completes
            if (log.isDebugEnabled()) {
                log.debug("Connecting with client ID " + client.getClientId());
            }
            IMqttActionListener conListener = new IMqttActionListener() {
                public void onSuccess(IMqttToken asyncActionToken) {
                    state = CONNECTED;
                    carryOn();
                }

                public void onFailure(IMqttToken asyncActionToken, Throwable exception) {
                    ex = exception;
                    state = ERROR;
                    // Pass the throwable as the log argument so the stack trace is kept.
                    log.warn("Client " + client.getClientId() + " connection failed.", exception);
                    carryOn();
                }

                public void carryOn() {
                    // Wake the driving thread blocked in waitForStateChange().
                    synchronized (waiter) {
                        donext = true;
                        waiter.notifyAll();
                    }
                }
            };

            try {
                // Connect using a non-blocking connect
                client.connect(conOpt, "Connect sample context", conListener);
            } catch (MqttException e) {
                // If though it is a non-blocking connect an exception can be
                // thrown if validation of parms fails or other checks such
                // as already connected fail.
                state = ERROR;
                donext = true;
                ex = e;
            }
        }
    }

    /**
     * Publish in a non-blocking way and then sit back and wait to be
     * notified that the action has completed.
     */
    public class Publisher {

        public void doPublish(String topicName, MqttMessage message) {
            // Send / publish a message to the server
            // Get a token and setup an asynchronous listener on the token which
            // will be notified once the message has been delivered
            String time = new Timestamp(System.currentTimeMillis()).toString();

            // Setup a listener object to be notified when the publish completes.
            IMqttActionListener pubListener = new IMqttActionListener() {
                public void onSuccess(IMqttToken asyncActionToken) {
                    state = PUBLISHED;
                    carryOn();
                }

                public void onFailure(IMqttToken asyncActionToken, Throwable exception) {
                    ex = exception;
                    state = ERROR;
                    // Pass the throwable as the log argument so the stack trace is kept.
                    log.warn("Message publishing failed", exception);
                    carryOn();
                }

                public void carryOn() {
                    // Wake the driving thread blocked in waitForStateChange().
                    synchronized (waiter) {
                        donext = true;
                        waiter.notifyAll();
                    }
                }
            };

            try {
                // Publish the message
                client.publish(topicName, message, "Pub sample context", pubListener);
            } catch (MqttException e) {
                state = ERROR;
                donext = true;
                ex = e;
            }
        }
    }

    /**
     * Subscribe in a non-blocking way and then sit back and wait to be
     * notified that the action has completed.
     */
    public class Subscriber {

        public void doSubscribe(String topicName, int qos) {
            // Make a subscription
            // Get a token and setup an asynchronous listener on the token which
            // will be notified once the subscription is in place.
            if (log.isDebugEnabled()) {
                log.debug("Subscribing to topic \"" + topicName + "\" qos " + qos);
            }
            IMqttActionListener subListener = new IMqttActionListener() {
                public void onSuccess(IMqttToken asyncActionToken) {
                    state = SUBSCRIBED;
                    carryOn();
                }

                public void onFailure(IMqttToken asyncActionToken, Throwable exception) {
                    ex = exception;
                    state = ERROR;
                    // Pass the throwable as the log argument so the stack trace is kept.
                    log.warn("Topic subscription failed", exception);
                    carryOn();
                }

                public void carryOn() {
                    // Wake the driving thread blocked in waitForStateChange().
                    synchronized (waiter) {
                        donext = true;
                        waiter.notifyAll();
                    }
                }
            };

            try {
                client.subscribe(topicName, qos, "Subscribe sample context", subListener);
            } catch (MqttException e) {
                state = ERROR;
                donext = true;
                ex = e;
            }
        }
    }

    /**
     * Disconnect in a non-blocking way and then sit back and wait to be
     * notified that the action has completed.
     */
    public class Disconnector {

        public void doDisconnect() {
            // Disconnect the client
            if (log.isDebugEnabled()) {
                log.debug("Disconnecting client " + client.getClientId());
            }
            IMqttActionListener discListener = new IMqttActionListener() {
                public void onSuccess(IMqttToken asyncActionToken) {
                    state = DISCONNECTED;
                    carryOn();
                }

                public void onFailure(IMqttToken asyncActionToken, Throwable exception) {
                    ex = exception;
                    state = ERROR;
                    // Pass the throwable as the log argument so the stack trace is kept.
                    log.warn("Disconnection failed", exception);
                    carryOn();
                }

                public void carryOn() {
                    // Wake the driving thread blocked in waitForStateChange().
                    synchronized (waiter) {
                        donext = true;
                        waiter.notifyAll();
                    }
                }
            };

            try {
                client.disconnect("Disconnect sample context", discListener);
            } catch (MqttException e) {
                state = ERROR;
                donext = true;
                ex = e;
            }
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.node;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.test.ESTestCase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.oneOf;
/**
 * Unit tests for {@code DiscoveryNodes}: node resolution by id/name/selector,
 * master-first ordering, delta computation and min/max version tracking.
 */
public class DiscoveryNodesTests extends ESTestCase {

    /** A node must be resolvable both by its id and by its name. */
    public void testResolveNodeByIdOrName() {
        DiscoveryNodes discoveryNodes = buildDiscoveryNodes();
        DiscoveryNode[] nodes = discoveryNodes.getNodes().values().toArray(DiscoveryNode.class);
        DiscoveryNode node = randomFrom(nodes);
        DiscoveryNode resolvedNode = discoveryNodes.resolveNode(randomBoolean() ? node.getId() : node.getName());
        assertThat(resolvedNode.getId(), equalTo(node.getId()));
    }

    /** resolveNode succeeds only when a selector matches exactly one node. */
    public void testResolveNodeByAttribute() {
        DiscoveryNodes discoveryNodes = buildDiscoveryNodes();
        NodeSelector nodeSelector = randomFrom(NodeSelector.values());
        Set<String> matchingNodeIds = nodeSelector.matchingNodeIds(discoveryNodes);
        try {
            DiscoveryNode resolvedNode = discoveryNodes.resolveNode(nodeSelector.selector);
            assertThat(matchingNodeIds.size(), equalTo(1));
            assertThat(resolvedNode.getId(), equalTo(matchingNodeIds.iterator().next()));
        } catch (IllegalArgumentException e) {
            if (matchingNodeIds.size() == 0) {
                assertThat(e.getMessage(), equalTo("failed to resolve [" + nodeSelector.selector + "], no matching nodes"));
            } else if (matchingNodeIds.size() > 1) {
                assertThat(e.getMessage(), containsString("where expected to be resolved to a single node"));
            } else {
                fail("resolveNode shouldn't have failed for [" + nodeSelector.selector + "]");
            }
        }
    }

    public void testResolveNodesNull() {
        DiscoveryNodes discoveryNodes = buildDiscoveryNodes();
        // if assertions are enabled (should be the case for tests, but not in production), resolving null throws
        expectThrows(AssertionError.class, () -> discoveryNodes.resolveNodes(Collections.singletonList(null).toArray(new String[0])));
        expectThrows(AssertionError.class, () -> discoveryNodes.resolveNodes(null, "someNode"));
        expectThrows(AssertionError.class, () -> discoveryNodes.resolveNodes("someNode", null, "someOtherNode"));
    }

    /** No arguments, an empty array and "_all" must all resolve to every node. */
    public void testAll() {
        final DiscoveryNodes discoveryNodes = buildDiscoveryNodes();
        final String[] allNodes =
            StreamSupport.stream(discoveryNodes.spliterator(), false).map(DiscoveryNode::getId).toArray(String[]::new);
        assertThat(discoveryNodes.resolveNodes(), arrayContainingInAnyOrder(allNodes));
        assertThat(discoveryNodes.resolveNodes(new String[0]), arrayContainingInAnyOrder(allNodes));
        assertThat(discoveryNodes.resolveNodes("_all"), arrayContainingInAnyOrder(allNodes));
        final String[] nonMasterNodes =
            StreamSupport.stream(discoveryNodes.getNodes().values().spliterator(), false)
                .map(n -> n.value)
                .filter(n -> n.isMasterNode() == false)
                .map(DiscoveryNode::getId)
                .toArray(String[]::new);
        // Selectors are applied left to right, so a trailing "_all" re-adds every node.
        assertThat(discoveryNodes.resolveNodes("_all", "master:false"), arrayContainingInAnyOrder(nonMasterNodes));
        assertThat(discoveryNodes.resolveNodes("master:false", "_all"), arrayContainingInAnyOrder(allNodes));
    }

    /** Coordinating-only nodes are exactly those with no master, data or ingest role. */
    public void testCoordinatorOnlyNodes() {
        final DiscoveryNodes discoveryNodes = buildDiscoveryNodes();
        final String[] coordinatorOnlyNodes =
            StreamSupport.stream(discoveryNodes.getNodes().values().spliterator(), false)
                .map(n -> n.value)
                .filter(n -> n.isDataNode() == false && n.isIngestNode() == false && n.isMasterNode() == false)
                .map(DiscoveryNode::getId)
                .toArray(String[]::new);
        final String[] nonCoordinatorOnlyNodes =
            StreamSupport.stream(discoveryNodes.getNodes().values().spliterator(), false)
                .map(n -> n.value)
                .filter(n -> n.isMasterNode() || n.isDataNode() || n.isIngestNode())
                .map(DiscoveryNode::getId)
                .toArray(String[]::new);
        assertThat(discoveryNodes.resolveNodes("coordinating_only:true"), arrayContainingInAnyOrder(coordinatorOnlyNodes));
        assertThat(discoveryNodes.resolveNodes("_all", "data:false", "ingest:false", "master:false"),
            arrayContainingInAnyOrder(coordinatorOnlyNodes));
        assertThat(discoveryNodes.resolveNodes("_all", "coordinating_only:false"), arrayContainingInAnyOrder(nonCoordinatorOnlyNodes));
    }

    /** A random mix of selectors, node ids and node names resolves to the union of their matches. */
    public void testResolveNodesIds() {
        DiscoveryNodes discoveryNodes = buildDiscoveryNodes();
        int numSelectors = randomIntBetween(1, 5);
        Set<String> nodeSelectors = new HashSet<>();
        Set<String> expectedNodeIdsSet = new HashSet<>();
        for (int i = 0; i < numSelectors; i++) {
            NodeSelector nodeSelector = randomFrom(NodeSelector.values());
            if (nodeSelectors.add(nodeSelector.selector)) {
                expectedNodeIdsSet.addAll(nodeSelector.matchingNodeIds(discoveryNodes));
            }
        }
        int numNodeIds = randomIntBetween(0, 3);
        String[] nodeIds = discoveryNodes.getNodes().keys().toArray(String.class);
        for (int i = 0; i < numNodeIds; i++) {
            String nodeId = randomFrom(nodeIds);
            nodeSelectors.add(nodeId);
            expectedNodeIdsSet.add(nodeId);
        }
        int numNodeNames = randomIntBetween(0, 3);
        DiscoveryNode[] nodes = discoveryNodes.getNodes().values().toArray(DiscoveryNode.class);
        for (int i = 0; i < numNodeNames; i++) {
            DiscoveryNode discoveryNode = randomFrom(nodes);
            nodeSelectors.add(discoveryNode.getName());
            expectedNodeIdsSet.add(discoveryNode.getId());
        }
        String[] resolvedNodesIds = discoveryNodes.resolveNodes(nodeSelectors.toArray(new String[nodeSelectors.size()]));
        Arrays.sort(resolvedNodesIds);
        String[] expectedNodesIds = expectedNodeIdsSet.toArray(new String[expectedNodeIdsSet.size()]);
        Arrays.sort(expectedNodesIds);
        assertThat(resolvedNodesIds, equalTo(expectedNodesIds));
    }

    /** mastersFirstStream must return every node, master-eligible nodes first. */
    public void testMastersFirst() {
        final List<DiscoveryNode> inputNodes = randomNodes(10);
        final DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
        inputNodes.forEach(discoBuilder::add);
        final List<DiscoveryNode> returnedNodes = discoBuilder.build().mastersFirstStream().collect(Collectors.toList());
        assertEquals(returnedNodes.size(), inputNodes.size());
        assertEquals(new HashSet<>(returnedNodes), new HashSet<>(inputNodes));
        final List<DiscoveryNode> sortedNodes = new ArrayList<>(returnedNodes);
        // Sorting by "is not master" is a stable way to push master-eligible nodes to the front.
        Collections.sort(sortedNodes, Comparator.comparing(n -> n.isMasterNode() == false));
        assertEquals(sortedNodes, returnedNodes);
    }

    /** shortSummary lists every added/removed node; the local node is excluded from its own delta. */
    public void testDeltaListsMultipleNodes() {
        final List<DiscoveryNode> discoveryNodes = randomNodes(3);
        final DiscoveryNodes nodes0 = DiscoveryNodes.builder().add(discoveryNodes.get(0)).build();
        final DiscoveryNodes nodes01 = DiscoveryNodes.builder(nodes0).add(discoveryNodes.get(1)).build();
        final DiscoveryNodes nodes012 = DiscoveryNodes.builder(nodes01).add(discoveryNodes.get(2)).build();
        assertThat(nodes01.delta(nodes0).shortSummary(), equalTo("added {" + discoveryNodes.get(1) + "}"));
        assertThat(nodes012.delta(nodes0).shortSummary(), oneOf(
            "added {" + discoveryNodes.get(1) + "," + discoveryNodes.get(2) + "}",
            "added {" + discoveryNodes.get(2) + "," + discoveryNodes.get(1) + "}"));
        assertThat(nodes0.delta(nodes01).shortSummary(), equalTo("removed {" + discoveryNodes.get(1) + "}"));
        assertThat(nodes0.delta(nodes012).shortSummary(), oneOf(
            "removed {" + discoveryNodes.get(1) + "," + discoveryNodes.get(2) + "}",
            "removed {" + discoveryNodes.get(2) + "," + discoveryNodes.get(1) + "}"));
        final DiscoveryNodes nodes01Local = DiscoveryNodes.builder(nodes01).localNodeId(discoveryNodes.get(1).getId()).build();
        final DiscoveryNodes nodes02Local = DiscoveryNodes.builder(nodes012).localNodeId(discoveryNodes.get(1).getId()).build();
        assertThat(nodes01Local.delta(nodes0).shortSummary(), equalTo(""));
        assertThat(nodes02Local.delta(nodes0).shortSummary(), equalTo("added {" + discoveryNodes.get(2) + "}"));
        assertThat(nodes0.delta(nodes01Local).shortSummary(), equalTo("removed {" + discoveryNodes.get(1) + "}"));
        assertThat(nodes0.delta(nodes02Local).shortSummary(), oneOf(
            "removed {" + discoveryNodes.get(1) + "," + discoveryNodes.get(2) + "}",
            "removed {" + discoveryNodes.get(2) + "," + discoveryNodes.get(1) + "}"));
    }

    /** Exhaustively checks delta(): master change flag plus exact added/removed node sets. */
    public void testDeltas() {
        Set<DiscoveryNode> nodesA = new HashSet<>();
        nodesA.addAll(randomNodes(1 + randomInt(10)));
        Set<DiscoveryNode> nodesB = new HashSet<>();
        nodesB.addAll(randomNodes(1 + randomInt(5)));
        for (DiscoveryNode node : randomSubsetOf(nodesA)) {
            if (randomBoolean()) {
                // change an attribute
                Map<String, String> attrs = new HashMap<>(node.getAttributes());
                attrs.put("new", "new");
                final TransportAddress nodeAddress = node.getAddress();
                node = new DiscoveryNode(node.getName(), node.getId(), node.getEphemeralId(), nodeAddress.address().getHostString(),
                    nodeAddress.getAddress(), nodeAddress, attrs, node.getRoles(), node.getVersion());
            }
            nodesB.add(node);
        }
        DiscoveryNode masterA = randomBoolean() ? null : RandomPicks.randomFrom(random(), nodesA);
        DiscoveryNode masterB = randomBoolean() ? null : RandomPicks.randomFrom(random(), nodesB);
        DiscoveryNodes.Builder builderA = DiscoveryNodes.builder();
        nodesA.stream().forEach(builderA::add);
        final String masterAId = masterA == null ? null : masterA.getId();
        builderA.masterNodeId(masterAId);
        builderA.localNodeId(RandomPicks.randomFrom(random(), nodesA).getId());
        DiscoveryNodes.Builder builderB = DiscoveryNodes.builder();
        nodesB.stream().forEach(builderB::add);
        final String masterBId = masterB == null ? null : masterB.getId();
        builderB.masterNodeId(masterBId);
        builderB.localNodeId(RandomPicks.randomFrom(random(), nodesB).getId());
        final DiscoveryNodes discoNodesA = builderA.build();
        final DiscoveryNodes discoNodesB = builderB.build();
        logger.info("nodes A: {}", discoNodesA);
        logger.info("nodes B: {}", discoNodesB);
        DiscoveryNodes.Delta delta = discoNodesB.delta(discoNodesA);
        if (masterA == null) {
            assertThat(delta.previousMasterNode(), nullValue());
        } else {
            assertThat(delta.previousMasterNode().getId(), equalTo(masterAId));
        }
        if (masterB == null) {
            assertThat(delta.newMasterNode(), nullValue());
        } else {
            assertThat(delta.newMasterNode().getId(), equalTo(masterBId));
        }
        if (Objects.equals(masterAId, masterBId)) {
            assertFalse(delta.masterNodeChanged());
        } else {
            assertTrue(delta.masterNodeChanged());
        }
        Set<DiscoveryNode> newNodes = new HashSet<>(nodesB);
        newNodes.removeAll(nodesA);
        assertThat(delta.added(), equalTo(newNodes.isEmpty() == false));
        assertThat(delta.addedNodes(), containsInAnyOrder(newNodes.stream().collect(Collectors.toList()).toArray()));
        assertThat(delta.addedNodes().size(), equalTo(newNodes.size()));
        Set<DiscoveryNode> removedNodes = new HashSet<>(nodesA);
        removedNodes.removeAll(nodesB);
        assertThat(delta.removed(), equalTo(removedNodes.isEmpty() == false));
        assertThat(delta.removedNodes(), containsInAnyOrder(removedNodes.stream().collect(Collectors.toList()).toArray()));
        assertThat(delta.removedNodes().size(), equalTo(removedNodes.size()));
    }

    // Monotonic source of node ids so nodes created by different tests never collide.
    private static AtomicInteger idGenerator = new AtomicInteger();

    /** Builds {@code numNodes} nodes with random built-in roles, plus occasional custom attribute/role. */
    private static List<DiscoveryNode> randomNodes(final int numNodes) {
        List<DiscoveryNode> nodesList = new ArrayList<>();
        for (int i = 0; i < numNodes; i++) {
            Map<String, String> attributes = new HashMap<>();
            if (frequently()) {
                attributes.put("custom", randomBoolean() ? "match" : randomAlphaOfLengthBetween(3, 5));
            }
            final Set<DiscoveryNodeRole> roles = new HashSet<>(randomSubsetOf(DiscoveryNodeRole.BUILT_IN_ROLES));
            if (frequently()) {
                roles.add(new DiscoveryNodeRole("custom_role", "cr") {
                    @Override
                    public Setting<Boolean> legacySetting() {
                        return null;
                    }
                });
            }
            final DiscoveryNode node = newNode(idGenerator.getAndIncrement(), attributes, roles);
            nodesList.add(node);
        }
        return nodesList;
    }

    /** Builds a random cluster with 1-10 nodes and randomly chosen local and master node ids. */
    private static DiscoveryNodes buildDiscoveryNodes() {
        int numNodes = randomIntBetween(1, 10);
        DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
        List<DiscoveryNode> nodesList = randomNodes(numNodes);
        for (DiscoveryNode node : nodesList) {
            discoBuilder = discoBuilder.add(node);
        }
        discoBuilder.localNodeId(randomFrom(nodesList).getId());
        discoBuilder.masterNodeId(randomFrom(nodesList).getId());
        return discoBuilder.build();
    }

    /** Creates a node named {@code name_<id>} with id {@code node_<id>} on a fake transport address. */
    private static DiscoveryNode newNode(int nodeId, Map<String, String> attributes, Set<DiscoveryNodeRole> roles) {
        return new DiscoveryNode("name_" + nodeId, "node_" + nodeId, buildNewFakeTransportAddress(), attributes, roles,
            Version.CURRENT);
    }

    /**
     * The node selectors understood by {@code resolveNode(s)}, each paired with
     * an oracle computing the ids it should match.
     */
    private enum NodeSelector {
        LOCAL("_local") {
            @Override
            Set<String> matchingNodeIds(DiscoveryNodes nodes) {
                return Collections.singleton(nodes.getLocalNodeId());
            }
        }, ELECTED_MASTER("_master") {
            @Override
            Set<String> matchingNodeIds(DiscoveryNodes nodes) {
                return Collections.singleton(nodes.getMasterNodeId());
            }
        }, MASTER_ELIGIBLE(DiscoveryNodeRole.MASTER_ROLE.roleName() + ":true") {
            @Override
            Set<String> matchingNodeIds(DiscoveryNodes nodes) {
                Set<String> ids = new HashSet<>();
                nodes.getMasterNodes().keysIt().forEachRemaining(ids::add);
                return ids;
            }
        }, DATA(DiscoveryNodeRole.DATA_ROLE.roleName() + ":true") {
            @Override
            Set<String> matchingNodeIds(DiscoveryNodes nodes) {
                Set<String> ids = new HashSet<>();
                nodes.getDataNodes().keysIt().forEachRemaining(ids::add);
                return ids;
            }
        }, INGEST(DiscoveryNodeRole.INGEST_ROLE.roleName() + ":true") {
            @Override
            Set<String> matchingNodeIds(DiscoveryNodes nodes) {
                Set<String> ids = new HashSet<>();
                nodes.getIngestNodes().keysIt().forEachRemaining(ids::add);
                return ids;
            }
        }, COORDINATING_ONLY(DiscoveryNode.COORDINATING_ONLY + ":true") {
            @Override
            Set<String> matchingNodeIds(DiscoveryNodes nodes) {
                Set<String> ids = new HashSet<>();
                nodes.getCoordinatingOnlyNodes().keysIt().forEachRemaining(ids::add);
                return ids;
            }
        }, CUSTOM_ATTRIBUTE("attr:value") {
            @Override
            Set<String> matchingNodeIds(DiscoveryNodes nodes) {
                Set<String> ids = new HashSet<>();
                nodes.getNodes().valuesIt().forEachRemaining(node -> {
                    if ("value".equals(node.getAttributes().get("attr"))) {
                        ids.add(node.getId());
                    }
                });
                return ids;
            }
        }, CUSTOM_ROLE("custom_role:true") {
            @Override
            Set<String> matchingNodeIds(DiscoveryNodes nodes) {
                Set<String> ids = new HashSet<>();
                nodes.getNodes().valuesIt().forEachRemaining(node -> {
                    if (node.getRoles().stream().anyMatch(role -> role.roleName().equals("custom_role"))) {
                        ids.add(node.getId());
                    }
                });
                return ids;
            }
        };

        private final String selector;

        NodeSelector(String selector) {
            this.selector = selector;
        }

        abstract Set<String> matchingNodeIds(DiscoveryNodes nodes);
    }

    /** Min/max cluster version must track the oldest and newest node versions. */
    public void testMaxMinNodeVersion() {
        DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
        discoBuilder.add(new DiscoveryNode("name_" + 1, "node_" + 1, buildNewFakeTransportAddress(), Collections.emptyMap(),
            new HashSet<>(randomSubsetOf(DiscoveryNodeRole.BUILT_IN_ROLES)),
            Version.fromString("5.1.0")));
        discoBuilder.add(new DiscoveryNode("name_" + 2, "node_" + 2, buildNewFakeTransportAddress(), Collections.emptyMap(),
            new HashSet<>(randomSubsetOf(DiscoveryNodeRole.BUILT_IN_ROLES)),
            Version.fromString("6.3.0")));
        discoBuilder.add(new DiscoveryNode("name_" + 3, "node_" + 3, buildNewFakeTransportAddress(), Collections.emptyMap(),
            new HashSet<>(randomSubsetOf(DiscoveryNodeRole.BUILT_IN_ROLES)),
            Version.fromString("1.1.0")));
        // Use node ids (not node names) when designating the local and master nodes;
        // the ids above are "node_<n>" while "name_<n>" is only the node's name.
        discoBuilder.localNodeId("node_1");
        discoBuilder.masterNodeId("node_2");
        DiscoveryNodes build = discoBuilder.build();
        assertEquals(Version.fromString("6.3.0"), build.getMaxNodeVersion());
        assertEquals(Version.fromString("1.1.0"), build.getMinNodeVersion());
    }
}
| |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package org.wso2.developerstudio.eclipse.gmf.esb.provider;
import java.util.Collection;
import java.util.List;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbDiagram;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbFactory;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
/**
* This is the item provider adapter for a {@link org.wso2.developerstudio.eclipse.gmf.esb.EsbDiagram} object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public class EsbDiagramItemProvider extends ItemProviderAdapter implements IEditingDomainItemProvider,
        IStructuredItemContentProvider, ITreeItemContentProvider, IItemLabelProvider, IItemPropertySource {
    /**
     * This constructs an instance from a factory and a notifier.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EsbDiagramItemProvider(AdapterFactory adapterFactory) {
        super(adapterFactory);
    }

    /**
     * This returns the property descriptors for the adapted class.
     * <!-- begin-user-doc -->
     * Hand-customized ("generated NOT"): the cached descriptor list is cleared and
     * rebuilt on every call rather than computed once, presumably so descriptors
     * reflect the object's current state — TODO confirm against the editor's usage.
     * <!-- end-user-doc -->
     *
     * @generated NOT
     */
    @Override
    public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
        if (itemPropertyDescriptors != null) {
            itemPropertyDescriptors.clear();
        }
        super.getPropertyDescriptors(object);
        return itemPropertyDescriptors;
    }

    /**
     * This adds a property descriptor for the Test feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addTestPropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_EsbDiagram_Test_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_EsbDiagram_Test_feature", "_UI_EsbDiagram_type"),
                 EsbPackage.Literals.ESB_DIAGRAM__TEST,
                 true,
                 false,
                 false,
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,
                 null));
    }

    /**
     * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
     * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
     * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
        if (childrenFeatures == null) {
            // super call lazily initializes the childrenFeatures list before we add to it.
            super.getChildrenFeatures(object);
            childrenFeatures.add(EsbPackage.Literals.ESB_DIAGRAM__SERVER);
        }
        return childrenFeatures;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EStructuralFeature getChildFeature(Object object, Object child) {
        // Check the type of the specified child object and return the proper feature to use for
        // adding (see {@link AddCommand}) it as a child.
        return super.getChildFeature(object, child);
    }

    /**
     * This returns EsbDiagram.gif.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object getImage(Object object) {
        return overlayImage(object, getResourceLocator().getImage("full/obj16/EsbDiagram"));
    }

    /**
     * This returns the label text for the adapted class: the diagram type name,
     * suffixed with the Test feature's value when one is set.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String getText(Object object) {
        Integer labelValue = ((EsbDiagram)object).getTest();
        String label = labelValue == null ? null : labelValue.toString();
        return label == null || label.length() == 0 ?
            getString("_UI_EsbDiagram_type") :
            getString("_UI_EsbDiagram_type") + " " + label;
    }

    /**
     * This handles model notifications by calling {@link #updateChildren} to update any cached
     * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void notifyChanged(Notification notification) {
        updateChildren(notification);
        switch (notification.getFeatureID(EsbDiagram.class)) {
            // TEST changes only the label (refresh content=false, label=true);
            // SERVER changes the children (refresh content=true, label=false).
            case EsbPackage.ESB_DIAGRAM__TEST:
                fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
                return;
            case EsbPackage.ESB_DIAGRAM__SERVER:
                fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
                return;
        }
        super.notifyChanged(notification);
    }

    /**
     * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
     * that can be created under this object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
        super.collectNewChildDescriptors(newChildDescriptors, object);
        newChildDescriptors.add
            (createChildParameter
                (EsbPackage.Literals.ESB_DIAGRAM__SERVER,
                 EsbFactory.eINSTANCE.createEsbServer()));
    }

    /**
     * Return the resource locator for this item provider's resources.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public ResourceLocator getResourceLocator() {
        return EsbEditPlugin.INSTANCE;
    }
}
| |
package org.finra.herd.service.impl;
import static org.finra.herd.dao.AbstractDaoTest.SECURITY_FUNCTION;
import static org.finra.herd.dao.AbstractDaoTest.SECURITY_ROLE;
import static org.finra.herd.service.AbstractServiceTest.ID;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.finra.herd.dao.SecurityRoleFunctionDao;
import org.finra.herd.model.AlreadyExistsException;
import org.finra.herd.model.api.xml.SecurityFunctionKey;
import org.finra.herd.model.api.xml.SecurityRoleFunction;
import org.finra.herd.model.api.xml.SecurityRoleFunctionCreateRequest;
import org.finra.herd.model.api.xml.SecurityRoleFunctionKey;
import org.finra.herd.model.api.xml.SecurityRoleFunctionKeys;
import org.finra.herd.model.api.xml.SecurityRoleKey;
import org.finra.herd.model.jpa.SecurityFunctionEntity;
import org.finra.herd.model.jpa.SecurityRoleEntity;
import org.finra.herd.model.jpa.SecurityRoleFunctionEntity;
import org.finra.herd.service.helper.SecurityFunctionDaoHelper;
import org.finra.herd.service.helper.SecurityFunctionHelper;
import org.finra.herd.service.helper.SecurityRoleDaoHelper;
import org.finra.herd.service.helper.SecurityRoleFunctionDaoHelper;
import org.finra.herd.service.helper.SecurityRoleFunctionHelper;
import org.finra.herd.service.helper.SecurityRoleHelper;
public class SecurityRoleFunctionServiceImplTest
{
// JUnit rule for declaring expected exceptions in tests that use it.
@Rule
public ExpectedException expectedException = ExpectedException.none();

// Mocked collaborators, injected into the service under test below.
@Mock
private SecurityFunctionDaoHelper securityFunctionDaoHelper;

@Mock
private SecurityFunctionHelper securityFunctionHelper;

@Mock
private SecurityRoleDaoHelper securityRoleDaoHelper;

@Mock
private SecurityRoleFunctionDao securityRoleFunctionDao;

@Mock
private SecurityRoleFunctionDaoHelper securityRoleFunctionDaoHelper;

@Mock
private SecurityRoleFunctionHelper securityRoleFunctionHelper;

// Service under test, with the mocks above injected.
@InjectMocks
private SecurityRoleFunctionServiceImpl securityRoleFunctionService;

@Mock
private SecurityRoleHelper securityRoleHelper;
/**
 * Initializes the {@code @Mock} and {@code @InjectMocks} annotated fields before each test.
 */
@Before
public void before()
{
    MockitoAnnotations.initMocks(this);
}
/**
 * Creates a security role to function mapping through the service and validates
 * that the returned mapping carries the id assigned at persistence time.
 */
@Test
public void testCreateSecurityRoleFunction()
{
    // Create a security role to function mapping key.
    SecurityRoleFunctionKey securityRoleFunctionKey = new SecurityRoleFunctionKey(SECURITY_ROLE, SECURITY_FUNCTION);
    // Create a security role to function mapping create request.
    SecurityRoleFunctionCreateRequest securityRoleFunctionCreateRequest = new SecurityRoleFunctionCreateRequest(securityRoleFunctionKey);
    // Create a security role entity.
    SecurityRoleEntity securityRoleEntity = new SecurityRoleEntity();
    securityRoleEntity.setCode(SECURITY_ROLE);
    // Create a security function entity.
    SecurityFunctionEntity securityFunctionEntity = new SecurityFunctionEntity();
    securityFunctionEntity.setCode(SECURITY_FUNCTION);
    // Mock the external calls. The mapping does not exist yet, so creation proceeds.
    when(securityRoleFunctionDao.getSecurityRoleFunctionByKey(securityRoleFunctionKey)).thenReturn(null);
    when(securityRoleDaoHelper.getSecurityRoleEntity(SECURITY_ROLE)).thenReturn(securityRoleEntity);
    when(securityFunctionDaoHelper.getSecurityFunctionEntity(SECURITY_FUNCTION)).thenReturn(securityFunctionEntity);
    doAnswer(new Answer<Void>()
    {
        public Void answer(InvocationOnMock invocation)
        {
            // Simulate persistence by assigning the primary key to the saved entity.
            SecurityRoleFunctionEntity securityRoleFunctionEntity = (SecurityRoleFunctionEntity) invocation.getArguments()[0];
            securityRoleFunctionEntity.setId(ID);
            return null;
        }
    }).when(securityRoleFunctionDao).saveAndRefresh(any(SecurityRoleFunctionEntity.class));
    // Call the method under test.
    SecurityRoleFunction result = securityRoleFunctionService.createSecurityRoleFunction(securityRoleFunctionCreateRequest);
    // Validate the results. Assert against the stubbed ID constant rather than
    // result.getId() so the persisted id is actually verified.
    assertEquals(new SecurityRoleFunction(ID, securityRoleFunctionKey), result);
    // Verify the external calls.
    verify(securityRoleFunctionHelper).validateAndTrimSecurityRoleFunctionCreateRequest(securityRoleFunctionCreateRequest);
    verify(securityRoleFunctionDao).getSecurityRoleFunctionByKey(securityRoleFunctionKey);
    verify(securityRoleDaoHelper).getSecurityRoleEntity(SECURITY_ROLE);
    verify(securityFunctionDaoHelper).getSecurityFunctionEntity(SECURITY_FUNCTION);
    verify(securityRoleFunctionDao).saveAndRefresh(any(SecurityRoleFunctionEntity.class));
    verifyNoMoreInteractionsHelper();
}
/**
 * Validates that creating a security role to function mapping that already exists
 * fails with an {@link AlreadyExistsException} carrying the expected message.
 */
@Test
public void testCreateSecurityRoleFunctionSecurityRoleFunctionAlreadyExists()
{
    // Create a security role to function mapping key.
    SecurityRoleFunctionKey securityRoleFunctionKey = new SecurityRoleFunctionKey(SECURITY_ROLE, SECURITY_FUNCTION);
    // Create a security role to function mapping create request.
    SecurityRoleFunctionCreateRequest securityRoleFunctionCreateRequest = new SecurityRoleFunctionCreateRequest(securityRoleFunctionKey);
    // Create a security role to function mapping entity.
    SecurityRoleFunctionEntity securityRoleFunctionEntity = new SecurityRoleFunctionEntity();
    // Mock the external calls. An existing mapping triggers the duplicate check.
    when(securityRoleFunctionDao.getSecurityRoleFunctionByKey(securityRoleFunctionKey)).thenReturn(securityRoleFunctionEntity);
    // Call the method under test using try/catch instead of the ExpectedException rule,
    // so the verifications below actually execute (statements after a throwing call
    // are unreachable under the rule-based approach).
    try
    {
        securityRoleFunctionService.createSecurityRoleFunction(securityRoleFunctionCreateRequest);
        fail("Expected an AlreadyExistsException to be thrown.");
    }
    catch (AlreadyExistsException e)
    {
        assertEquals(String.format(
            "Unable to create security role to function mapping for \"%s\" security role name and \"%s\" security function name because it already exists.",
            SECURITY_ROLE, SECURITY_FUNCTION), e.getMessage());
    }
    // Verify the external calls.
    verify(securityRoleFunctionHelper).validateAndTrimSecurityRoleFunctionCreateRequest(securityRoleFunctionCreateRequest);
    verify(securityRoleFunctionDao).getSecurityRoleFunctionByKey(securityRoleFunctionKey);
    verifyNoMoreInteractionsHelper();
}
/**
 * Validates that deleting a security role to function mapping returns the deleted
 * mapping and removes the entity through the DAO.
 */
@Test
public void testDeleteSecurityRoleFunction()
{
    // Key identifying the role-to-function mapping under test.
    SecurityRoleFunctionKey mappingKey = new SecurityRoleFunctionKey(SECURITY_ROLE, SECURITY_FUNCTION);
    // Build the persisted entity graph the DAO helper will hand back.
    SecurityRoleEntity roleEntity = new SecurityRoleEntity();
    roleEntity.setCode(SECURITY_ROLE);
    SecurityFunctionEntity functionEntity = new SecurityFunctionEntity();
    functionEntity.setCode(SECURITY_FUNCTION);
    SecurityRoleFunctionEntity mappingEntity = new SecurityRoleFunctionEntity();
    mappingEntity.setId(ID);
    mappingEntity.setSecurityRole(roleEntity);
    mappingEntity.setSecurityFunction(functionEntity);
    // Mock the external calls.
    when(securityRoleFunctionDaoHelper.getSecurityRoleFunctionEntity(mappingKey)).thenReturn(mappingEntity);
    // Call the method under test.
    SecurityRoleFunction deletedMapping = securityRoleFunctionService.deleteSecurityRoleFunction(mappingKey);
    // The service is expected to echo back the deleted mapping.
    assertEquals(new SecurityRoleFunction(ID, mappingKey), deletedMapping);
    // Verify the external calls.
    verify(securityRoleFunctionHelper).validateAndTrimSecurityRoleFunctionKey(mappingKey);
    verify(securityRoleFunctionDaoHelper).getSecurityRoleFunctionEntity(mappingKey);
    verify(securityRoleFunctionDao).delete(mappingEntity);
    verifyNoMoreInteractionsHelper();
}
/**
 * Validates that retrieving a security role to function mapping by key returns the
 * mapping built from the persisted entity.
 */
@Test
public void testGetSecurityRoleFunction()
{
    // Key identifying the role-to-function mapping under test.
    SecurityRoleFunctionKey mappingKey = new SecurityRoleFunctionKey(SECURITY_ROLE, SECURITY_FUNCTION);
    // Build the persisted entity graph the DAO helper will hand back.
    SecurityRoleEntity roleEntity = new SecurityRoleEntity();
    roleEntity.setCode(SECURITY_ROLE);
    SecurityFunctionEntity functionEntity = new SecurityFunctionEntity();
    functionEntity.setCode(SECURITY_FUNCTION);
    SecurityRoleFunctionEntity mappingEntity = new SecurityRoleFunctionEntity();
    mappingEntity.setId(ID);
    mappingEntity.setSecurityRole(roleEntity);
    mappingEntity.setSecurityFunction(functionEntity);
    // Mock the external calls.
    when(securityRoleFunctionDaoHelper.getSecurityRoleFunctionEntity(mappingKey)).thenReturn(mappingEntity);
    // Call the method under test.
    SecurityRoleFunction retrievedMapping = securityRoleFunctionService.getSecurityRoleFunction(mappingKey);
    // The returned mapping must mirror the stored entity.
    assertEquals(new SecurityRoleFunction(ID, mappingKey), retrievedMapping);
    // Verify the external calls.
    verify(securityRoleFunctionHelper).validateAndTrimSecurityRoleFunctionKey(mappingKey);
    verify(securityRoleFunctionDaoHelper).getSecurityRoleFunctionEntity(mappingKey);
    verifyNoMoreInteractionsHelper();
}
@Test
public void testGetSecurityRoleFunctions()
{
// Create a list of security role to function mapping keys.
List<SecurityRoleFunctionKey> securityRoleFunctionKeys = Collections.singletonList(new SecurityRoleFunctionKey(SECURITY_ROLE, SECURITY_FUNCTION));
// Mock the external calls.
when(securityRoleFunctionDao.getSecurityRoleFunctionKeys()).thenReturn(securityRoleFunctionKeys);
// Call the method under test.
SecurityRoleFunctionKeys result = securityRoleFunctionService.getSecurityRoleFunctions();
// Validate the results.
assertEquals(new SecurityRoleFunctionKeys(securityRoleFunctionKeys), result);
// Verify the external calls.
verify(securityRoleFunctionDao).getSecurityRoleFunctionKeys();
verifyNoMoreInteractionsHelper();
}
@Test
public void testGetSecurityRoleFunctionsBySecurityFunction()
{
// Create a security function key.
SecurityFunctionKey securityFunctionKey = new SecurityFunctionKey(SECURITY_FUNCTION);
// Create a list of security role to function mapping keys.
List<SecurityRoleFunctionKey> securityRoleFunctionKeys = Collections.singletonList(new SecurityRoleFunctionKey(SECURITY_ROLE, SECURITY_FUNCTION));
// Mock the external calls.
when(securityRoleFunctionDao.getSecurityRoleFunctionKeysBySecurityFunction(SECURITY_FUNCTION)).thenReturn(securityRoleFunctionKeys);
// Call the method under test.
SecurityRoleFunctionKeys result = securityRoleFunctionService.getSecurityRoleFunctionsBySecurityFunction(securityFunctionKey);
// Validate the results.
assertEquals(new SecurityRoleFunctionKeys(securityRoleFunctionKeys), result);
// Verify the external calls.
verify(securityFunctionHelper).validateAndTrimSecurityFunctionKey(securityFunctionKey);
verify(securityRoleFunctionDao).getSecurityRoleFunctionKeysBySecurityFunction(SECURITY_FUNCTION);
verifyNoMoreInteractionsHelper();
}
@Test
public void testGetSecurityRoleFunctionsBySecurityRole()
{
// Create a security role key.
SecurityRoleKey securityRoleKey = new SecurityRoleKey(SECURITY_ROLE);
// Create a list of security role to function mapping keys.
List<SecurityRoleFunctionKey> securityRoleFunctionKeys = Collections.singletonList(new SecurityRoleFunctionKey(SECURITY_ROLE, SECURITY_FUNCTION));
// Mock the external calls.
when(securityRoleFunctionDao.getSecurityRoleFunctionKeysBySecurityRole(SECURITY_ROLE)).thenReturn(securityRoleFunctionKeys);
// Call the method under test.
SecurityRoleFunctionKeys result = securityRoleFunctionService.getSecurityRoleFunctionsBySecurityRole(securityRoleKey);
// Validate the results.
assertEquals(new SecurityRoleFunctionKeys(securityRoleFunctionKeys), result);
// Verify the external calls.
verify(securityRoleHelper).validateAndTrimSecurityRoleKey(securityRoleKey);
verify(securityRoleFunctionDao).getSecurityRoleFunctionKeysBySecurityRole(SECURITY_ROLE);
verifyNoMoreInteractionsHelper();
}
    /**
     * Verifies that none of the mocked dependencies saw any interactions beyond those already verified explicitly by the calling test.
     */
    private void verifyNoMoreInteractionsHelper()
    {
        verifyNoMoreInteractions(securityFunctionDaoHelper, securityFunctionHelper, securityRoleDaoHelper, securityRoleFunctionDao,
            securityRoleFunctionDaoHelper, securityRoleFunctionHelper, securityRoleHelper);
    }
}
| |
/*
* Copyright 2019 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.bugpatterns;
import static com.google.errorprone.fixes.SuggestedFixes.qualifyType;
import static com.google.errorprone.matchers.Matchers.allOf;
import static com.google.errorprone.matchers.Matchers.anyOf;
import static com.google.errorprone.matchers.Matchers.isSubtypeOf;
import static com.google.errorprone.matchers.Matchers.not;
import static com.google.errorprone.matchers.Matchers.nothing;
import static com.google.errorprone.matchers.method.MethodMatchers.constructor;
import static com.google.errorprone.matchers.method.MethodMatchers.instanceMethod;
import static com.google.errorprone.matchers.method.MethodMatchers.staticMethod;
import static com.google.errorprone.util.ASTHelpers.getSymbol;
import static com.google.errorprone.util.ASTHelpers.getType;
import static com.google.errorprone.util.ASTHelpers.methodCanBeOverridden;
import com.google.auto.value.AutoValue;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.errorprone.BugPattern;
import com.google.errorprone.BugPattern.SeverityLevel;
import com.google.errorprone.VisitorState;
import com.google.errorprone.bugpatterns.BugChecker.CompilationUnitTreeMatcher;
import com.google.errorprone.fixes.SuggestedFix;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.matchers.Matcher;
import com.google.errorprone.util.ASTHelpers;
import com.sun.source.tree.CompilationUnitTree;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.IdentifierTree;
import com.sun.source.tree.LambdaExpressionTree;
import com.sun.source.tree.MemberSelectTree;
import com.sun.source.tree.MethodInvocationTree;
import com.sun.source.tree.MethodTree;
import com.sun.source.tree.ReturnTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.VariableTree;
import com.sun.source.util.TreePath;
import com.sun.source.util.TreePathScanner;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.VarSymbol;
import com.sun.tools.javac.code.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.lang.model.type.TypeKind;
/**
* Flags methods which return mutable collections from some code paths, but immutable ones from
* others.
*/
@BugPattern(
    name = "MixedMutabilityReturnType",
    summary =
        "This method returns both mutable and immutable collections or maps from different "
            + "paths. This may be confusing for users of the method.",
    severity = SeverityLevel.WARNING)
public final class MixedMutabilityReturnType extends BugChecker
    implements CompilationUnitTreeMatcher {
  // java.util.Collections factories that return immutable instances despite their
  // static type being a plain JDK collection interface.
  private static final Matcher<ExpressionTree> IMMUTABLE_FACTORY =
      staticMethod()
          .onClass("java.util.Collections")
          .namedAnyOf("emptyList", "emptyMap", "emptySet", "singleton", "singletonList");
  // Initializers that create an *empty* mutable collection; only variables starting
  // from one of these can be safely rewritten to an ImmutableX.Builder.
  private static final Matcher<ExpressionTree> EMPTY_INITIALIZER =
      anyOf(
          constructor().forClass("java.util.ArrayList").withNoParameters(),
          constructor().forClass("java.util.HashMap").withNoParameters(),
          staticMethod()
              .onClass("com.google.common.collect.Lists")
              .namedAnyOf("newArrayList", "newLinkedList")
              .withNoParameters(),
          staticMethod()
              .onClass("com.google.common.collect.Sets")
              .namedAnyOf("newHashSet", "newLinkedHashSet")
              .withNoParameters());
  // Expressions whose static type (or factory) guarantees immutability.
  private static final Matcher<ExpressionTree> IMMUTABLE =
      anyOf(
          IMMUTABLE_FACTORY,
          isSubtypeOf(ImmutableCollection.class),
          isSubtypeOf(ImmutableMap.class));
  // Expressions whose static type is a known-mutable concrete collection.
  private static final Matcher<ExpressionTree> MUTABLE =
      anyOf(
          isSubtypeOf(ArrayList.class),
          isSubtypeOf(LinkedHashSet.class),
          isSubtypeOf(LinkedHashMap.class),
          isSubtypeOf(LinkedList.class),
          isSubtypeOf(HashMap.class),
          isSubtypeOf(HashBiMap.class),
          isSubtypeOf(TreeMap.class));
  // Methods with these return types are the only ones this check inspects.
  private static final Matcher<Tree> RETURNS_COLLECTION =
      anyOf(isSubtypeOf(Collection.class), isSubtypeOf(Map.class));
  // Per return-type-family refactoring recipe: the immutable replacement type, the
  // mutating methods that can be mapped onto its Builder, and types to skip.
  // Note: iteration order matters — BiMap must be tested before the general Map entry,
  // which explicitly excludes BiMap.
  private static final ImmutableMap<Matcher<Tree>, TypeDetails> REFACTORING_DETAILS =
      ImmutableMap.of(
          isSubtypeOf(BiMap.class),
          TypeDetails.of(
              "com.google.common.collect.ImmutableBiMap",
              instanceMethod()
                  .onDescendantOf(BiMap.class.getName())
                  .namedAnyOf("put", "putAll"),
              nothing()),
          allOf(isSubtypeOf(Map.class), not(isSubtypeOf(BiMap.class))),
          TypeDetails.of(
              "com.google.common.collect.ImmutableMap",
              instanceMethod().onDescendantOf(Map.class.getName()).namedAnyOf("put", "putAll"),
              isSubtypeOf(SortedMap.class)),
          isSubtypeOf(List.class),
          TypeDetails.of(
              "com.google.common.collect.ImmutableList",
              instanceMethod().onDescendantOf(List.class.getName()).namedAnyOf("add", "addAll"),
              nothing()),
          isSubtypeOf(Set.class),
          TypeDetails.of(
              "com.google.common.collect.ImmutableSet",
              instanceMethod().onDescendantOf(Set.class.getName()).namedAnyOf("add", "addAll"),
              nothing()));
  // Two passes over the compilation unit: first classify local variables as
  // mutable/immutable by their initializers, then inspect each method's returns.
  @Override
  public Description matchCompilationUnit(CompilationUnitTree tree, VisitorState state) {
    VariableMutabilityScanner variableMutabilityScanner = new VariableMutabilityScanner(state);
    variableMutabilityScanner.scan(state.getPath(), null);
    new ReturnTypesScanner(
            state, variableMutabilityScanner.immutable, variableMutabilityScanner.mutable)
        .scan(state.getPath(), null);
    return Description.NO_MATCH;
  }
  /** First pass: records which variables are initialized mutably/immutably. */
  private static final class VariableMutabilityScanner extends TreePathScanner<Void, Void> {
    private final VisitorState state;
    private final Set<VarSymbol> mutable = new HashSet<>();
    private final Set<VarSymbol> immutable = new HashSet<>();
    private VariableMutabilityScanner(VisitorState state) {
      this.state = state;
    }
    @Override
    public Void visitVariable(VariableTree variableTree, Void unused) {
      VarSymbol symbol = getSymbol(variableTree);
      ExpressionTree initializer = variableTree.getInitializer();
      // Skip uninitialized variables and `= null` initializers; only classify
      // variables whose declared type is a Collection or Map.
      if (initializer != null
          && getType(initializer) != null
          && getType(initializer).getKind() != TypeKind.NULL
          && RETURNS_COLLECTION.matches(initializer, state)) {
        if (IMMUTABLE.matches(initializer, state)) {
          immutable.add(symbol);
        }
        if (MUTABLE.matches(initializer, state)) {
          mutable.add(symbol);
        }
      }
      return super.visitVariable(variableTree, unused);
    }
  }
  /** Second pass: flags methods with both mutable and immutable return paths. */
  private final class ReturnTypesScanner extends SuppressibleTreePathScanner<Void, Void> {
    private final VisitorState state;
    private final Set<VarSymbol> mutable;
    private final Set<VarSymbol> immutable;
    private ReturnTypesScanner(
        VisitorState state, Set<VarSymbol> immutable, Set<VarSymbol> mutable) {
      this.state = state;
      this.immutable = immutable;
      this.mutable = mutable;
    }
    @Override
    public Void visitMethod(MethodTree methodTree, Void unused) {
      if (!RETURNS_COLLECTION.matches(methodTree.getReturnType(), state)) {
        return super.visitMethod(methodTree, unused);
      }
      MethodScanner scanner = new MethodScanner();
      scanner.scan(getCurrentPath(), null);
      // Only report when the method mixes both kinds of return values.
      if (!scanner.immutableReturns.isEmpty() && !scanner.mutableReturns.isEmpty()) {
        state.reportMatch(
            buildDescription(methodTree)
                .addAllFixes(
                    generateFixes(
                        ImmutableList.<ReturnTree>builder()
                            .addAll(scanner.mutableReturns)
                            .addAll(scanner.immutableReturns)
                            .build(),
                        getCurrentPath(),
                        state))
                .build());
      }
      return super.visitMethod(methodTree, unused);
    }
    /** Collects the mutable and immutable return statements of a single method. */
    private final class MethodScanner extends TreePathScanner<Void, Void> {
      private final List<ReturnTree> immutableReturns = new ArrayList<>();
      private final List<ReturnTree> mutableReturns = new ArrayList<>();
      // The scan starts at a method declaration; after descending into that first
      // method, any further MethodTree is a nested (local/anonymous class) method
      // whose returns do not belong to the method under inspection.
      private boolean skipMethods = false;
      @Override
      public Void visitMethod(MethodTree node, Void unused) {
        if (skipMethods) {
          return null;
        }
        skipMethods = true;
        return super.visitMethod(node, null);
      }
      @Override
      public Void visitReturn(ReturnTree returnTree, Void unused) {
        // Returned identifiers are classified by the first-pass variable analysis.
        if (returnTree.getExpression() instanceof IdentifierTree) {
          Symbol symbol = getSymbol(returnTree.getExpression());
          if (mutable.contains(symbol)) {
            mutableReturns.add(returnTree);
            return super.visitReturn(returnTree, null);
          }
          if (immutable.contains(symbol)) {
            immutableReturns.add(returnTree);
            return super.visitReturn(returnTree, null);
          }
        }
        // `return null;` is neither mutable nor immutable.
        Type type = getType(returnTree.getExpression());
        if (type == null || type.getKind() == TypeKind.NULL) {
          return super.visitReturn(returnTree, null);
        }
        if (IMMUTABLE.matches(returnTree.getExpression(), state)) {
          immutableReturns.add(returnTree);
        }
        if (MUTABLE.matches(returnTree.getExpression(), state)) {
          mutableReturns.add(returnTree);
        }
        return super.visitReturn(returnTree, null);
      }
      @Override
      public Void visitLambdaExpression(LambdaExpressionTree node, Void unused) {
        // Returns inside a lambda are not returns of the enclosing method.
        return null;
      }
    }
  }
  /**
   * Builds up to two fixes for the offending method: a "simple" fix that wraps
   * mutable returns in ImmutableX.copyOf, and (when possible) a fix that rewrites
   * a locally built collection into an ImmutableX.Builder.
   */
  private static ImmutableList<SuggestedFix> generateFixes(
      List<ReturnTree> returnTrees, TreePath methodTree, VisitorState state) {
    SuggestedFix.Builder simpleFix = SuggestedFix.builder();
    SuggestedFix.Builder fixWithBuilders = SuggestedFix.builder();
    boolean anyBuilderFixes = false;
    Matcher<Tree> returnTypeMatcher = null;
    // Determine the target immutable type from the declared return type, and
    // change the declared return type where safe.
    for (Map.Entry<Matcher<Tree>, TypeDetails> entry : REFACTORING_DETAILS.entrySet()) {
      Tree returnType = ((MethodTree) methodTree.getLeaf()).getReturnType();
      Matcher<Tree> matcher = entry.getKey();
      if (matcher.matches(returnType, state)) {
        // Only change the return type if the method is not overridable, otherwise this could
        // break builds.
        if (!methodCanBeOverridden(getSymbol((MethodTree) methodTree.getLeaf()))) {
          SuggestedFix.Builder fixBuilder = SuggestedFix.builder();
          fixBuilder.replace(
              ASTHelpers.getErasedTypeTree(returnType),
              qualifyType(state, fixBuilder, entry.getValue().immutableType()));
          simpleFix.merge(fixBuilder);
          fixWithBuilders.merge(fixBuilder);
        }
        returnTypeMatcher = isSubtypeOf(entry.getValue().immutableType());
        break;
      }
    }
    // No recipe for this return type: nothing to suggest.
    if (returnTypeMatcher == null) {
      return ImmutableList.of();
    }
    for (ReturnTree returnTree : returnTrees) {
      if (returnTypeMatcher.matches(returnTree.getExpression(), state)) {
        break;
      }
      for (Map.Entry<Matcher<Tree>, TypeDetails> entry : REFACTORING_DETAILS.entrySet()) {
        Matcher<Tree> predicate = entry.getKey();
        TypeDetails typeDetails = entry.getValue();
        ExpressionTree expression = returnTree.getExpression();
        // Skip already immutable returns.
        if (!predicate.matches(expression, state)) {
          continue;
        }
        if (expression instanceof IdentifierTree) {
          SuggestedFix simple = applySimpleFix(typeDetails.immutableType(), expression, state);
          // If we're returning an identifier of this mutable type, try to turn it into a Builder.
          ReturnTypeFixer returnTypeFixer =
              new ReturnTypeFixer(getSymbol(expression), typeDetails, state);
          returnTypeFixer.scan(methodTree, null);
          anyBuilderFixes |= !returnTypeFixer.failed;
          simpleFix.merge(simple);
          fixWithBuilders.merge(returnTypeFixer.failed ? simple : returnTypeFixer.fix.build());
          break;
        }
        // Collections.emptyList() etc. become ImmutableX.of().
        if (IMMUTABLE_FACTORY.matches(expression, state)) {
          SuggestedFix.Builder fix = SuggestedFix.builder();
          fix.replace(
              ((MethodInvocationTree) expression).getMethodSelect(),
              qualifyType(state, fix, typeDetails.immutableType()) + ".of");
          simpleFix.merge(fix);
          fixWithBuilders.merge(fix);
          break;
        }
        // Fallback: wrap the returned expression in ImmutableX.copyOf(...).
        SuggestedFix simple = applySimpleFix(typeDetails.immutableType(), expression, state);
        simpleFix.merge(simple);
        fixWithBuilders.merge(simple);
        break;
      }
    }
    if (!anyBuilderFixes) {
      return ImmutableList.of(simpleFix.build());
    }
    return ImmutableList.of(
        simpleFix.build(),
        fixWithBuilders
            .setShortDescription(
                "Fix using builders. Warning: this may change behaviour "
                    + "if duplicate keys are added to ImmutableMap.Builder.")
            .build());
  }
  /** Wraps {@code expression} in {@code immutableType}.copyOf(...). */
  private static SuggestedFix applySimpleFix(
      String immutableType, ExpressionTree expression, VisitorState state) {
    SuggestedFix.Builder fix = SuggestedFix.builder();
    fix.replace(
        expression,
        String.format(
            "%s.copyOf(%s)",
            qualifyType(state, fix, immutableType), state.getSourceForNode(expression)));
    return fix.build();
  }
  /**
   * Attempts to rewrite all uses of one local mutable collection variable into an
   * ImmutableX.Builder: declaration, append calls, and the final return. Sets
   * {@code failed} if any use cannot be rewritten safely.
   */
  private static final class ReturnTypeFixer extends TreePathScanner<Void, Void> {
    private final Symbol symbol;
    private final TypeDetails details;
    private final VisitorState state;
    private final SuggestedFix.Builder fix = SuggestedFix.builder();
    private boolean builderifiedVariable = false;
    private boolean failed = false;
    private ReturnTypeFixer(Symbol symbol, TypeDetails details, VisitorState state) {
      this.symbol = symbol;
      this.details = details;
      this.state = state;
    }
    @Override
    public Void visitVariable(VariableTree variableTree, Void unused) {
      if (!getSymbol(variableTree).equals(symbol)) {
        return super.visitVariable(variableTree, null);
      }
      // Only variables starting from an empty mutable collection (and not of a
      // skipped type such as SortedMap) can become a Builder.
      if (variableTree.getInitializer() == null
          || !EMPTY_INITIALIZER.matches(variableTree.getInitializer(), state)
          || details.skipTypes().matches(variableTree.getInitializer(), state)) {
        failed = true;
        return null;
      }
      Tree erasedType = ASTHelpers.getErasedTypeTree(variableTree.getType());
      // don't try to replace synthetic nodes for `var`
      if (ASTHelpers.getStartPosition(erasedType) != -1) {
        fix.replace(erasedType, qualifyType(state, fix, details.builderType()));
      }
      if (variableTree.getInitializer() != null) {
        fix.replace(
            variableTree.getInitializer(),
            qualifyType(state, fix, details.immutableType()) + ".builder()");
      }
      builderifiedVariable = true;
      return super.visitVariable(variableTree, null);
    }
    @Override
    public Void visitIdentifier(IdentifierTree identifier, Void unused) {
      Tree parent = getCurrentPath().getParentPath().getLeaf();
      if (!getSymbol(identifier).equals(symbol)) {
        return null;
      }
      if (parent instanceof VariableTree) {
        VariableTree variable = (VariableTree) parent;
        fix.replace(variable.getType(), qualifyType(state, fix, details.builderType()));
        return null;
      }
      if (parent instanceof MemberSelectTree) {
        Tree grandParent = getCurrentPath().getParentPath().getParentPath().getLeaf();
        if (grandParent instanceof MethodInvocationTree) {
          // Only append-style calls (add/put/...) map onto the Builder API.
          if (!details.appendMethods().matches((MethodInvocationTree) grandParent, state)) {
            failed = true;
            return null;
          }
        }
        return null;
      }
      // Any other use of the variable before it became a builder is unsafe.
      if (!builderifiedVariable) {
        failed = true;
        return null;
      }
      if (parent instanceof ReturnTree) {
        fix.postfixWith(identifier, ".build()");
      }
      return null;
    }
  }
  /** One refactoring recipe: immutable target type, builder type, and matchers. */
  @AutoValue
  abstract static class TypeDetails {
    abstract String immutableType();
    abstract String builderType();
    abstract Matcher<ExpressionTree> appendMethods();
    abstract Matcher<Tree> skipTypes();
    static TypeDetails of(
        String immutableType, Matcher<ExpressionTree> appendMethods, Matcher<Tree> skipTypes) {
      return new AutoValue_MixedMutabilityReturnType_TypeDetails(
          immutableType, immutableType + ".Builder", appendMethods, skipTypes);
    }
  }
}
| |
package org.fountanio.juancode.eng;
import java.awt.Color;
import java.awt.Font;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import org.cjaf.microcode.helper.GLHelper;
import org.lwjgl.openal.AL;
import org.lwjgl.openal.AL10;
import org.lwjgl.opengl.Display;
import org.newdawn.slick.SlickException;
import org.newdawn.slick.UnicodeFont;
import org.newdawn.slick.font.effects.ColorEffect;
import org.newdawn.slick.opengl.Texture;
import static org.lwjgl.opengl.GL11.*;
/**
* 2D Game Engine
* @author MicroCode
* @version 1.0
*/
public class Engine {
    /**
     * Tears the engine down after an unrecoverable error: prints the stack
     * trace, destroys the display and the OpenAL context, then exits.
     *
     * @param e the exception that caused the shutdown
     */
    private static void crash(Exception e) {
        e.printStackTrace();
        Display.destroy();
        AL.destroy();
        System.exit(1);
    }
    /**
     * Renders a string of text at the given position using a Slick
     * {@link UnicodeFont} built from the supplied AWT font and color.
     * <p>
     * NOTE(review): a new UnicodeFont is created and its glyphs loaded on every
     * call, which is expensive; callers drawing text each frame should cache
     * the font.
     *
     * @param text the text to draw
     * @param x horizontal position
     * @param y vertical position
     * @param font AWT font describing face, style and size
     * @param color color to render the text with
     */
    @SuppressWarnings("unchecked")
    public static void say(String text, float x, float y, Font font, Color color) {
        glDisable(GL_TEXTURE_2D);
        try {
            UnicodeFont fnt = new UnicodeFont(font);
            fnt.addAsciiGlyphs();
            fnt.addGlyphs(400, 600);
            // Slick's getEffects() returns a raw List, hence @SuppressWarnings.
            fnt.getEffects().add(new ColorEffect(color));
            fnt.loadGlyphs();
            fnt.drawString(x, y, text);
        } catch (SlickException eee) {
            crash(eee);
        }
        glEnable(GL_TEXTURE_2D);
    }
    /**
     * Draws a texture
     * @param texture - org.newdawn.slick.opengl.Texture texture
     * @param x
     * @param y
     * @param w - width
     * @param h - height
     * @since 1.0 Beta
     */
    public static void draw(org.newdawn.slick.opengl.Texture texture, float x, float y, float w, float h) {
        GLHelper.drawTexture(texture, x, y, w, h);
    }
    /**
     * Draws a single tile of a spritesheet as a textured quad.
     *
     * @param ss - spritesheet
     * @param xtile - the x coordinate of the section
     * @param ytile - the y coordinate of the section
     * @param x screen x
     * @param y screen y
     * @param w width on screen
     * @param h height on screen
     * @since 1.0 Beta
     */
    public static void draw(SpriteSheet ss, int xtile, int ytile, float x, float y,
            float w, float h) {
        Texture tex = ss.tex;
        // Pixel offset of the requested tile inside the sheet.
        float xt = (xtile * ss.tW);
        float yt = (ytile * ss.tH);
        glBindTexture(GL_TEXTURE_2D, tex.getTextureID());
        // Nearest-neighbour filtering keeps pixel art crisp.
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
        glBegin(GL_QUADS);
        {
            glTexCoord2f((xt) / ss.tex.getTextureWidth(), (yt) / ss.tex.getTextureHeight());
            glVertex2f(x, y);
            glTexCoord2f((xt + ss.tW) / ss.tex.getTextureWidth(), (yt) / ss.tex.getTextureHeight());
            glVertex2f(x + w, y);
            glTexCoord2f((xt + ss.tW) / ss.tex.getTextureWidth(), (yt + ss.tH) / ss.tex.getTextureHeight());
            glVertex2f(x + w, y + h);
            glTexCoord2f((xt) / ss.tex.getTextureWidth(), (yt + ss.tH) / ss.tex.getTextureHeight());
            glVertex2f(x, y + h);
        }
        glEnd();
        glBindTexture(GL_TEXTURE_2D, 0);
    }
    /**
     * Loads (or fetches a cached) texture by name via {@link GLHelper}.
     *
     * @param texture resource name of the texture
     * @return the loaded texture
     */
    public static Texture get(String texture) {
        return GLHelper.getTexture(texture);
    }
    /**
     * Gets the per-user application-data directory for the current OS.
     * <p>
     * Fixed: the previous implementation read the nonexistent system property
     * {@code os.home} (the correct property is {@code user.home}) and had the
     * macOS and Linux branches swapped.
     *
     * @return platform-specific directory for application data
     * @since 1.0 Beta
     */
    public static String getAppDataDir() {
        String osname = System.getProperty("os.name").toUpperCase(java.util.Locale.ROOT);
        if (osname.contains("WIN")) { // Windows: %AppData%
            return System.getenv("AppData");
        } else if (osname.contains("MAC")) { // macOS: ~/Library/Application Support
            return System.getProperty("user.home") + "/Library/Application Support";
        } else { // Linux and others: the user's home directory
            return System.getProperty("user.home");
        }
    }
    /**
     * Creates a folder in a directory
     * @param directory - directory in which you desire to create the folder
     * @param name - the name of the folder you want to make
     * @since 1.0 Beta
     */
    public static void makeFolder(String directory, String name) {
        try {
            if (directory == null) {
                throw new NullPointerException("directory cannot be null!");
            }
            if (name == null) {
                throw new NullPointerException("folder name cannot be null!" + "\n:" + directory);
            }
            // Use the two-argument File constructor so a missing trailing
            // separator on 'directory' cannot produce a mangled path.
            new File(directory, name).mkdirs();
        } catch (Exception e) {
            crash(e);
        }
    }
    /**
     * Creates the given file on disk if it does not already exist.
     *
     * @param file the file to create
     */
    public static void makeFile(File file) {
        try {
            file.createNewFile();
        } catch (IOException e) {
            crash(e);
        }
    }
    /**
     * Releases the OpenAL source backing the given sound.
     *
     * @param sound the sound whose source is deleted
     */
    public static void release(Sound sound) {
        AL10.alDeleteSources(sound.source());
    }
    /**
     * Releases the given texture's GL resources; a null texture is ignored.
     *
     * @param texture the texture to release, may be null
     */
    public static void release(org.newdawn.slick.opengl.Texture texture) {
        if (texture != null) {
            texture.release();
        }
    }
    /**
     * Gets a file from your resource folder
     * @param res - file name
     * @return - File you request, otherwise a NullPointerException
     */
    public static File getResourceLoc(String res) {
        URL url = ClassLoader.getSystemResource(res);
        if (url == null) {
            throw new NullPointerException("Engine could not find " + res + "!");
        }
        try {
            return new File(url.toURI());
        } catch (URISyntaxException e) {
            crash(e);
        }
        return null; // unreachable: crash() exits
    }
    /**
     * Allows rendering of OGL shapes
     */
    public static void beginShapeRendering() {
        glDisable(GL_TEXTURE_2D); // render shapes
    }
    /**
     * Allows rendering of Textures
     */
    public static void endShapeRendering() {
        glEnable(GL_TEXTURE_2D);
    }
    /**
     * Allows rendering of Textures.
     *
     * @deprecated misspelled; use {@link #endShapeRendering()} instead.
     */
    @Deprecated
    public static void endShapeEndering() {
        endShapeRendering();
    }
    /** Starts playback of the given sound. */
    public static void play(Sound sound) {
        AL10.alSourcePlay(sound.source());
    }
    /** Pauses playback of the given sound. */
    public static void pause(Sound sound) {
        AL10.alSourcePause(sound.source());
    }
    /** Stops playback of the given sound. */
    public static void stop(Sound sound) {
        AL10.alSourceStop(sound.source());
    }
    /** Rewinds the given sound to its beginning. */
    public static void rewind(Sound sound) {
        AL10.alSourceRewind(sound.source());
    }
    /**
     * Renders the outline of a circle centered at (x, y).
     * <p>
     * Fixed: the previous implementation multiplied the center by {@code Math.PI}
     * and drew unrelated line segments; it did not produce a circle.
     *
     * @param x center x
     * @param y center y
     * @param radius circle radius in pixels
     */
    public static void renderCircle(int x, int y, int radius) {
        // Approximate the circle with one segment per degree.
        glBegin(GL_LINE_LOOP);
        for (int i = 0; i < 360; i++) {
            double angle = Math.toRadians(i);
            glVertex2d(x + Math.cos(angle) * radius, y + Math.sin(angle) * radius);
        }
        glEnd();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.client.impl;
import java.io.IOException;
import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSClientFaultInjector;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.classification.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* Used by {@link org.apache.hadoop.hdfs.DFSClient} for renewing
* file-being-written leases on the namenode.
* When a file is opened for write (create or append),
* namenode stores a file lease for recording the identity of the writer.
* The writer (i.e. the DFSClient) is required to renew the lease periodically.
* When the lease is not renewed before it expires,
* the namenode considers the writer as failed and then it may either let
* another writer to obtain the lease or close the file.
* </p>
* <p>
* This class also provides the following functionality:
* <ul>
* <li>
* It maintains a map from (namenode, user) pairs to lease renewers.
* The same {@link LeaseRenewer} instance is used for renewing lease
* for all the {@link org.apache.hadoop.hdfs.DFSClient} to the same namenode and
* the same user.
* </li>
* <li>
* Each renewer maintains a list of {@link org.apache.hadoop.hdfs.DFSClient}.
* Periodically the leases for all the clients are renewed.
* A client is removed from the list when the client is closed.
* </li>
* <li>
* A thread per namenode per user is used by the {@link LeaseRenewer}
* to renew the leases.
* </li>
* </ul>
* <p>
*/
@InterfaceAudience.Private
public class LeaseRenewer {
  public static final Logger LOG = LoggerFactory.getLogger(LeaseRenewer.class);
  // Default grace period in milliseconds before an empty renewer may terminate.
  private static long leaseRenewerGraceDefault = 60*1000L;
  // Upper bound on the renewer thread's per-iteration sleep, in milliseconds.
  static final long LEASE_RENEWER_SLEEP_DEFAULT = 1000L;
  // Set once the first daemon has been started for this renewer instance;
  // put() refuses to start a second daemon when this is already true.
  private AtomicBoolean isLSRunning = new AtomicBoolean(false);
/** Get a {@link LeaseRenewer} instance */
public static LeaseRenewer getInstance(final String authority,
final UserGroupInformation ugi, final DFSClient dfsc) {
final LeaseRenewer r = Factory.INSTANCE.get(authority, ugi);
r.addClient(dfsc);
return r;
}
  /**
   * Remove the given renewer from the Factory.
   * Subsequent call will receive new {@link LeaseRenewer} instance.
   * @param renewer Instance to be cleared from Factory
   */
  public static void remove(LeaseRenewer renewer) {
    // Lock the renewer itself so removal cannot interleave with the daemon's
    // own shutdown path, which also synchronizes on the renewer instance.
    synchronized (renewer) {
      Factory.INSTANCE.remove(renewer);
    }
  }
/**
* A factory for sharing {@link LeaseRenewer} objects
* among {@link DFSClient} instances
* so that there is only one renewer per authority per user.
*/
private static class Factory {
private static final Factory INSTANCE = new Factory();
private static class Key {
/** Namenode info */
final String authority;
/** User info */
final UserGroupInformation ugi;
private Key(final String authority, final UserGroupInformation ugi) {
if (authority == null) {
throw new HadoopIllegalArgumentException("authority == null");
} else if (ugi == null) {
throw new HadoopIllegalArgumentException("ugi == null");
}
this.authority = authority;
this.ugi = ugi;
}
@Override
public int hashCode() {
return authority.hashCode() ^ ugi.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj instanceof Key) {
final Key that = (Key)obj;
return this.authority.equals(that.authority)
&& this.ugi.equals(that.ugi);
}
return false;
}
@Override
public String toString() {
return ugi.getShortUserName() + "@" + authority;
}
}
/** A map for per user per namenode renewers. */
private final Map<Key, LeaseRenewer> renewers = new HashMap<>();
/** Get a renewer. */
private synchronized LeaseRenewer get(final String authority,
final UserGroupInformation ugi) {
final Key k = new Key(authority, ugi);
LeaseRenewer r = renewers.get(k);
if (r == null) {
r = new LeaseRenewer(k);
renewers.put(k, r);
}
return r;
}
/** Remove the given renewer. */
private synchronized void remove(final LeaseRenewer r) {
final LeaseRenewer stored = renewers.get(r.factorykey);
//Since a renewer may expire, the stored renewer can be different.
if (r == stored) {
// Expire LeaseRenewer daemon thread as soon as possible.
r.clearClients();
r.setEmptyTime(0);
renewers.remove(r.factorykey);
}
}
}
  /** The time in milliseconds that the map became empty.
   *  Long.MAX_VALUE means "not currently empty". */
  private long emptyTime = Long.MAX_VALUE;
  /** A fixed lease renewal time period in milliseconds.
   *  Half the soft limit by default; may shrink in addClient(). */
  private long renewal = HdfsConstants.LEASE_SOFTLIMIT_PERIOD / 2;
  /** A daemon for renewing lease */
  private Daemon daemon = null;
  /** Only the daemon with currentId should run. */
  private int currentId = 0;
  /**
   * A period in milliseconds that the lease renewer thread should run
   * after the map became empty.
   * In other words,
   * if the map is empty for a time period longer than the grace period,
   * the renewer should terminate.
   */
  private long gracePeriod;
  /**
   * The time period in milliseconds
   * that the renewer sleeps for each iteration.
   */
  private long sleepPeriod;
  /** The (authority, user) key this renewer was created under. */
  private final Factory.Key factorykey;
  /** A list of clients corresponding to this renewer. */
  private final List<DFSClient> dfsclients = new ArrayList<>();
  /**
   * A stringified stack trace of the call stack when the Lease Renewer
   * was instantiated. This is only generated if trace-level logging is
   * enabled on this class.
   */
  private final String instantiationTrace;
private LeaseRenewer(Factory.Key factorykey) {
this.factorykey = factorykey;
unsyncSetGraceSleepPeriod(leaseRenewerGraceDefault);
if (LOG.isTraceEnabled()) {
instantiationTrace = StringUtils.stringifyException(
new Throwable("TRACE"));
} else {
instantiationTrace = null;
}
}
  /** @return the renewal time in milliseconds. */
  private synchronized long getRenewalTime() {
    return renewal;
  }
  /** Used for testing only: overrides the computed renewal period. */
  @VisibleForTesting
  public synchronized void setRenewalTime(final long renewal) {
    this.renewal = renewal;
  }
/** Add a client. */
private synchronized void addClient(final DFSClient dfsc) {
for(DFSClient c : dfsclients) {
if (c == dfsc) {
//client already exists, nothing to do.
return;
}
}
//client not found, add it
dfsclients.add(dfsc);
//update renewal time
final int hdfsTimeout = dfsc.getConf().getHdfsTimeout();
if (hdfsTimeout > 0) {
final long half = hdfsTimeout/2;
if (half < renewal) {
this.renewal = half;
}
}
}
  /** Drops all registered clients; used when this renewer is being expired. */
  private synchronized void clearClients() {
    dfsclients.clear();
  }
private synchronized boolean clientsRunning() {
for(Iterator<DFSClient> i = dfsclients.iterator(); i.hasNext(); ) {
if (!i.next().isClientRunning()) {
i.remove();
}
}
return !dfsclients.isEmpty();
}
  /** @return the per-iteration sleep period in milliseconds. */
  private synchronized long getSleepPeriod() {
    return sleepPeriod;
  }
  /** Set the grace period and adjust the sleep period accordingly. */
  synchronized void setGraceSleepPeriod(final long gracePeriod) {
    unsyncSetGraceSleepPeriod(gracePeriod);
  }
private void unsyncSetGraceSleepPeriod(final long gracePeriod) {
if (gracePeriod < 100L) {
throw new HadoopIllegalArgumentException(gracePeriod
+ " = gracePeriod < 100ms is too small.");
}
this.gracePeriod = gracePeriod;
final long half = gracePeriod/2;
this.sleepPeriod = half < LEASE_RENEWER_SLEEP_DEFAULT?
half: LEASE_RENEWER_SLEEP_DEFAULT;
}
@VisibleForTesting
/** Is the daemon running? */
public synchronized boolean isRunning() {
return daemon != null && daemon.isAlive();
}
/** Does this renewer have nothing to renew? */
public boolean isEmpty() {
return dfsclients.isEmpty();
}
/** Used only by tests. Assumes the daemon has been started. */
synchronized String getDaemonName() {
return daemon.getName();
}
/** Is the empty period longer than the grace period? */
private synchronized boolean isRenewerExpired() {
// emptyTime == Long.MAX_VALUE means the client list has never been
// observed empty; otherwise compare the elapsed empty time to the grace.
return emptyTime != Long.MAX_VALUE
&& Time.monotonicNow() - emptyTime > gracePeriod;
}
/**
 * Register {@code dfsc} for lease renewal, starting the renewal daemon if
 * one is not already running for this renewer.
 *
 * @param dfsc the client whose leases should be renewed
 * @return false when a renewal daemon was already started once for this
 *         renewer (the caller must create a fresh LeaseRenewer and retry
 *         there); true otherwise
 */
public synchronized boolean put(final DFSClient dfsc) {
  if (dfsc.isClientRunning()) {
    if (!isRunning() || isRenewerExpired()) {
      // Start a new daemon with a new id.
      final int id = ++currentId;
      // Atomically claim the one-daemon-per-renewer slot. The original
      // code did a separate get() check followed by an unconditional
      // getAndSet(true) whose result was ignored; compareAndSet expresses
      // the same check-then-claim in a single atomic step.
      if (!isLSRunning.compareAndSet(false, true)) {
        // Not allowed to add multiple daemons into LeaseRenewer, let
        // client create a new LR and continue to acquire lease.
        return false;
      }
      daemon = new Daemon(new Runnable() {
        @Override
        public void run() {
          try {
            if (LOG.isDebugEnabled()) {
              LOG.debug("Lease renewer daemon for " + clientsString()
                  + " with renew id " + id + " started");
            }
            LeaseRenewer.this.run(id);
          } catch (InterruptedException e) {
            LOG.debug("LeaseRenewer is interrupted.", e);
          } finally {
            // Always deregister from the factory so a dead renewer is
            // never handed out to new clients.
            synchronized (LeaseRenewer.this) {
              Factory.INSTANCE.remove(LeaseRenewer.this);
            }
            if (LOG.isDebugEnabled()) {
              LOG.debug("Lease renewer daemon for " + clientsString()
                  + " with renew id " + id + " exited");
            }
          }
        }

        @Override
        public String toString() {
          return String.valueOf(LeaseRenewer.this);
        }
      });
      daemon.start();
    }
    // A client is (re)registered: the renewer is no longer "empty".
    emptyTime = Long.MAX_VALUE;
  }
  return true;
}
@VisibleForTesting
/** Used for testing only: overrides the time the client list went empty. */
synchronized void setEmptyTime(long time) {
emptyTime = time;
}
/**
 * Close the given client: remove it from this renewer, deregister the
 * renewer from the factory if it is already dead, and recompute the
 * renewal interval if this client was the one dictating it.
 */
public synchronized void closeClient(final DFSClient dfsc) {
dfsclients.remove(dfsc);
if (dfsclients.isEmpty()) {
if (!isRunning() || isRenewerExpired()) {
// Daemon already gone (or expired): this renewer is unusable, drop it.
Factory.INSTANCE.remove(LeaseRenewer.this);
return;
}
if (emptyTime == Long.MAX_VALUE) {
//discover the first time that the client list is empty.
emptyTime = Time.monotonicNow();
}
}
//update renewal time
// If the removed client's timeout was the one that set the current
// renewal interval, recompute it from the remaining clients.
if (renewal == dfsc.getConf().getHdfsTimeout()/2) {
long min = HdfsConstants.LEASE_SOFTLIMIT_PERIOD;
for(DFSClient c : dfsclients) {
final int timeout = c.getConf().getHdfsTimeout();
if (timeout > 0 && timeout < min) {
min = timeout;
}
}
renewal = min/2;
}
}
/**
 * Interrupt the renewal daemon (if running) and wait for it to terminate.
 * The join happens outside the lock so the daemon's shutdown path, which
 * synchronizes on this renewer, cannot deadlock against us.
 */
public void interruptAndJoin() throws InterruptedException {
Daemon daemonCopy = null;
synchronized (this) {
if (isRunning()) {
daemon.interrupt();
daemonCopy = daemon;
}
}
if (daemonCopy != null) {
LOG.debug("Wait for lease checker to terminate");
daemonCopy.join();
}
}
/**
 * Renew the lease of every registered client, renewing at most once per
 * distinct client name (clients sharing a name share a lease).
 *
 * @throws IOException if a renewal RPC fails
 */
private void renew() throws IOException {
  final List<DFSClient> snapshot;
  synchronized (this) {
    snapshot = new ArrayList<>(dfsclients);
  }
  // Sort by client name so duplicates become adjacent and are renewed once.
  Collections.sort(snapshot, new Comparator<DFSClient>() {
    @Override
    public int compare(final DFSClient a, final DFSClient b) {
      return a.getClientName().compareTo(b.getClientName());
    }
  });
  String lastRenewedName = "";
  for (final DFSClient client : snapshot) {
    final String name = client.getClientName();
    if (name.equals(lastRenewedName)) {
      continue; // same name as the previous client: lease already renewed
    }
    if (!client.renewLease()) {
      LOG.debug("Did not renew lease for client {}", client);
      continue;
    }
    lastRenewedName = name;
    LOG.debug("Lease renewed for client {}", lastRenewedName);
  }
}
/**
* Periodically check in with the namenode and renew all the leases
* when the lease period is half over.
*/
private void run(final int id) throws InterruptedException {
for(long lastRenewed = Time.monotonicNow(); !Thread.interrupted();
Thread.sleep(getSleepPeriod())) {
final long elapsed = Time.monotonicNow() - lastRenewed;
if (elapsed >= getRenewalTime()) {
try {
renew();
if (LOG.isDebugEnabled()) {
LOG.debug("Lease renewer daemon for " + clientsString()
+ " with renew id " + id + " executed");
}
lastRenewed = Time.monotonicNow();
} catch (SocketTimeoutException ie) {
// A timeout is treated as fatal: abort all files being written and
// tear this renewer down so clients fail fast instead of hanging.
LOG.warn("Failed to renew lease for " + clientsString() + " for "
+ (elapsed/1000) + " seconds. Aborting ...", ie);
List<DFSClient> dfsclientsCopy;
synchronized (this) {
DFSClientFaultInjector.get().delayWhenRenewLeaseTimeout();
dfsclientsCopy = new ArrayList<>(dfsclients);
Factory.INSTANCE.remove(LeaseRenewer.this);
}
// Close files outside the lock: closeAllFilesBeingWritten may block.
for (DFSClient dfsClient : dfsclientsCopy) {
dfsClient.closeAllFilesBeingWritten(true);
}
break;
} catch (IOException ie) {
// Other IO failures are retried on the next loop iteration.
LOG.warn("Failed to renew lease for " + clientsString() + " for "
+ (elapsed/1000) + " seconds. Will retry shortly ...", ie);
}
}
synchronized(this) {
if (id != currentId || isRenewerExpired()) {
if (LOG.isDebugEnabled()) {
if (id != currentId) {
LOG.debug("Lease renewer daemon for " + clientsString()
+ " with renew id " + id + " is not current");
} else {
LOG.debug("Lease renewer daemon for " + clientsString()
+ " with renew id " + id + " expired");
}
}
//no longer the current daemon or expired
return;
}
// if no clients are in running state or there is no more clients
// registered with this renewer, stop the daemon after the grace
// period.
if (!clientsRunning() && emptyTime == Long.MAX_VALUE) {
emptyTime = Time.monotonicNow();
}
}
}
}
@Override
public String toString() {
  // Base form; the instantiation trace is appended only when trace
  // logging is on (it is only captured in that case).
  final String base = getClass().getSimpleName() + ":" + factorykey;
  if (!LOG.isTraceEnabled()) {
    return base;
  }
  return base + ", clients=" + clientsString()
      + ", created at " + instantiationTrace;
}
/** @return a bracketed, comma-separated list of all client names. */
private synchronized String clientsString() {
  final StringBuilder b = new StringBuilder("[");
  String separator = "";
  for (DFSClient c : dfsclients) {
    b.append(separator).append(c.getClientName());
    separator = ", ";
  }
  return b.append("]").toString();
}
@VisibleForTesting
/** Used for testing only: overrides the class-wide default grace period. */
public static void setLeaseRenewerGraceDefault(
long leaseRenewerGraceDefault) {
LeaseRenewer.leaseRenewerGraceDefault = leaseRenewerGraceDefault;
}
}
| |
package com.neuronrobotics.bowlerstudio.util;
/*
* Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Neither the name of Oracle nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.io.File;
import java.io.IOException;
import java.nio.file.*;
import java.nio.file.attribute.*;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;

import static java.nio.file.StandardWatchEventKinds.*;

import com.neuronrobotics.bowlerstudio.IssueReportingExceptionHandler;
/**
 * Watches a single file for create/modify/delete events via the NIO
 * {@link WatchService} and forwards matching events to registered
 * {@link IFileChangeListener}s.
 *
 * <p>One watcher is cached per absolute file path; obtain instances through
 * {@link #watch(File)}. Each watcher owns a polling thread that runs until
 * {@link #close()} is called.
 */
public class FileChangeWatcher {

	/** The file to watch. */
	private File fileToWatch;

	/**
	 * Keeps the watcher thread looping. Volatile because {@link #close()}
	 * flips it from a different thread than the one polling.
	 */
	private volatile boolean run = true;

	/** The underlying NIO watch service. */
	private final WatchService watcher;

	/** Maps registered watch keys back to the directory they watch. */
	private final Map<WatchKey, Path> keys;

	/** Whether sub-directories are registered as well. */
	private final boolean recursive = false;

	/**
	 * Registered listeners. Copy-on-write so the watcher thread can iterate
	 * safely while listeners are added/removed from other threads.
	 */
	private final CopyOnWriteArrayList<IFileChangeListener> listeners = new CopyOnWriteArrayList<>();

	/** One active watcher per absolute file path. */
	private static HashMap<String, FileChangeWatcher> activeListener = new HashMap<String, FileChangeWatcher>();

	/** The thread performing the polling loop. */
	private Thread watcherThread = null;

	/**
	 * Close every active watcher and clear the cache.
	 */
	public static void clearAll() {
		// Iterate over a copy: close() removes entries from activeListener,
		// which would otherwise invalidate the iteration.
		for (FileChangeWatcher w : new ArrayList<FileChangeWatcher>(activeListener.values())) {
			w.close();
		}
		activeListener.clear();
	}

	/**
	 * Notify the listeners of the watcher registered for the given file, if
	 * any, that the file was deleted.
	 *
	 * @param fileToWatch the file that was deleted
	 */
	public static void notifyOfDelete(File fileToWatch) {
		String path = fileToWatch.getAbsolutePath();
		FileChangeWatcher active = activeListener.get(path);
		if (active != null) {
			// Snapshot so callbacks may add/remove listeners without
			// disturbing this loop.
			ArrayList<IFileChangeListener> snapshot = new ArrayList<>(active.listeners);
			for (IFileChangeListener l : snapshot) {
				l.onFileDelete(fileToWatch);
			}
		}
	}

	/**
	 * Close the watcher registered for the given file, if one exists.
	 *
	 * @param fileToWatch the file whose watcher should be closed
	 */
	public static void close(File fileToWatch) {
		String path = fileToWatch.getAbsolutePath();
		FileChangeWatcher active = activeListener.get(path);
		if (active != null) {
			active.close();
		}
	}

	/**
	 * Start watching a file.
	 *
	 * @param fileToWatch a file that should be watched
	 * @return the (possibly cached) watcher object for this file
	 * @throws IOException if the watch service cannot be created
	 */
	public static FileChangeWatcher watch(File fileToWatch) throws IOException {
		String path = fileToWatch.getAbsolutePath();
		if (activeListener.get(path) == null) {
			activeListener.put(path, new FileChangeWatcher(fileToWatch));
			System.err.println("Adding file to listening " + fileToWatch.getAbsolutePath());
		}
		return activeListener.get(path);
	}

	/**
	 * Instantiates a new file change watcher and starts its polling thread.
	 *
	 * @param fileToWatch the file to watch
	 * @throws IOException Signals that an I/O exception has occurred.
	 */
	private FileChangeWatcher(File fileToWatch) throws IOException {
		this.setFileToWatch(fileToWatch);
		this.watcher = FileSystems.getDefault().newWatchService();
		this.keys = new HashMap<WatchKey, Path>();
		// WatchService registers directories, so watch the file's parent.
		Path dir = Paths.get(fileToWatch.getParent());
		if (recursive) {
			System.out.format("Scanning %s ...\n", dir);
			registerAll(dir);
			System.out.println("Done.");
		} else {
			register(dir);
		}
		watcherThread = new Thread() {
			public void run() {
				setName("File Watcher Thread " + fileToWatch.getName());
				Thread.currentThread().setUncaughtExceptionHandler(new IssueReportingExceptionHandler());
				while (run) {
					try {
						watch();
					} catch (Exception ex) {
						ex.printStackTrace();
					}
					try {
						// Small delay between polls to avoid spinning.
						Thread.sleep(100);
					} catch (InterruptedException e) {
						e.printStackTrace();
					}
				}
			}
		};
		watcherThread.start();
	}

	/**
	 * Adds a file change listener. Duplicate registrations are ignored.
	 *
	 * @param l the listener
	 */
	public void addIFileChangeListener(IFileChangeListener l) {
		// addIfAbsent gives the contains+add of the original atomically.
		listeners.addIfAbsent(l);
	}

	/**
	 * Removes a file change listener; a no-op if it was never registered.
	 *
	 * @param l the listener
	 */
	public void removeIFileChangeListener(IFileChangeListener l) {
		// remove() already tolerates absent elements; no contains() needed.
		listeners.remove(l);
	}

	/**
	 * Narrows a raw watch event to the expected generic type.
	 *
	 * @param <T> the generic type
	 * @param event the event
	 * @return the watch event
	 */
	@SuppressWarnings("unchecked")
	static <T> WatchEvent<T> cast(WatchEvent<?> event) {
		return (WatchEvent<T>) event;
	}

	/**
	 * Register the given directory with the WatchService.
	 *
	 * @param dir the directory
	 * @throws IOException Signals that an I/O exception has occurred.
	 */
	private void register(Path dir) throws IOException {
		WatchKey key = dir.register(watcher, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY);
		keys.put(key, dir);
	}

	/**
	 * Register the given directory, and all its sub-directories, with the
	 * WatchService.
	 *
	 * @param start the root directory
	 * @throws IOException Signals that an I/O exception has occurred.
	 */
	private void registerAll(final Path start) throws IOException {
		// register directory and sub-directories
		Files.walkFileTree(start, new SimpleFileVisitor<Path>() {
			@Override
			public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
				register(dir);
				return FileVisitResult.CONTINUE;
			}
		});
	}

	/**
	 * Perform one watch iteration: block for a signalled key, dispatch events
	 * that concern the watched file to the listeners, then reset the key.
	 */
	public void watch() {
		// wait for key to be signalled
		WatchKey key;
		try {
			key = watcher.take();
		} catch (Exception x) {
			// Interrupted or the service was closed; return to the poll loop.
			return;
		}
		if (!run)
			return;
		Path dir = keys.get(key);
		if (dir == null) {
			System.err.println("WatchKey not recognized!!");
			return;
		}
		for (WatchEvent<?> event : key.pollEvents()) {
			WatchEvent.Kind<?> kind = event.kind();
			if (kind == OVERFLOW) {
				// Events were lost; nothing useful to dispatch.
				continue;
			}
			// Context for a directory entry event is the file name of entry.
			WatchEvent<Path> ev = cast(event);
			Path name = ev.context();
			Path child = dir.resolve(name);
			try {
				// Only react to events on the exact file being watched.
				if (!child.toFile().getCanonicalPath().equals(fileToWatch.getCanonicalPath())) {
					continue;
				}
				System.err.println("File Changed: " + getFileToWatch().getAbsolutePath());
				for (IFileChangeListener listener : listeners) {
					listener.onFileChange(child.toFile(), event);
					Thread.sleep(50);// pad out the events to avoid file box
										// overwrites
				}
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
		// reset key and remove from set if directory no longer accessible
		boolean valid = key.reset();
		if (!valid) {
			keys.remove(key);
		}
	}

	/**
	 * Gets the file to watch.
	 *
	 * @return the file to watch
	 */
	public File getFileToWatch() {
		return fileToWatch;
	}

	/**
	 * Sets the file to watch.
	 *
	 * @param fileToWatch the new file to watch
	 */
	public void setFileToWatch(File fileToWatch) {
		this.fileToWatch = fileToWatch;
	}

	/**
	 * Checks if the polling thread should keep running.
	 *
	 * @return true, if running
	 */
	public boolean isRun() {
		return run;
	}

	/**
	 * Stop the polling thread, close the watch service and deregister this
	 * watcher from the active-listener cache.
	 */
	public void close() {
		this.run = false;
		try {
			System.err.println("Closing watcher for " + fileToWatch.getAbsolutePath());
			watcher.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
		activeListener.remove(fileToWatch.getAbsolutePath());
	}
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.ovr;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* rendering information for each eye. Computed by either {@link OVR#ovr_GetRenderDesc} based on the specified FOV. Note that the rendering viewport is not
* included here as it can be specified separately and modified per frame by passing different viewport values in the layer structure.
*
* <h3>Member documentation</h3>
*
* <ul>
* <li>{@code Eye} – the eye index this instance corresponds to. One of:<br><table><tr><td>{@link OVR#ovrEye_Left Eye_Left}</td><td>{@link OVR#ovrEye_Right Eye_Right}</td></tr></table></li>
* <li>{@code Fov} – the field of view</li>
* <li>{@code DistortedViewport} – distortion viewport</li>
* <li>{@code PixelsPerTanAngleAtCenter} – wow many display pixels will fit in tan(angle) = 1</li>
* <li>{@code HmdToEyeOffset} – translation of each eye, in meters.</li>
* </ul>
*
* <h3>Layout</h3>
*
* <pre><code>struct ovrEyeRenderDesc {
ovrEyeType Eye;
{@link OVRFovPort ovrFovPort} Fov;
{@link OVRRecti ovrRecti} DistortedViewport;
{@link OVRVector2f ovrVector2f} PixelsPerTanAngleAtCenter;
{@link OVRVector3f ovrVector3f} HmdToEyeOffset;
}</code></pre>
*/
public class OVREyeRenderDesc extends Struct implements NativeResource {
/** The struct size in bytes. */
public static final int SIZEOF;
/** The struct alignment in bytes. */
public static final int ALIGNOF;
/** The struct member offsets. */
public static final int
EYE,
FOV,
DISTORTEDVIEWPORT,
PIXELSPERTANANGLEATCENTER,
HMDTOEYEOFFSET;
// Compute the native layout (size, alignment, member offsets) once at
// class-load time; member order must match the native ovrEyeRenderDesc.
static {
Layout layout = __struct(
__member(4),
__member(OVRFovPort.SIZEOF, OVRFovPort.ALIGNOF),
__member(OVRRecti.SIZEOF, OVRRecti.ALIGNOF),
__member(OVRVector2f.SIZEOF, OVRVector2f.ALIGNOF),
__member(OVRVector3f.SIZEOF, OVRVector3f.ALIGNOF)
);
SIZEOF = layout.getSize();
ALIGNOF = layout.getAlignment();
EYE = layout.offsetof(0);
FOV = layout.offsetof(1);
DISTORTEDVIEWPORT = layout.offsetof(2);
PIXELSPERTANANGLEATCENTER = layout.offsetof(3);
HMDTOEYEOFFSET = layout.offsetof(4);
}
// Wraps an existing native address; the container (may be null) keeps the
// backing buffer reachable while this struct is alive.
OVREyeRenderDesc(long address, ByteBuffer container) {
super(address, container);
}
/**
 * Creates a {@link OVREyeRenderDesc} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
 * visible to the struct instance and vice versa.
 *
 * <p>The created instance holds a strong reference to the container object.</p>
 */
public OVREyeRenderDesc(ByteBuffer container) {
this(memAddress(container), checkContainer(container, SIZEOF));
}
@Override
public int sizeof() { return SIZEOF; }
/** Returns the value of the {@code Eye} field. */
public int Eye() { return nEye(address()); }
/** Returns a {@link OVRFovPort} view of the {@code Fov} field. */
public OVRFovPort Fov() { return nFov(address()); }
/** Returns a {@link OVRRecti} view of the {@code DistortedViewport} field. */
public OVRRecti DistortedViewport() { return nDistortedViewport(address()); }
/** Returns a {@link OVRVector2f} view of the {@code PixelsPerTanAngleAtCenter} field. */
public OVRVector2f PixelsPerTanAngleAtCenter() { return nPixelsPerTanAngleAtCenter(address()); }
/** Returns a {@link OVRVector3f} view of the {@code HmdToEyeOffset} field. */
public OVRVector3f HmdToEyeOffset() { return nHmdToEyeOffset(address()); }
// -----------------------------------
/** Returns a new {@link OVREyeRenderDesc} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
public static OVREyeRenderDesc malloc() {
return create(nmemAlloc(SIZEOF));
}
/** Returns a new {@link OVREyeRenderDesc} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
public static OVREyeRenderDesc calloc() {
return create(nmemCalloc(1, SIZEOF));
}
/** Returns a new {@link OVREyeRenderDesc} instance allocated with {@link BufferUtils}. */
public static OVREyeRenderDesc create() {
return new OVREyeRenderDesc(BufferUtils.createByteBuffer(SIZEOF));
}
/** Returns a new {@link OVREyeRenderDesc} instance for the specified memory address or {@code null} if the address is {@code NULL}. */
public static OVREyeRenderDesc create(long address) {
return address == NULL ? null : new OVREyeRenderDesc(address, null);
}
/**
 * Returns a new {@link OVREyeRenderDesc.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
 *
 * @param capacity the buffer capacity
 */
public static Buffer malloc(int capacity) {
return create(nmemAlloc(capacity * SIZEOF), capacity);
}
/**
 * Returns a new {@link OVREyeRenderDesc.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
 *
 * @param capacity the buffer capacity
 */
public static Buffer calloc(int capacity) {
return create(nmemCalloc(capacity, SIZEOF), capacity);
}
/**
 * Returns a new {@link OVREyeRenderDesc.Buffer} instance allocated with {@link BufferUtils}.
 *
 * @param capacity the buffer capacity
 */
public static Buffer create(int capacity) {
return new Buffer(BufferUtils.createByteBuffer(capacity * SIZEOF));
}
/**
 * Create a {@link OVREyeRenderDesc.Buffer} instance at the specified memory.
 *
 * @param address the memory address
 * @param capacity the buffer capacity
 */
public static Buffer create(long address, int capacity) {
return address == NULL ? null : new Buffer(address, null, -1, 0, capacity, capacity);
}
// -----------------------------------
/** Returns a new {@link OVREyeRenderDesc} instance allocated on the thread-local {@link MemoryStack}. */
public static OVREyeRenderDesc mallocStack() {
return mallocStack(stackGet());
}
/** Returns a new {@link OVREyeRenderDesc} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero. */
public static OVREyeRenderDesc callocStack() {
return callocStack(stackGet());
}
/**
 * Returns a new {@link OVREyeRenderDesc} instance allocated on the specified {@link MemoryStack}.
 *
 * @param stack the stack from which to allocate
 */
public static OVREyeRenderDesc mallocStack(MemoryStack stack) {
return create(stack.nmalloc(ALIGNOF, SIZEOF));
}
/**
 * Returns a new {@link OVREyeRenderDesc} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
 *
 * @param stack the stack from which to allocate
 */
public static OVREyeRenderDesc callocStack(MemoryStack stack) {
return create(stack.ncalloc(ALIGNOF, 1, SIZEOF));
}
/**
 * Returns a new {@link OVREyeRenderDesc.Buffer} instance allocated on the thread-local {@link MemoryStack}.
 *
 * @param capacity the buffer capacity
 */
public static Buffer mallocStack(int capacity) {
return mallocStack(capacity, stackGet());
}
/**
 * Returns a new {@link OVREyeRenderDesc.Buffer} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero.
 *
 * @param capacity the buffer capacity
 */
public static Buffer callocStack(int capacity) {
return callocStack(capacity, stackGet());
}
/**
 * Returns a new {@link OVREyeRenderDesc.Buffer} instance allocated on the specified {@link MemoryStack}.
 *
 * @param stack the stack from which to allocate
 * @param capacity the buffer capacity
 */
public static Buffer mallocStack(int capacity, MemoryStack stack) {
return create(stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
}
/**
 * Returns a new {@link OVREyeRenderDesc.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
 *
 * @param stack the stack from which to allocate
 * @param capacity the buffer capacity
 */
public static Buffer callocStack(int capacity, MemoryStack stack) {
return create(stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
}
// -----------------------------------
/** Unsafe version of {@link #Eye}. */
public static int nEye(long struct) { return memGetInt(struct + OVREyeRenderDesc.EYE); }
/** Unsafe version of {@link #Fov}. */
public static OVRFovPort nFov(long struct) { return OVRFovPort.create(struct + OVREyeRenderDesc.FOV); }
/** Unsafe version of {@link #DistortedViewport}. */
public static OVRRecti nDistortedViewport(long struct) { return OVRRecti.create(struct + OVREyeRenderDesc.DISTORTEDVIEWPORT); }
/** Unsafe version of {@link #PixelsPerTanAngleAtCenter}. */
public static OVRVector2f nPixelsPerTanAngleAtCenter(long struct) { return OVRVector2f.create(struct + OVREyeRenderDesc.PIXELSPERTANANGLEATCENTER); }
/** Unsafe version of {@link #HmdToEyeOffset}. */
public static OVRVector3f nHmdToEyeOffset(long struct) { return OVRVector3f.create(struct + OVREyeRenderDesc.HMDTOEYEOFFSET); }
// -----------------------------------
/** An array of {@link OVREyeRenderDesc} structs. */
public static class Buffer extends StructBuffer<OVREyeRenderDesc, Buffer> implements NativeResource {
/**
 * Creates a new {@link OVREyeRenderDesc.Buffer} instance backed by the specified container.
 *
 * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
 * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
 * by {@link OVREyeRenderDesc#SIZEOF}, and its mark will be undefined.
 *
 * <p>The created buffer instance holds a strong reference to the container object.</p>
 */
public Buffer(ByteBuffer container) {
super(container, container.remaining() / SIZEOF);
}
Buffer(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
super(address, container, mark, pos, lim, cap);
}
@Override
protected Buffer self() {
return this;
}
@Override
protected Buffer newBufferInstance(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
return new Buffer(address, container, mark, pos, lim, cap);
}
@Override
protected OVREyeRenderDesc newInstance(long address) {
return new OVREyeRenderDesc(address, container);
}
@Override
protected int sizeof() {
return SIZEOF;
}
/** Returns the value of the {@code Eye} field. */
public int Eye() { return OVREyeRenderDesc.nEye(address()); }
/** Returns a {@link OVRFovPort} view of the {@code Fov} field. */
public OVRFovPort Fov() { return OVREyeRenderDesc.nFov(address()); }
/** Returns a {@link OVRRecti} view of the {@code DistortedViewport} field. */
public OVRRecti DistortedViewport() { return OVREyeRenderDesc.nDistortedViewport(address()); }
/** Returns a {@link OVRVector2f} view of the {@code PixelsPerTanAngleAtCenter} field. */
public OVRVector2f PixelsPerTanAngleAtCenter() { return OVREyeRenderDesc.nPixelsPerTanAngleAtCenter(address()); }
/** Returns a {@link OVRVector3f} view of the {@code HmdToEyeOffset} field. */
public OVRVector3f HmdToEyeOffset() { return OVREyeRenderDesc.nHmdToEyeOffset(address()); }
}
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://wso2.com) All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.andes.kernel.subscription;
import com.googlecode.cqengine.query.Query;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.andes.amqp.AMQPUtils;
import org.wso2.andes.kernel.AndesContext;
import org.wso2.andes.kernel.AndesContextStore;
import org.wso2.andes.kernel.AndesException;
import org.wso2.andes.kernel.ClusterNotificationListener;
import org.wso2.andes.kernel.ProtocolType;
import org.wso2.andes.kernel.SubscriptionListener;
import org.wso2.andes.kernel.disruptor.inbound.InboundSubscriptionEvent;
import org.wso2.andes.kernel.disruptor.inbound.InboundSubscriptionSyncEvent;
import org.wso2.andes.kernel.registry.StorageQueueRegistry;
import org.wso2.andes.kernel.registry.SubscriptionRegistry;
import org.wso2.andes.metrics.MetricsConstants;
import org.wso2.andes.mqtt.utils.MQTTUtils;
import org.wso2.andes.server.ClusterResourceHolder;
import org.wso2.andes.server.cluster.coordination.ClusterNotificationAgent;
import org.wso2.andes.server.cluster.coordination.CoordinationComponentFactory;
import org.wso2.andes.server.cluster.error.detection.NetworkPartitionListener;
import org.wso2.carbon.metrics.manager.Gauge;
import org.wso2.carbon.metrics.manager.Level;
import org.wso2.carbon.metrics.manager.MetricManager;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import static com.googlecode.cqengine.query.QueryFactory.and;
import static com.googlecode.cqengine.query.QueryFactory.equal;
/**
* Managers subscription add/remove and subscription query tasks inside Andes kernel
*/
public class AndesSubscriptionManager implements NetworkPartitionListener {
private static Log log = LogFactory.getLog(AndesSubscriptionManager.class);
/**
* Factory for creating subscriptions.
*/
private AndesSubscriptionFactory subscriptionFactory;
/**
* Broker wide registry for storing subscriptions
*/
private SubscriptionRegistry subscriptionRegistry;
/**
* Broker wide registry for storing queues
*/
private StorageQueueRegistry storageQueueRegistry;
/**
* ID of the local node
*/
private String localNodeId;
/**
* True when the minimum node count is not fulfilled, False otherwise
*/
private volatile boolean isNetworkPartitioned;
/**
* Listeners who are interested in local subscription changes
*/
private List<SubscriptionListener> subscriptionListeners = new ArrayList<>();
/**
* Agent for notifying local subscription changes to cluster
*/
private ClusterNotificationAgent clusterNotificationAgent;
/**
* Persistent store storing message router, queue, binding
* and subscription information
*/
private AndesContextStore andesContextStore;
/**
 * Create a AndesSubscription manager instance. This is a static class managing
 * subscriptions.
 *
 * @param subscriptionRegistry Registry storing subscriptions
 * @param andesContextStore    Persistent store storing message router, queue, binding
 *                             and subscription information
 * @throws AndesException on failure to create the cluster notification agent
 */
public AndesSubscriptionManager(SubscriptionRegistry subscriptionRegistry, AndesContextStore andesContextStore)
throws AndesException {
this.subscriptionRegistry = subscriptionRegistry;
this.isNetworkPartitioned = false;
this.subscriptionFactory = new AndesSubscriptionFactory();
this.storageQueueRegistry = AndesContext.getInstance().getStorageQueueRegistry();
this.andesContextStore = andesContextStore;
this.localNodeId = ClusterResourceHolder.getInstance().getClusterManager().getMyNodeID();
CoordinationComponentFactory coordinationComponentFactory = new CoordinationComponentFactory();
this.clusterNotificationAgent = coordinationComponentFactory.createClusterNotificationAgent();
if (AndesContext.getInstance().isClusteringEnabled()) {
// network partition detection works only when clustered.
AndesContext.getInstance().getClusterAgent().addNetworkPartitionListener(10, this);
}
//Add subscribers gauge to metrics manager
MetricManager.gauge(MetricsConstants.QUEUE_SUBSCRIBERS, Level.INFO, new QueueSubscriberGauge());
//Add topic gauge to metrics manager
MetricManager.gauge(MetricsConstants.TOPIC_SUBSCRIBERS, Level.INFO, new TopicSubscriberGauge());
}
/**
 * Register a subscription listener.
 * It will be notified when a subscription change happens.
 *
 * @param listener subscription listener
 */
public void addSubscriptionListener(SubscriptionListener listener) {
subscriptionListeners.add(listener);
}
/**
 * Register a subscription in the broker-wide subscription registry.
 *
 * @param subscriptionToAdd subscription to register
 */
public void registerSubscription(AndesSubscription subscriptionToAdd) {
subscriptionRegistry.registerSubscription(subscriptionToAdd);
}
/**
 * Create a local subscription (one with a physical connection on this node),
 * bind it to its storage queue, register it, persist it, and notify both
 * local listeners and the rest of the cluster.
 *
 * @param subscriptionRequest inbound subscription open request
 * @throws AndesException on store/notification failure, or
 *         SubscriptionException when the cluster is network-partitioned
 */
public void addLocalSubscription(InboundSubscriptionEvent subscriptionRequest) throws AndesException {
// We don't add Subscriptions when the minimum node count is not fulfilled
if (isNetworkPartitioned) {
throw new SubscriptionException("Cannot add new subscription due to network partition");
}
StorageQueue storageQueue = storageQueueRegistry
.getStorageQueue(subscriptionRequest.getBoundStorageQueueName());
AndesSubscription subscription = subscriptionFactory
.createLocalSubscription(subscriptionRequest, storageQueue);
//binding contains some validations. Thus register should happen after binding subscriber to queue
storageQueue.bindSubscription(subscription, subscriptionRequest.getRoutingKey());
registerSubscription(subscription);
//Store the subscription
andesContextStore.storeDurableSubscription(subscription);
log.info("Add Local subscription " + subscription.getProtocolType() + " " + subscription.toString());
notifySubscriptionListeners(subscription, ClusterNotificationListener.SubscriptionChange.Added);
clusterNotificationAgent.notifySubscriptionsChange(subscription,
ClusterNotificationListener.SubscriptionChange.Added);
}
/**
 * Create a remote subscription and register in subscription registry. This subscriber has no
 * physical connection in this node. It is not bound to any storage queue.
 *
 * @param subscriptionEvent Subscription sync request
 * @throws SubscriptionException when the cluster is network-partitioned
 */
public void addRemoteSubscription(InboundSubscriptionSyncEvent subscriptionEvent) throws SubscriptionException {
// We don't add Subscriptions when the minimum node count is not fulfilled
if (isNetworkPartitioned) {
throw new SubscriptionException("Cannot add new subscription due to network partition");
}
AndesSubscription remoteSubscription = new AndesSubscription(subscriptionEvent.getEncodedSubscription());
registerSubscription(remoteSubscription);
log.info("Sync subscription [create] " + remoteSubscription.getProtocolType() + " " + remoteSubscription
.toString());
}
public void closeLocalSubscription(InboundSubscriptionEvent closeSubscriptionEvent) throws AndesException {
UUID protocolChannel = closeSubscriptionEvent.getSubscriber().getProtocolChannelID();
AndesSubscription subscription = getSubscriptionByProtocolChannel(protocolChannel);
removeLocalSubscriptionAndNotify(subscription);
}
public void closeRemoteSubscription(InboundSubscriptionSyncEvent closeSubscriptionEvent) throws AndesException {
AndesSubscription closedSubRepresentation =
new AndesSubscription(closeSubscriptionEvent.getEncodedSubscription());
UUID protocolChannel = closedSubRepresentation.getSubscriberConnection().getProtocolChannelID();
AndesSubscription subscription = getSubscriptionByProtocolChannel(protocolChannel);
subscriptionRegistry.removeSubscription(subscription);
log.info("Sync subscription [close] " + subscription.getProtocolType() + " " + subscription.toString());
}
    /**
     * Remove a local subscription: take it out of the registry, unbind it from
     * its storage queue, delete its durable record, notify local subscription
     * listeners and finally notify the cluster of the close. The order matters —
     * the registry and binding are cleaned before listeners observe the close.
     *
     * @param subscription AndesSubscription to close
     * @throws AndesException on a storage or notification failure
     */
    private void removeLocalSubscriptionAndNotify(AndesSubscription subscription) throws AndesException {
        subscriptionRegistry.removeSubscription(subscription);
        subscription.getStorageQueue().unbindSubscription(subscription);
        andesContextStore.removeDurableSubscription(subscription);
        notifySubscriptionListeners(subscription, ClusterNotificationListener.SubscriptionChange.Closed);
        clusterNotificationAgent.notifySubscriptionsChange(subscription,
                ClusterNotificationListener.SubscriptionChange.Closed);
        log.info("Remove Local Subscription " + subscription.getProtocolType() + " " + subscription.toString());
    }
/**
* Get mock subscribers representing inactive durable topic subscriptions on broker
*
* @return List of inactive
*/
public List<AndesSubscription> getInactiveSubscriberRepresentations() {
List<AndesSubscription> inactiveSubscriptions = new ArrayList<>();
List<StorageQueue> storageQueues = AndesContext.getInstance().
getStorageQueueRegistry().getAllStorageQueues();
for (StorageQueue storageQueue : storageQueues) {
boolean isQueueDurable = storageQueue.isDurable();
if (isQueueDurable) {
//only durable queues are kept bounded to message routers
String messageRouterName = storageQueue.getMessageRouter().getName();
if (AMQPUtils.TOPIC_EXCHANGE_NAME.equals(messageRouterName)) {
List<AndesSubscription> boundSubscriptions = storageQueue.getBoundedSubscriptions();
if (boundSubscriptions.isEmpty()) {
AndesSubscription mockSubscription = new InactiveMockSubscriber(storageQueue.getName(),
storageQueue.getName(),
storageQueue, ProtocolType.AMQP);
inactiveSubscriptions.add(mockSubscription);
}
} else if (MQTTUtils.MQTT_EXCHANGE_NAME.equals(messageRouterName)) {
List<AndesSubscription> boundSubscriptions = storageQueue.getBoundedSubscriptions();
if (boundSubscriptions.isEmpty()) {
AndesSubscription mockSubscription = new InactiveMockSubscriber(storageQueue.getName(),
storageQueue.getName(),
storageQueue, ProtocolType.MQTT);
inactiveSubscriptions.add(mockSubscription);
}
}
}
}
return inactiveSubscriptions;
}
/**
* Remove the subscription from subscriptionRegistry
*
* @param channelID protocol channel ID
* @param nodeID ID of the node subscription bound to
*/
public void removeSubscriptionFromRegistry(UUID channelID, String nodeID) throws AndesException {
Query<AndesSubscription> subscriptionQuery = and
(equal(AndesSubscription.CHANNEL_ID, channelID), equal(AndesSubscription
.NODE_ID, nodeID));
for (AndesSubscription sub : subscriptionRegistry.exucuteQuery(subscriptionQuery)) {
removeLocalSubscriptionAndNotify(sub);
}
}
private void notifySubscriptionListeners(AndesSubscription subscription,
ClusterNotificationListener.SubscriptionChange changeType) throws
AndesException {
for (SubscriptionListener subscriptionListener : subscriptionListeners) {
subscriptionListener.handleSubscriptionsChange(subscription, changeType);
}
}
public AndesSubscription getSubscriptionByProtocolChannel(UUID channelID, ProtocolType
protocolType) {
Query<AndesSubscription> subscriptionQuery = and
(equal(AndesSubscription.CHANNEL_ID, channelID), equal(AndesSubscription
.NODE_ID, localNodeId), equal(AndesSubscription
.PROTOCOL, protocolType));
return subscriptionRegistry.exucuteQuery(subscriptionQuery).iterator().next();
}
public AndesSubscription getSubscriptionByProtocolChannel(UUID channelID) {
Query<AndesSubscription> subscriptionQuery = equal(AndesSubscription.CHANNEL_ID, channelID);
Iterable<AndesSubscription> subscriptions = subscriptionRegistry.exucuteQuery(subscriptionQuery);
Iterator<AndesSubscription> subIterator = subscriptions.iterator();
if (subIterator.hasNext()) {
return subIterator.next();
} else {
log.warn("No subscription found for channel ID " + channelID);
return null;
}
}
/**
* Get the AndesSubscription by subscription ID
* @param subscriptionId subscription ID to query
* @return matching subscription
*/
public AndesSubscription getSubscriptionById(String subscriptionId) {
Query<AndesSubscription> subscriptionQuery = equal(AndesSubscription.SUB_ID, subscriptionId);
Iterable<AndesSubscription> subscriptions = subscriptionRegistry.exucuteQuery(subscriptionQuery);
Iterator<AndesSubscription> subIterator = subscriptions.iterator();
if (subIterator.hasNext()) {
return subIterator.next();
} else {
log.warn("No subscription found for subscription ID " + subscriptionId);
return null;
}
}
public AndesSubscription getSubscriptionByNode(String nodeID, ProtocolType
protocolType) {
Query<AndesSubscription> subscriptionQuery = and
(equal(AndesSubscription
.NODE_ID, nodeID), equal(AndesSubscription
.PROTOCOL, protocolType));
return subscriptionRegistry.exucuteQuery(subscriptionQuery).iterator().next();
}
public Iterable<AndesSubscription> getAllLocalSubscriptions(ProtocolType protocolType) {
Query<AndesSubscription> subscriptionQuery = and
(equal(AndesSubscription
.NODE_ID, localNodeId), equal(AndesSubscription
.PROTOCOL, protocolType));
return subscriptionRegistry.exucuteQuery(subscriptionQuery);
}
/**
* Get all subscriptions connected locally
*
* @return list of AndesSubscription
*/
public Iterable<AndesSubscription> getAllLocalSubscriptions() {
Query<AndesSubscription> subscriptionQuery = (equal(AndesSubscription
.NODE_ID, localNodeId));
return subscriptionRegistry.exucuteQuery(subscriptionQuery);
}
/**
* Get all subscriptions in cluster bound to given queue
*
* @param protocolType protocol of subscriber
* @param storageQueueName name of queue subscriber is bound to
* @return Iterable over selected subscriptions
*/
public Iterable<AndesSubscription> getAllSubscriptionsByQueue(ProtocolType protocolType, String
storageQueueName) {
Query<AndesSubscription> subscriptionQuery = and
(equal(AndesSubscription
.PROTOCOL, protocolType), equal(AndesSubscription
.STORAGE_QUEUE_NAME, storageQueueName));
return subscriptionRegistry.exucuteQuery(subscriptionQuery);
}
public Iterable<AndesSubscription> getAllLocalSubscriptionsByQueue(ProtocolType protocolType, String
storageQueueName) {
Query<AndesSubscription> subscriptionQuery = and
(equal(AndesSubscription
.NODE_ID, localNodeId), equal(AndesSubscription
.PROTOCOL, protocolType), equal(AndesSubscription
.STORAGE_QUEUE_NAME, storageQueueName));
return subscriptionRegistry.exucuteQuery(subscriptionQuery);
}
public Iterable<AndesSubscription> getAllLocalSubscriptionsByRoutingKey(ProtocolType protocolType, String
routingKey) {
Query<AndesSubscription> subscriptionQuery = and
(equal(AndesSubscription
.NODE_ID, localNodeId), equal(AndesSubscription
.PROTOCOL, protocolType), equal(AndesSubscription
.ROUTING_KEY, routingKey));
return subscriptionRegistry.exucuteQuery(subscriptionQuery);
}
public Iterable<AndesSubscription> getAllSubscriptions(ProtocolType protocolType) {
Query<AndesSubscription> subscriptionQuery = equal(AndesSubscription
.PROTOCOL, protocolType);
return subscriptionRegistry.exucuteQuery(subscriptionQuery);
}
public Iterable<AndesSubscription> getAllSubscriptionsByQueue(String storageQueueName) {
Query<AndesSubscription> subscriptionQuery =
equal(AndesSubscription.STORAGE_QUEUE_NAME, storageQueueName);
return subscriptionRegistry.exucuteQuery(subscriptionQuery);
}
public Iterable<AndesSubscription> getAllSubscriptionsByRoutingKey(ProtocolType protocolType, String
routingKey) {
Query<AndesSubscription> subscriptionQuery = and
(equal(AndesSubscription
.PROTOCOL, protocolType), equal(AndesSubscription
.ROUTING_KEY, routingKey));
return subscriptionRegistry.exucuteQuery(subscriptionQuery);
}
public Iterable<AndesSubscription> getAllSubscriptionsByMessageRouter(ProtocolType protocolType, String
messageRouterName) {
Query<AndesSubscription> subscriptionQuery = and
(equal(AndesSubscription
.PROTOCOL, protocolType), equal(AndesSubscription
.ROUTER_NAME, messageRouterName));
return subscriptionRegistry.exucuteQuery(subscriptionQuery);
}
public Iterable<AndesSubscription> getAllLocalSubscriptionsByMessageRouter(ProtocolType protocolType,
String messageRouterName) {
Query<AndesSubscription> subscriptionQuery = and
(equal(AndesSubscription
.PROTOCOL, protocolType), equal(AndesSubscription
.ROUTER_NAME, messageRouterName), equal(AndesSubscription
.NODE_ID, localNodeId));
return subscriptionRegistry.exucuteQuery(subscriptionQuery);
}
/**
* Close all subscriptions belonging to a particular node. This is called
* when a node of cluster dis-joint from a cluster or get killed. This call
* closes subscriptions from local registry, update the DB, and notify other active
* nodes. If subscriptions are local it will forcefully disconnect subscriber from server side.
*
* @param nodeID ID of the node
* @throws AndesException
*/
public void closeAllActiveSubscriptionsOfNode(String nodeID) throws AndesException {
Query<AndesSubscription> subscriptionQuery = equal(AndesSubscription
.STATE, true);
for (AndesSubscription sub : subscriptionRegistry.exucuteQuery(subscriptionQuery)) {
SubscriberConnection connectionInfo = sub.getSubscriberConnection();
UUID channelID = connectionInfo.getProtocolChannelID();
sub.closeConnection(channelID, nodeID);
//simulate a local subscription remove. Notify the cluster
removeLocalSubscriptionAndNotify(sub);
}
}
public void closeAllLocalSubscriptionsBoundToQueue(String storageQueueName) throws AndesException {
StorageQueue queue = AndesContext.getInstance().
getStorageQueueRegistry().getStorageQueue(storageQueueName);
List<AndesSubscription> subscriptions = queue.getBoundedSubscriptions();
for (AndesSubscription subscription : subscriptions) {
SubscriberConnection connection = subscription.getSubscriberConnection();
UUID channelID = connection.getProtocolChannelID();
String nodeID = connection.getConnectedNode();
if (nodeID.equals(localNodeId)) {
subscription.closeConnection(channelID, nodeID);
removeLocalSubscriptionAndNotify(subscription);
}
}
}
/**
* Get Number of subscriptions cluster-wide by queue name
*
* @param queueName name of the queue
* @param protocolType ProtocolType (AMQP/MQTT)
* @return number of subscriptions
* @throws AndesException
*/
public int numberOfSubscriptionsInCluster(String queueName, ProtocolType protocolType)
throws AndesException {
Iterable<AndesSubscription> subscriptions = getAllSubscriptionsByQueue(protocolType, queueName);
List<AndesSubscription> subscriptionList = new ArrayList<>();
for (AndesSubscription subscription : subscriptions) {
subscriptionList.add(subscription);
}
return subscriptionList.size();
}
/**
* Forcefully disconnect all message consumers (/ subscribers) connected to
* this node. Typically broker node should do take such a action when a
* network partition happens ( since coordinator in other partition will
* also start distributing slots (hence messages) which will lead to
* inconsistent
* state in both partitions. Even if there is a exception trying to
* disconnect any of the connection this method will continue with other
* connections.
*/
public void forcefullyDisconnectAllLocalSubscriptions() throws AndesException {
Iterable<AndesSubscription> localSubscriptions = getAllLocalSubscriptions();
for (AndesSubscription localSubscription : localSubscriptions) {
localSubscription.forcefullyDisconnectConnections();
}
}
    /**
     * Remove all subscriber connections and subscriptions (where necessary)
     * that are bound to the queue specified. Unlike
     * {@code closeAllLocalSubscriptionsBoundToQueue}, this acts on every
     * matching subscription in the registry, not only the ones connected to
     * this node.
     *
     * @param storageQueueName name of the storageQueue
     * @throws AndesException on a storage or notification failure
     */
    public void closeAllSubscriptionsBoundToQueue(String storageQueueName) throws AndesException {
        Query<AndesSubscription> subscriptionQuery = equal(AndesSubscription
                .STORAGE_QUEUE_NAME, storageQueueName);
        Iterable<AndesSubscription> subscriptions
                = subscriptionRegistry.exucuteQuery(subscriptionQuery);
        for (AndesSubscription subscription : subscriptions) {
            SubscriberConnection connection = subscription.getSubscriberConnection();
            UUID channelID = connection.getProtocolChannelID();
            // NOTE(review): the node ID here may belong to a remote node, yet the
            // subscription still goes through the local-remove path below —
            // confirm this is intended for non-local subscriptions.
            String nodeID = connection.getConnectedNode();
            subscription.closeConnection(channelID, nodeID);
            //simulate a local subscription remove. Notify the cluster
            removeLocalSubscriptionAndNotify(subscription);
        }
    }
    /**
     * Close every active subscription connected to this (local) node.
     *
     * @throws AndesException on a storage or notification failure
     */
    public void closeAllActiveLocalSubscriptions() throws AndesException {
        closeAllActiveSubscriptionsOfNode(localNodeId);
    }
    /**
     * Notify cluster members with local subscriptions information after
     * recovering from a split brain scenario. Triggers the other nodes to
     * re-read subscription state from the DB.
     *
     * @throws AndesException on a notification failure
     */
    public void updateSubscriptionsAfterClusterMerge() throws AndesException {
        clusterNotificationAgent.notifyAnyDBChange();
    }
/**
* Reload subscriptions from DB storage and update subscription registry. This is a two step process
* 1. Sync the DB with the local subscriptions.
* 2. Sync the subscription registry with updated DB
*/
public void reloadSubscriptionsFromStorage() throws AndesException {
Map<String, List<String>> results = AndesContext.getInstance().getAndesContextStore()
.getAllStoredDurableSubscriptions();
Set<AndesSubscription> dbSubscriptions = new HashSet<>();
Set<AndesSubscription> localSubscriptions = new HashSet<>();
Set<AndesSubscription> copyOfLocalSubscriptions = new HashSet<>();
//get all local subscriptions in registry
Iterable<AndesSubscription> registeredLocalSubscriptions = getAllLocalSubscriptions();
for (AndesSubscription registeredLocalSubscription : registeredLocalSubscriptions) {
localSubscriptions.add(registeredLocalSubscription);
}
copyOfLocalSubscriptions.addAll(localSubscriptions);
//get all subscriptions in DB
for (Map.Entry<String, List<String>> entry : results.entrySet()) {
for (String subscriptionAsStr : entry.getValue()) {
AndesSubscription subscription = new AndesSubscription(subscriptionAsStr);
dbSubscriptions.add(subscription);
}
}
//if DB does not have the local subscription add it
localSubscriptions.removeAll(dbSubscriptions);
for (AndesSubscription subscription : localSubscriptions) {
log.warn("Subscriptions are not in sync. Local Subscription available "
+ "in subscription registry of node " + localNodeId
+ " but not in DB. Thus adding to DB subscription="
+ subscription.toString());
andesContextStore.storeDurableSubscription(subscription);
}
//if DB has additional local subscription that are not in registry, delete it
dbSubscriptions.removeAll(copyOfLocalSubscriptions);
for (AndesSubscription dbSubscription : dbSubscriptions) {
String nodeIDOfDBSub = dbSubscription.getSubscriberConnection().getConnectedNode();
if (localNodeId.equals(nodeIDOfDBSub)) {
log.warn("Subscriptions are not in sync. Local Subscription not available "
+ "in subscription registry of node " + localNodeId
+ " but is in DB. Thus removing from DB subscription= "
+ dbSubscription.toString());
andesContextStore.removeDurableSubscription(dbSubscription);
}
}
//Now as DB is synced with local subscriptions, check with all subscriptions
dbSubscriptions = new HashSet<>();
Map<String, List<String>> newResults = AndesContext.getInstance().getAndesContextStore()
.getAllStoredDurableSubscriptions();
for (Map.Entry<String, List<String>> entry : newResults.entrySet()) {
for (String subscriptionAsStr : entry.getValue()) {
AndesSubscription subscription = new AndesSubscription(subscriptionAsStr);
dbSubscriptions.add(subscription);
}
}
Set<AndesSubscription> allMemorySubscriptions = new HashSet<>();
Iterator<AndesSubscription> registeredSubscriptions = subscriptionRegistry.getAllSubscriptions();
while (registeredSubscriptions.hasNext()) {
allMemorySubscriptions.add(registeredSubscriptions.next());
}
//add and register subscriptions that are in DB but not in memory
dbSubscriptions.removeAll(allMemorySubscriptions);
for (AndesSubscription dbSubscription : dbSubscriptions) {
log.warn("Subscriptions are not in sync. Subscription not available "
+ "in subscription registry but is in DB. "
+ "Thus adding subscription to registry="
+ dbSubscription.toString());
subscriptionRegistry.registerSubscription(dbSubscription);
}
//remove the registered subscriptions that are not in DB
dbSubscriptions = new HashSet<>();
for (Map.Entry<String, List<String>> entry : newResults.entrySet()) {
for (String subscriptionAsStr : entry.getValue()) {
AndesSubscription subscription = new AndesSubscription(subscriptionAsStr);
dbSubscriptions.add(subscription);
}
}
allMemorySubscriptions.removeAll(dbSubscriptions);
for (AndesSubscription memorySubscription : allMemorySubscriptions) {
log.warn("Subscriptions are not in sync. Subscription is available "
+ "in subscription registry but not in DB. "
+ "Thus removing subscription from registry = "
+ memorySubscription.toString());
subscriptionRegistry.removeSubscription(memorySubscription);
}
}
/**
* Gauge will return total number of queue subscriptions for current node
*/
private class QueueSubscriberGauge implements Gauge<Integer> {
@Override
public Integer getValue() {
int count = 0;
for (AndesSubscription ignored : getAllLocalSubscriptionsByMessageRouter(ProtocolType.AMQP, AMQPUtils
.DIRECT_EXCHANGE_NAME)) {
count = count + 1;
}
return count;
}
}
/**
* Gauge will return total number of topic subscriptions current node
*/
private class TopicSubscriberGauge implements Gauge {
@Override
public Integer getValue() {
int count = 0;
for (AndesSubscription ignored : getAllLocalSubscriptionsByMessageRouter(ProtocolType.AMQP, AMQPUtils
.TOPIC_EXCHANGE_NAME)) {
count = count + 1;
}
return count;
}
}
    /**
     * {@inheritDoc}
     * <p>
     * In the event of a network partition (or nodes being offline, stopped,
     * crashed), if the minimum node count becomes less than required the
     * subscription manager will disconnect all consumers connected to this
     * node.
     * </p>
     */
    @Override
    public void minimumNodeCountNotFulfilled(int currentNodeCount) {
        // NOTE(review): the flag is written under this lock but read without one
        // elsewhere (e.g. addLocalSubscription), and cleared without it in
        // minimumNodeCountFulfilled — confirm the intended memory-visibility
        // semantics (a volatile flag may be what is wanted).
        synchronized (this) {
            isNetworkPartitioned = true;
        }
        log.warn("Minimum node count is below required, forcefully disconnecting all subscribers");
        try {
            forcefullyDisconnectAllLocalSubscriptions();
        } catch (AndesException e) {
            log.error("error occurred while forcefully disconnecting subscriptions", e);
        }
    }
@Override
public void minimumNodeCountFulfilled(int currentNodeCount) {
isNetworkPartitioned = false;
}
    /**
     * {@inheritDoc}
     * <p>
     * On a clustering outage every subscriber connected to this node is
     * forcefully disconnected; failures are logged and swallowed so the
     * outage handling itself cannot fail.
     * </p>
     */
    @Override
    public void clusteringOutage() {
        log.warn("Clustering outage, forcefully disconnecting all subscribers");
        try {
            forcefullyDisconnectAllLocalSubscriptions();
        } catch (AndesException e) {
            log.error("error occurred while forcefully disconnecting subscriptions", e);
        }
    }
}
| |
package com.dpg.spaceinvaders.screens;
import com.badlogic.ashley.core.Engine;
import com.badlogic.ashley.core.Entity;
import com.badlogic.ashley.core.Family;
import com.badlogic.ashley.utils.ImmutableArray;
import com.badlogic.gdx.Application.ApplicationType;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input.Keys;
import com.badlogic.gdx.ScreenAdapter;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.math.Vector3;
import com.dpg.spaceinvaders.SpaceInvadersGame;
import com.dpg.spaceinvaders.World;
import com.dpg.spaceinvaders.components.*;
import com.dpg.spaceinvaders.systems.AlienSystem;
import com.dpg.spaceinvaders.systems.AnimationSystem;
import com.dpg.spaceinvaders.systems.BoundsSystem;
import com.dpg.spaceinvaders.systems.CameraSystem;
import com.dpg.spaceinvaders.systems.CollisionSystem;
import com.dpg.spaceinvaders.systems.CollisionSystem.CollisionListener;
import com.dpg.spaceinvaders.systems.DefenderSystem;
import com.dpg.spaceinvaders.systems.MovementSystem;
import com.dpg.spaceinvaders.systems.RenderingSystem;
import com.dpg.spaceinvaders.systems.StateSystem;
/**
 * Main gameplay screen for the Space Invaders game. Owns the Ashley ECS
 * {@link Engine}, the game {@link World} and a small integer state machine
 * (ready / running / paused / level-end / game-over) driving per-frame update
 * and UI drawing.
 */
public class MainGameScreen extends ScreenAdapter {
    // Screen state machine values.
    static final int GAME_READY = 0;
    static final int GAME_RUNNING = 1;
    static final int GAME_PAUSED = 2;
    static final int GAME_LEVEL_END = 3;
    static final int GAME_OVER = 4;
    SpaceInvadersGame game;
    OrthographicCamera guiCam; // fixed 640x480 camera used only for UI drawing
    Vector3 touchPoint;
    World world;
    CollisionListener collisionListener;
    int lastScore; // initialised to 0; not updated in the visible code
    String scoreString; // initialised to "SCORE: 0"; not updated in the visible code
    Engine engine;
    private int state; // one of the GAME_* constants above
    /**
     * Build the screen: GUI camera, collision listener, ECS systems and world
     * entities, then start directly in the running state.
     *
     * @param game owning game instance; provides the shared sprite batcher
     */
    public MainGameScreen(SpaceInvadersGame game) {
        this.game = game;
        state = GAME_RUNNING;
        guiCam = new OrthographicCamera(640, 480);
        guiCam.position.set(640 / 2, 480 / 2, 0);
        touchPoint = new Vector3();
        // Collision event hooks; all currently no-ops.
        collisionListener = new CollisionListener() {
            @Override
            public void jump() {
            }
            @Override
            public void hit() {
            }
            @Override
            public void highJump() {
            }
            @Override
            public void coin() {
            }
        };
        engine = new Engine();
        world = new World(engine);
        // Registration order also defines system update order within the engine.
        engine.addSystem(new DefenderSystem(world));
        engine.addSystem(new AlienSystem(world));
        engine.addSystem(new CameraSystem());
        engine.addSystem(new MovementSystem());
        engine.addSystem(new BoundsSystem());
        engine.addSystem(new StateSystem());
        engine.addSystem(new AnimationSystem());
        engine.addSystem(new CollisionSystem(world, collisionListener));
        engine.addSystem(new RenderingSystem(game.batcher));
        world.create();
        lastScore = 0;
        scoreString = "SCORE: 0";
        resumeSystems();
    }
    /**
     * Advance the simulation one frame and dispatch to per-state update logic.
     *
     * @param deltaTime frame time in seconds; clamped to 0.1s so a stall does
     *                  not produce one huge simulation step
     */
    public void update(float deltaTime) {
        if (deltaTime > 0.1f)
            deltaTime = 0.1f;
        engine.update(deltaTime);
        switch (state) {
        case GAME_READY:
            updateReady();
            break;
        case GAME_RUNNING:
            updateRunning(deltaTime);
            break;
        case GAME_PAUSED:
            // updatePaused();
            break;
        case GAME_LEVEL_END:
            // updateLevelEnd();
            break;
        case GAME_OVER:
            // updateGameOver();
            break;
        }
    }
    // Ready state: any touch starts the game.
    private void updateReady() {
        if (Gdx.input.justTouched()) {
            state = GAME_RUNNING;
            resumeSystems();
        }
    }
    /**
     * Running state: read input (accelerometer/touch on mobile, keyboard on
     * desktop), feed it to the defender system and check end-of-game conditions.
     */
    private void updateRunning(float deltaTime) {
        // Implement Pause button check here
        /* See ashley-jumper -> GameScreen -> line 155 */
        ApplicationType appType = Gdx.app.getType();
        float accelX = 0.0f;
        boolean fireMissile = false;
        if (appType == ApplicationType.Android || appType == ApplicationType.iOS) {
            accelX = Gdx.input.getAccelerometerX();
            if(Gdx.input.isTouched()){
                fireMissile = true;
            }
        } else {
            // NOTE(review): LEFT maps to +5 and RIGHT to -5, mirroring the
            // accelerometer's sign convention — DefenderSystem presumably
            // interprets positive accelX as leftward; confirm there.
            if (Gdx.input.isKeyPressed(Keys.DPAD_LEFT)) {
                accelX = 5f;
            }
            if (Gdx.input.isKeyPressed(Keys.DPAD_RIGHT)) {
                accelX = -5f;
            }
            if(Gdx.input.isKeyPressed(Keys.F)){
                fireMissile = true;
            }
        }
        engine.getSystem(DefenderSystem.class).setAccelX(accelX);
        engine.getSystem(DefenderSystem.class).setIsFiring(fireMissile);
        //Check if all aliens are gone
        ImmutableArray<Entity> aliens = engine.getEntitiesFor(Family.all(AlienComponent.class, BoundsComponent.class,MovementComponent.class, TransformComponent.class, StateComponent.class).get());
        if(aliens.size() == 0){
            world.state = World.WORLD_STATE_GAME_OVER;
        }
        if (world.state == World.WORLD_STATE_GAME_OVER) {
            state = GAME_OVER;
            pauseSystems();
        }
    }
    // Draw UI for the current state; all per-state renderers are still stubs.
    private void drawUI() {
        guiCam.update();
        game.batcher.setProjectionMatrix(guiCam.combined);
        game.batcher.begin();
        switch (state) {
        case GAME_READY:
            // presentReady();
            break;
        case GAME_RUNNING:
            // presentRunning();
            break;
        case GAME_PAUSED:
            // presentPaused();
            break;
        case GAME_LEVEL_END:
            // presentLevelEnd();
            break;
        case GAME_OVER:
            // presentGameOver();
            break;
        }
        game.batcher.end();
    }
    // Stop processing in all gameplay systems (rendering keeps running).
    private void pauseSystems() {
        // engine.getSystem(BobSystem.class).setProcessing(false);
        // engine.getSystem(SquirrelSystem.class).setProcessing(false);
        // engine.getSystem(PlatformSystem.class).setProcessing(false);
        // engine.getSystem(GravitySystem.class).setProcessing(false);
        engine.getSystem(MovementSystem.class).setProcessing(false);
        engine.getSystem(BoundsSystem.class).setProcessing(false);
        engine.getSystem(StateSystem.class).setProcessing(false);
        engine.getSystem(AnimationSystem.class).setProcessing(false);
        engine.getSystem(CollisionSystem.class).setProcessing(false);
        engine.getSystem(DefenderSystem.class).setProcessing(false);
        engine.getSystem(AlienSystem.class).setProcessing(false);
    }
    // Re-enable processing in all gameplay systems.
    private void resumeSystems() {
        // engine.getSystem(BobSystem.class).setProcessing(true);
        // engine.getSystem(SquirrelSystem.class).setProcessing(true);
        // engine.getSystem(PlatformSystem.class).setProcessing(true);
        // engine.getSystem(GravitySystem.class).setProcessing(true);
        engine.getSystem(MovementSystem.class).setProcessing(true);
        engine.getSystem(BoundsSystem.class).setProcessing(true);
        engine.getSystem(StateSystem.class).setProcessing(true);
        engine.getSystem(AnimationSystem.class).setProcessing(true);
        engine.getSystem(CollisionSystem.class).setProcessing(true);
        engine.getSystem(DefenderSystem.class).setProcessing(true);
        engine.getSystem(AlienSystem.class).setProcessing(true);
    }
    @Override
    public void render(float delta) {
        update(delta);
        drawUI();
    }
}
| |
package com.dp1wms.controller.MantCliente;
import com.dp1wms.controller.FxmlController;
import com.dp1wms.controller.MainController;
import com.dp1wms.dao.RepositoryCargaMasiva;
import com.dp1wms.dao.RepositoryMantCliente;
import com.dp1wms.model.Cliente;
import com.dp1wms.util.ClienteCampo;
import com.dp1wms.view.ClientesView;
import com.dp1wms.view.StageManager;
import javafx.beans.property.SimpleStringProperty;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.scene.control.*;
import javafx.stage.FileChooser;
import javafx.util.Callback;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
@Component
public class ClienteMainController implements FxmlController{
@FXML private ComboBox<ClienteCampo> campoCB;
@FXML private TextField busquedaTF;
@FXML private TableView<Cliente> clienteTable;
@FXML private TableColumn<Cliente, String> rucCol;
@FXML private TableColumn<Cliente, String> razonSocialCol;
@FXML private TableColumn<Cliente, String> dirCol;
@FXML private TableColumn<Cliente, String> telfCol;
@FXML private TableColumn<Cliente, String> emailCol;
@FXML private TableColumn<Cliente, String> estadoCol;
private List<Cliente> clientes;
@Autowired
private RepositoryMantCliente repositoryMantCliente;
@Autowired
private RepositoryCargaMasiva repositoryCargaMasiva;
private StageManager stageManager;
private MainController mainController;
private ClienteInfoController clienteInfoController;
public void cargaMasiva(ActionEvent event){
this.repositoryCargaMasiva.storeProcedure_cargarClientes();
this.llenarClienteTable();
}
@FXML
private void cargarCSV(){
FileChooser fc = new FileChooser();
fc.getExtensionFilters().addAll(new FileChooser.ExtensionFilter("*CSV Files", "*.csv"));
File archivoCSV = fc.showOpenDialog(null);
if(archivoCSV != null){
try{
ArrayList<Cliente> clientes = new ArrayList<>();
BufferedReader br = new BufferedReader((new FileReader(archivoCSV)));
String line = null;
while((line = br.readLine()) != null){
String[] datos = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1);
Cliente c = new Cliente();
c.setNumDoc(datos[0]);
if(datos[1].contains("\"")){
c.setRazonSocial(datos[1].substring(1,datos[1].length()-1));
} else {
c.setRazonSocial(datos[1]);
}
c.setTelefono(datos[2]);
c.setDireccion(datos[3]);
c.setEmail(datos[4]);
clientes.add(c);
}
if(clientes.size() > 0){
this.repositoryMantCliente.registrarVariosClientes(clientes);
this.stageManager.mostrarInfoDialog("Mantenimiento de Clientes", null,
"Se cargaron los clientes en la base de datos");
this.clientes = clientes;
this.llenarClienteTable();
}
} catch(Exception e){
e.printStackTrace();
this.stageManager.mostrarErrorDialog("Mantenimiento de Clientes", null,
"No se pudo cargar los clientes.");
}
}
}
@FXML
private void buscarClientes(){
String campo = this.campoCB.getValue().campo;
String dato = this.busquedaTF.getText();
this.clientes = this.repositoryMantCliente.buscarCliente(campo, dato);
if(clientes != null){
this.llenarClienteTable();
}
}
@FXML
private void registrarCliente(){
this.clienteInfoController.setCliente(null);
this.stageManager.mostrarModal(ClientesView.INFO);
Cliente c = this.clienteInfoController.getCliente();
if(c != null){
if(this.clientes == null){
this.clientes = new ArrayList<>();
}
this.clientes.add(c);
this.clienteTable.getItems().add(c);
}
}
@FXML
private void modificarCliente(){
Cliente c = this.clienteTable.getSelectionModel().getSelectedItem();
if(c == null){
this.stageManager.mostrarErrorDialog("Error Mantenimiento Cliente", null,
"Debe seleccionar un cliente");
} else {
this.clienteInfoController.setCliente(new Cliente(c));
this.stageManager.mostrarModal(ClientesView.INFO);
Cliente cMod = this.clienteInfoController.getCliente();
if(cMod != null){
c.copyFrom(cMod);
this.llenarClienteTable();
}
}
}
@Override
public void initialize(){
this.initCampoCB();
this.limpiarClienteTable();
}
private void initCampoCB(){
ArrayList<ClienteCampo> campos = new ArrayList<>();
campos.add(new ClienteCampo("RUC / DNI", "numdoc"));
campos.add(new ClienteCampo("Razon Social", "razonsocial"));
campos.add(new ClienteCampo("Telefono", "telefono"));
campos.add(new ClienteCampo("Email", "email"));
this.campoCB.getItems().addAll(campos);
this.campoCB.getSelectionModel().select(0);
Callback<ListView<ClienteCampo>, ListCell<ClienteCampo>> factory = lv -> new ListCell<ClienteCampo>(){
@Override
protected void updateItem(ClienteCampo item, boolean empty) {
super.updateItem(item, empty);
setText(empty ? "" : item.descripcion);
}
};
this.campoCB.setCellFactory(factory);
this.campoCB.setButtonCell(factory.call(null));
}
private void limpiarClienteTable(){
this.clienteTable.getItems().clear();
this.rucCol.setCellValueFactory(value->{
return new SimpleStringProperty(value.getValue().getNumDoc());
});
this.razonSocialCol.setCellValueFactory(value->{
return new SimpleStringProperty(value.getValue().getRazonSocial());
});
this.dirCol.setCellValueFactory(value->{
return new SimpleStringProperty(value.getValue().getDireccion());
});
this.telfCol.setCellValueFactory(value->{
return new SimpleStringProperty(value.getValue().getTelefono());
});
this.emailCol.setCellValueFactory(value->{
return new SimpleStringProperty(value.getValue().getEmail());
});
this.estadoCol.setCellValueFactory(value->{
return new SimpleStringProperty(value.getValue().isActivo()?"Habilitado":"Deshabilitado");
});
}
private void llenarClienteTable(){
this.limpiarClienteTable();
this.clienteTable.getItems().addAll(this.clientes);
}
@Autowired @Lazy
public ClienteMainController(StageManager stageManager,
MainController mainController,
ClienteInfoController clienteInfoController){
this.stageManager = stageManager;
this.mainController = mainController;
this.clienteInfoController = clienteInfoController;
}
}
| |
/*
* Copyright 2014 Eric F. Savage, code@efsavage.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ajah.job.run.data;
import java.math.BigDecimal;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.ajah.job.Job;
import com.ajah.job.run.RunId;
import com.ajah.job.run.RunMetric;
import com.ajah.job.run.RunMetricId;
import com.ajah.job.run.RunMetricStatus;
import com.ajah.job.run.RunMetricType;
import com.ajah.spring.jdbc.DataOperationResult;
import com.ajah.spring.jdbc.err.DataOperationException;
import com.vigilanced.client.VigilancedClient;
/**
* Manages data operations for {@link RunMetric}.
*
* @author Eric F. Savage <code@efsavage.com>
*
*/
@Service
@Slf4j
public class RunMetricManager {
// DAO performing the actual persistence operations for RunMetric rows.
@Autowired
private RunMetricDao runMetricDao;
// Optional external monitoring client; remains null when no bean is configured
// (required = false), so every use must be null-checked.
@Autowired(required = false)
private VigilancedClient vigilancedClient;
/**
 * Returns a count of all records.
 *
 * @return Count of all records.
 * @throws DataOperationException
 *             If the query could not be executed.
 */
public long count() throws DataOperationException {
return count(null, null);
}
/**
 * Counts the records available that match the criteria.
 *
 * @param type
 *            The runMetric type to limit to, optional.
 * @param status
 *            The status to limit to, optional.
 * @return The number of matching records.
 * @throws DataOperationException
 *             If the query could not be executed.
 */
public long count(final RunMetricType type, final RunMetricStatus status) throws DataOperationException {
return this.runMetricDao.count(type, status);
}
/**
 * Creates a new {@link RunMetric} with the given properties and persists it
 * via {@link #save(RunMetric)}. The new metric is always created as
 * {@link RunMetricType#STANDARD} / {@link RunMetricStatus#ACTIVE}.
 *
 * <p>NOTE(review): the {@code value} parameter is never stored on the
 * {@link RunMetric}; its only use (the vigilanced update below) is commented
 * out, so external reporting is currently a no-op — confirm whether the value
 * should be persisted and/or reported.
 *
 * @param name
 *            The name of the runMetric, required.
 * @param value
 *            The value to submit for the metric (currently unused, see note).
 * @param runId
 *            The ID of the Run this metric is associated with.
 * @param job
 *            The job this run is for.
 * @param external
 *            If true, attempt to report the metric to the external
 *            monitoring service (when a client is configured).
 * @return The result of the creation, which will include the new runMetric
 *         at {@link DataOperationResult#getEntity()}.
 * @throws DataOperationException
 *             If the query could not be executed.
 */
public DataOperationResult<RunMetric> create(final String name, final BigDecimal value, final RunId runId, final Job job, final boolean external) throws DataOperationException {
final RunMetric runMetric = new RunMetric();
runMetric.setRunId(runId);
runMetric.setJobId(job.getId());
runMetric.setName(name);
runMetric.setType(RunMetricType.STANDARD);
runMetric.setStatus(RunMetricStatus.ACTIVE);
final DataOperationResult<RunMetric> result = save(runMetric);
if (external) {
if (this.vigilancedClient != null) {
// External reporting intentionally disabled; see NOTE(review) above.
// this.vigilancedClient.updateMetric(job.getMonitorKey(), name, value, null);
} else {
log.debug("No Vigilanced client is configured");
}
}
return result;
}
/**
 * Marks the entity as {@link RunMetricStatus#DELETED}. This is a soft
 * delete: the row is updated, not removed.
 *
 * @param runMetricId
 *            The ID of the runMetric to delete.
 * @return The result of the deletion, will not include the new runMetric at
 *         {@link DataOperationResult#getEntity()}.
 * @throws DataOperationException
 *             If the query could not be executed.
 * @throws RunMetricNotFoundException
 *             If the ID specified did not match any runMetrics.
 */
public DataOperationResult<RunMetric> delete(final RunMetricId runMetricId) throws DataOperationException, RunMetricNotFoundException {
final RunMetric runMetric = load(runMetricId);
runMetric.setStatus(RunMetricStatus.DELETED);
final DataOperationResult<RunMetric> result = save(runMetric);
return result;
}
/**
 * Returns a list of {@link RunMetric}s that match the specified criteria.
 *
 * @param type
 *            The type of runMetric, optional.
 * @param status
 *            The status of the runMetric, optional.
 * @param page
 *            The page of results to fetch.
 * @param count
 *            The number of results per page.
 * @return A list of {@link RunMetric}s, which may be empty.
 * @throws DataOperationException
 *             If the query could not be executed.
 */
public List<RunMetric> list(final RunMetricType type, final RunMetricStatus status, final long page, final long count) throws DataOperationException {
return this.runMetricDao.list(type, status, page, count);
}
/**
 * Loads an {@link RunMetric} by its ID.
 *
 * @param runMetricId
 *            The ID to load, required.
 * @return The matching runMetric, if found. Will not return null.
 * @throws DataOperationException
 *             If the query could not be executed.
 * @throws RunMetricNotFoundException
 *             If the ID specified did not match any runMetrics.
 */
public RunMetric load(final RunMetricId runMetricId) throws DataOperationException, RunMetricNotFoundException {
final RunMetric runMetric = this.runMetricDao.load(runMetricId);
if (runMetric == null) {
throw new RunMetricNotFoundException(runMetricId);
}
return runMetric;
}
/**
 * Saves an {@link RunMetric}. Assigns a new ID ({@link UUID}) and sets the
 * creation date if necessary. If either of these elements are missing the
 * entity is considered new and an insert is performed; otherwise an update.
 *
 * @param runMetric
 *            The runMetric to save.
 * @return The result of the save operation, which will include the new
 *         runMetric at {@link DataOperationResult#getEntity()}.
 * @throws DataOperationException
 *             If the query could not be executed.
 */
public DataOperationResult<RunMetric> save(final RunMetric runMetric) throws DataOperationException {
boolean create = false;
if (runMetric.getId() == null) {
runMetric.setId(new RunMetricId(UUID.randomUUID().toString()));
create = true;
}
if (runMetric.getCreated() == null) {
runMetric.setCreated(new Date());
create = true;
}
if (create) {
final DataOperationResult<RunMetric> result = this.runMetricDao.insert(runMetric);
log.debug("Created RunMetric " + runMetric.getName() + " [" + runMetric.getId() + "]");
return result;
}
final DataOperationResult<RunMetric> result = this.runMetricDao.update(runMetric);
log.debug("Updated RunMetric " + runMetric.getName() + " [" + runMetric.getId() + "]");
return result;
}
/**
 * Counts the records available that match the search criteria.
 *
 * @param search
 *            The search query.
 * @return The number of matching records.
 * @throws DataOperationException
 *             If the query could not be executed.
 */
public int searchCount(final String search) throws DataOperationException {
return this.runMetricDao.searchCount(search);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.client;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.client.cli.CliClient;
import org.apache.flink.table.client.cli.CliOptions;
import org.apache.flink.table.client.cli.CliOptionsParser;
import org.apache.flink.table.client.config.Environment;
import org.apache.flink.table.client.gateway.Executor;
import org.apache.flink.table.client.gateway.SessionContext;
import org.apache.flink.table.client.gateway.local.LocalExecutor;
import org.apache.commons.lang3.SystemUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.flink.table.client.config.entries.ConfigurationEntry.create;
import static org.apache.flink.table.client.config.entries.ConfigurationEntry.merge;
/**
* SQL Client for submitting SQL statements. The client can be executed in two
* modes: a gateway and embedded mode.
*
* <p>- In embedded mode, the SQL CLI is tightly coupled with the executor in a common process. This
* allows for submitting jobs without having to start an additional component.
*
* <p>- In future versions: In gateway mode, the SQL CLI client connects to the REST API of the gateway
* and allows for managing queries via console.
*
* <p>For debugging in an IDE you can execute the main method of this class using:
* "embedded --defaults /path/to/sql-client-defaults.yaml --jar /path/to/target/flink-sql-client-*.jar"
*
* <p>Make sure that the FLINK_CONF_DIR environment variable is set.
*/
public class SqlClient {
private static final Logger LOG = LoggerFactory.getLogger(SqlClient.class);
// True for embedded mode (executor runs in this process); gateway mode is
// not implemented yet and throws in start().
private final boolean isEmbedded;
// Parsed command-line options for this client invocation.
private final CliOptions options;
public static final String MODE_EMBEDDED = "embedded";
public static final String MODE_GATEWAY = "gateway";
public static final String DEFAULT_SESSION_ID = "default";
public SqlClient(boolean isEmbedded, CliOptions options) {
this.isEmbedded = isEmbedded;
this.options = options;
}
/**
 * Starts the client: builds a local executor from the defaults environment,
 * opens a session, runs the CLI, and closes the session on exit (both via
 * the finally block and a JVM shutdown hook).
 */
private void start() {
if (isEmbedded) {
// create local executor with default environment
final List<URL> jars;
if (options.getJars() != null) {
jars = options.getJars();
} else {
jars = Collections.emptyList();
}
final List<URL> libDirs;
if (options.getLibraryDirs() != null) {
libDirs = options.getLibraryDirs();
} else {
libDirs = Collections.emptyList();
}
final Executor executor = new LocalExecutor(options.getDefaults(), jars, libDirs);
executor.start();
// create CLI client with session environment
final Environment sessionEnv = readSessionEnvironment(options.getEnvironment());
appendPythonConfig(sessionEnv, options.getPythonConfiguration());
final SessionContext context;
if (options.getSessionId() == null) {
context = new SessionContext(DEFAULT_SESSION_ID, sessionEnv);
} else {
context = new SessionContext(options.getSessionId(), sessionEnv);
}
// Open a new session
String sessionId = executor.openSession(context);
try {
// add shutdown hook so the session is also closed on Ctrl+C / JVM exit
Runtime.getRuntime().addShutdownHook(new EmbeddedShutdownThread(sessionId, executor));
// do the actual work
openCli(sessionId, executor);
} finally {
executor.closeSession(sessionId);
}
} else {
throw new SqlClientException("Gateway mode is not supported yet.");
}
}
/**
 * Opens the CLI client for executing SQL statements. Runs interactively
 * unless a single update statement was supplied via the options, in which
 * case it is submitted and the client exits.
 *
 * @param sessionId session identifier for the current client.
 * @param executor executor
 */
private void openCli(String sessionId, Executor executor) {
CliClient cli = null;
try {
Path historyFilePath;
if (options.getHistoryFilePath() != null) {
historyFilePath = Paths.get(options.getHistoryFilePath());
} else {
// default history file in the user's home; hidden (dot-prefixed) except on Windows
historyFilePath = Paths.get(System.getProperty("user.home"),
SystemUtils.IS_OS_WINDOWS ? "flink-sql-history" : ".flink-sql-history");
}
cli = new CliClient(sessionId, executor, historyFilePath);
// interactive CLI mode
if (options.getUpdateStatement() == null) {
cli.open();
}
// execute single update statement
else {
final boolean success = cli.submitUpdate(options.getUpdateStatement());
if (!success) {
throw new SqlClientException("Could not submit given SQL update statement to cluster.");
}
}
} finally {
if (cli != null) {
cli.close();
}
}
}
// --------------------------------------------------------------------------------------------
/**
 * Parses the session environment file, or returns an empty environment when
 * no file was specified.
 */
private static Environment readSessionEnvironment(URL envUrl) {
// use an empty environment by default
if (envUrl == null) {
System.out.println("No session environment specified.");
return new Environment();
}
System.out.println("Reading session environment from: " + envUrl);
LOG.info("Using session environment file: {}", envUrl);
try {
return Environment.parse(envUrl);
} catch (IOException e) {
throw new SqlClientException("Could not read session environment file at: " + envUrl, e);
}
}
/**
 * Merges the Python-specific configuration into the session environment's
 * configuration section.
 */
private static void appendPythonConfig(Environment env, Configuration pythonConfiguration) {
Map<String, Object> pythonConfig = new HashMap<>(pythonConfiguration.toMap());
Map<String, Object> combinedConfig = new HashMap<>(merge(env.getConfiguration(), create(pythonConfig)).asMap());
env.setConfiguration(combinedConfig);
}
// --------------------------------------------------------------------------------------------
public static void main(String[] args) {
if (args.length < 1) {
CliOptionsParser.printHelpClient();
return;
}
switch (args[0]) {
case MODE_EMBEDDED:
// remove mode
final String[] modeArgs = Arrays.copyOfRange(args, 1, args.length);
final CliOptions options = CliOptionsParser.parseEmbeddedModeClient(modeArgs);
if (options.isPrintHelp()) {
CliOptionsParser.printHelpEmbeddedModeClient();
} else {
try {
final SqlClient client = new SqlClient(true, options);
client.start();
} catch (SqlClientException e) {
// make space in terminal
System.out.println();
System.out.println();
LOG.error("SQL Client must stop.", e);
throw e;
} catch (Throwable t) {
// make space in terminal
System.out.println();
System.out.println();
LOG.error("SQL Client must stop. Unexpected exception. This is a bug. Please consider filing an issue.", t);
throw new SqlClientException("Unexpected exception. This is a bug. Please consider filing an issue.", t);
}
}
break;
case MODE_GATEWAY:
throw new SqlClientException("Gateway mode is not supported yet.");
default:
CliOptionsParser.printHelpClient();
}
}
// --------------------------------------------------------------------------------------------
/** Shutdown hook that closes the open session when the JVM exits. */
private static class EmbeddedShutdownThread extends Thread {
private final String sessionId;
private final Executor executor;
public EmbeddedShutdownThread(String sessionId, Executor executor) {
this.sessionId = sessionId;
this.executor = executor;
}
@Override
public void run() {
// Shutdown the executor
System.out.println("\nShutting down the session...");
executor.closeSession(sessionId);
System.out.println("done.");
}
}
}
| |
package com.animerom.filemanager.commands.java;
import android.util.Log;
import com.animerom.filemanager.commands.AsyncResultListener;
import com.animerom.filemanager.commands.FolderUsageExecutable;
import com.animerom.filemanager.console.ExecutionException;
import com.animerom.filemanager.console.InsufficientPermissionsException;
import com.animerom.filemanager.console.NoSuchFileOrDirectory;
import com.animerom.filemanager.model.FolderUsage;
import com.animerom.filemanager.util.MimeTypeHelper;
import com.animerom.filemanager.util.MimeTypeHelper.MimeTypeCategory;
import java.io.File;
/**
* A class for retrieve the disk usage of a folder.
*/
public class FolderUsageCommand extends Program implements FolderUsageExecutable {
private static final String TAG = "FolderUsage"; //$NON-NLS-1$
// How many processed entries between partial-result notifications.
private static final int PARTIAL_NOTIFICATION_INTERVAL = 5;
// Absolute path of the folder whose usage is computed.
private final String mDirectory;
// Listener receiving start/partial/end/exception callbacks; may be null.
private final AsyncResultListener mAsyncResultListener;
// Accumulated usage statistics (files, folders, sizes, categories).
private final FolderUsage mFolderUsage;
private boolean mCancelled;
private boolean mEnded;
// Guards mCancelled/mEnded and coordinates cancel()/end() with execute().
private final Object mSync = new Object();
/**
 * Constructor of <code>FolderUsageCommand</code>.
 *
 * @param directory The absolute directory to compute
 * @param asyncResultListener The partial result listener
 */
public FolderUsageCommand(
String directory, AsyncResultListener asyncResultListener) {
super();
this.mDirectory = directory;
this.mAsyncResultListener = asyncResultListener;
this.mFolderUsage = new FolderUsage(directory);
this.mCancelled = false;
this.mEnded = false;
}
/**
 * {@inheritDoc}
 */
@Override
public boolean isAsynchronous() {
return true;
}
/**
 * {@inheritDoc}
 */
@Override
public FolderUsage getFolderUsage() {
return this.mFolderUsage;
}
/**
 * {@inheritDoc}
 *
 * Validates the target path, then recursively accumulates usage data,
 * reporting partial results to the listener as it goes.
 */
@Override
public void execute()
throws InsufficientPermissionsException, NoSuchFileOrDirectory, ExecutionException {
if (isTrace()) {
Log.v(TAG,
String.format("Computing folder usage for folder %s", //$NON-NLS-1$
this.mDirectory));
}
if (this.mAsyncResultListener != null) {
this.mAsyncResultListener.onAsyncStart();
}
File f = new File(this.mDirectory);
if (!f.exists()) {
if (isTrace()) {
Log.v(TAG, "Result: FAIL. NoSuchFileOrDirectory"); //$NON-NLS-1$
}
if (this.mAsyncResultListener != null) {
this.mAsyncResultListener.onException(new NoSuchFileOrDirectory(this.mDirectory));
}
// BUGFIX: previously fell through and kept computing (and reported
// success) after signalling the failure. Stop here instead.
return;
}
if (!f.isDirectory()) {
if (isTrace()) {
Log.v(TAG, "Result: FAIL. NoSuchFileOrDirectory"); //$NON-NLS-1$
}
if (this.mAsyncResultListener != null) {
this.mAsyncResultListener.onException(
new ExecutionException("path exists but it's not a folder")); //$NON-NLS-1$
}
// BUGFIX: stop after reporting the failure (see above).
return;
}
// Compute data recursively
computeRecursive(f);
synchronized (this.mSync) {
this.mEnded = true;
this.mSync.notify();
}
if (this.mAsyncResultListener != null) {
this.mAsyncResultListener.onAsyncEnd(this.mCancelled);
}
if (this.mAsyncResultListener != null) {
this.mAsyncResultListener.onAsyncExitCode(0);
}
if (isTrace()) {
Log.v(TAG, "Result: OK"); //$NON-NLS-1$
}
}
/**
 * Method that computes the folder usage recursively, notifying the
 * listener with a partial result every few processed entries and checking
 * for cancellation after each entry.
 *
 * @param folder The folder where to start the computation
 */
private void computeRecursive(File folder) {
// Obtains the files and folders of the folders
try {
File[] files = folder.listFiles();
int c = 0;
if (files != null) {
int cc = files.length;
for (int i = 0; i < cc; i++) {
if (files[i].isDirectory()) {
this.mFolderUsage.addFolder();
computeRecursive(files[i]);
} else {
this.mFolderUsage.addFile();
// Compute statistics and size
MimeTypeCategory category =
MimeTypeHelper.getCategory(null, files[i]);
this.mFolderUsage.addFileToCategory(category);
this.mFolderUsage.addSize(files[i].length());
}
// BUGFIX: the counter was never incremented, so "c % 5 == 0"
// was always true and a partial result was sent for every
// single entry. Count entries so we notify every Nth one.
c++;
// Partial notification
if (c % PARTIAL_NOTIFICATION_INTERVAL == 0) {
//If a listener is defined, then send the partial result
if (getAsyncResultListener() != null) {
getAsyncResultListener().onPartialResult(this.mFolderUsage);
}
}
// Check if the process was cancelled
try {
synchronized (this.mSync) {
if (this.mCancelled || this.mEnded) {
this.mSync.notify();
break;
}
}
} catch (Exception e) {/**NON BLOCK**/}
}
}
} finally {
//If a listener is defined, then send the partial result
if (getAsyncResultListener() != null) {
getAsyncResultListener().onPartialResult(this.mFolderUsage);
}
}
}
/**
 * {@inheritDoc}
 */
@Override
public boolean isCancelled() {
synchronized (this.mSync) {
return this.mCancelled;
}
}
/**
 * {@inheritDoc}
 *
 * Marks the command cancelled and waits (up to 5s) for the worker to
 * acknowledge via mSync.
 */
@Override
public boolean cancel() {
try {
synchronized (this.mSync) {
if (this.mEnded || this.mCancelled) {
this.mCancelled = true;
return true;
}
this.mCancelled = true;
this.mSync.wait(5000L);
}
} catch (Exception e) {/**NON BLOCK**/}
return true;
}
/**
 * {@inheritDoc}
 *
 * Marks the command ended and waits (up to 5s) for acknowledgement.
 */
@Override
public boolean end() {
try {
synchronized (this.mSync) {
this.mEnded = true;
this.mSync.wait(5000L);
}
} catch (Exception e) {/**NON BLOCK**/}
return true;
}
/**
 * {@inheritDoc}
 */
@Override
public void setOnEndListener(OnEndListener onEndListener) {
//Ignore. Java console doesn't use this
}
/**
 * {@inheritDoc}
 */
@Override
public void setOnCancelListener(OnCancelListener onCancelListener) {
//Ignore. Java console doesn't use this
}
/**
 * {@inheritDoc}
 */
@Override
public boolean isCancellable() {
return true;
}
/**
 * {@inheritDoc}
 */
@Override
public AsyncResultListener getAsyncResultListener() {
return this.mAsyncResultListener;
}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.workflows.v1beta.stub;
import static com.google.cloud.workflows.v1beta.WorkflowsClient.ListWorkflowsPagedResponse;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.workflows.v1beta.CreateWorkflowRequest;
import com.google.cloud.workflows.v1beta.DeleteWorkflowRequest;
import com.google.cloud.workflows.v1beta.GetWorkflowRequest;
import com.google.cloud.workflows.v1beta.ListWorkflowsRequest;
import com.google.cloud.workflows.v1beta.ListWorkflowsResponse;
import com.google.cloud.workflows.v1beta.OperationMetadata;
import com.google.cloud.workflows.v1beta.UpdateWorkflowRequest;
import com.google.cloud.workflows.v1beta.Workflow;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.GrpcOperationsStub;
import com.google.protobuf.Empty;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* gRPC stub implementation for the Workflows service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@BetaApi
@Generated("by gapic-generator-java")
public class GrpcWorkflowsStub extends WorkflowsStub {
// ------------------------------------------------------------------------
// gRPC method descriptors, one per Workflows RPC. Requests/responses are
// marshalled with protobuf; all methods are unary.
// ------------------------------------------------------------------------
private static final MethodDescriptor<ListWorkflowsRequest, ListWorkflowsResponse>
listWorkflowsMethodDescriptor =
MethodDescriptor.<ListWorkflowsRequest, ListWorkflowsResponse>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.workflows.v1beta.Workflows/ListWorkflows")
.setRequestMarshaller(
ProtoUtils.marshaller(ListWorkflowsRequest.getDefaultInstance()))
.setResponseMarshaller(
ProtoUtils.marshaller(ListWorkflowsResponse.getDefaultInstance()))
.build();
private static final MethodDescriptor<GetWorkflowRequest, Workflow> getWorkflowMethodDescriptor =
MethodDescriptor.<GetWorkflowRequest, Workflow>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.workflows.v1beta.Workflows/GetWorkflow")
.setRequestMarshaller(ProtoUtils.marshaller(GetWorkflowRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Workflow.getDefaultInstance()))
.build();
private static final MethodDescriptor<CreateWorkflowRequest, Operation>
createWorkflowMethodDescriptor =
MethodDescriptor.<CreateWorkflowRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.workflows.v1beta.Workflows/CreateWorkflow")
.setRequestMarshaller(
ProtoUtils.marshaller(CreateWorkflowRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.build();
private static final MethodDescriptor<DeleteWorkflowRequest, Operation>
deleteWorkflowMethodDescriptor =
MethodDescriptor.<DeleteWorkflowRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.workflows.v1beta.Workflows/DeleteWorkflow")
.setRequestMarshaller(
ProtoUtils.marshaller(DeleteWorkflowRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.build();
private static final MethodDescriptor<UpdateWorkflowRequest, Operation>
updateWorkflowMethodDescriptor =
MethodDescriptor.<UpdateWorkflowRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.workflows.v1beta.Workflows/UpdateWorkflow")
.setRequestMarshaller(
ProtoUtils.marshaller(UpdateWorkflowRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.build();
// ------------------------------------------------------------------------
// Callables built from the descriptors; create/delete/update also expose
// long-running OperationCallables resolved through the operations stub.
// ------------------------------------------------------------------------
private final UnaryCallable<ListWorkflowsRequest, ListWorkflowsResponse> listWorkflowsCallable;
private final UnaryCallable<ListWorkflowsRequest, ListWorkflowsPagedResponse>
listWorkflowsPagedCallable;
private final UnaryCallable<GetWorkflowRequest, Workflow> getWorkflowCallable;
private final UnaryCallable<CreateWorkflowRequest, Operation> createWorkflowCallable;
private final OperationCallable<CreateWorkflowRequest, Workflow, OperationMetadata>
createWorkflowOperationCallable;
private final UnaryCallable<DeleteWorkflowRequest, Operation> deleteWorkflowCallable;
private final OperationCallable<DeleteWorkflowRequest, Empty, OperationMetadata>
deleteWorkflowOperationCallable;
private final UnaryCallable<UpdateWorkflowRequest, Operation> updateWorkflowCallable;
private final OperationCallable<UpdateWorkflowRequest, Workflow, OperationMetadata>
updateWorkflowOperationCallable;
// Aggregates all background resources so close()/shutdown() fan out.
private final BackgroundResource backgroundResources;
private final GrpcOperationsStub operationsStub;
private final GrpcStubCallableFactory callableFactory;
public static final GrpcWorkflowsStub create(WorkflowsStubSettings settings) throws IOException {
return new GrpcWorkflowsStub(settings, ClientContext.create(settings));
}
public static final GrpcWorkflowsStub create(ClientContext clientContext) throws IOException {
return new GrpcWorkflowsStub(WorkflowsStubSettings.newBuilder().build(), clientContext);
}
public static final GrpcWorkflowsStub create(
ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
return new GrpcWorkflowsStub(
WorkflowsStubSettings.newBuilder().build(), clientContext, callableFactory);
}
/**
 * Constructs an instance of GrpcWorkflowsStub, using the given settings. This is protected so
 * that it is easy to make a subclass, but otherwise, the static factory methods should be
 * preferred.
 */
protected GrpcWorkflowsStub(WorkflowsStubSettings settings, ClientContext clientContext)
throws IOException {
this(settings, clientContext, new GrpcWorkflowsCallableFactory());
}
/**
 * Constructs an instance of GrpcWorkflowsStub, using the given settings. This is protected so
 * that it is easy to make a subclass, but otherwise, the static factory methods should be
 * preferred.
 *
 * <p>Each RPC's transport settings include a params extractor that populates
 * routing headers (e.g. "parent", "name") from the request.
 */
protected GrpcWorkflowsStub(
WorkflowsStubSettings settings,
ClientContext clientContext,
GrpcStubCallableFactory callableFactory)
throws IOException {
this.callableFactory = callableFactory;
this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);
GrpcCallSettings<ListWorkflowsRequest, ListWorkflowsResponse> listWorkflowsTransportSettings =
GrpcCallSettings.<ListWorkflowsRequest, ListWorkflowsResponse>newBuilder()
.setMethodDescriptor(listWorkflowsMethodDescriptor)
.setParamsExtractor(
request -> {
ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
params.put("parent", String.valueOf(request.getParent()));
return params.build();
})
.build();
GrpcCallSettings<GetWorkflowRequest, Workflow> getWorkflowTransportSettings =
GrpcCallSettings.<GetWorkflowRequest, Workflow>newBuilder()
.setMethodDescriptor(getWorkflowMethodDescriptor)
.setParamsExtractor(
request -> {
ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
params.put("name", String.valueOf(request.getName()));
return params.build();
})
.build();
GrpcCallSettings<CreateWorkflowRequest, Operation> createWorkflowTransportSettings =
GrpcCallSettings.<CreateWorkflowRequest, Operation>newBuilder()
.setMethodDescriptor(createWorkflowMethodDescriptor)
.setParamsExtractor(
request -> {
ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
params.put("parent", String.valueOf(request.getParent()));
return params.build();
})
.build();
GrpcCallSettings<DeleteWorkflowRequest, Operation> deleteWorkflowTransportSettings =
GrpcCallSettings.<DeleteWorkflowRequest, Operation>newBuilder()
.setMethodDescriptor(deleteWorkflowMethodDescriptor)
.setParamsExtractor(
request -> {
ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
params.put("name", String.valueOf(request.getName()));
return params.build();
})
.build();
GrpcCallSettings<UpdateWorkflowRequest, Operation> updateWorkflowTransportSettings =
GrpcCallSettings.<UpdateWorkflowRequest, Operation>newBuilder()
.setMethodDescriptor(updateWorkflowMethodDescriptor)
.setParamsExtractor(
request -> {
ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
params.put("workflow.name", String.valueOf(request.getWorkflow().getName()));
return params.build();
})
.build();
this.listWorkflowsCallable =
callableFactory.createUnaryCallable(
listWorkflowsTransportSettings, settings.listWorkflowsSettings(), clientContext);
this.listWorkflowsPagedCallable =
callableFactory.createPagedCallable(
listWorkflowsTransportSettings, settings.listWorkflowsSettings(), clientContext);
this.getWorkflowCallable =
callableFactory.createUnaryCallable(
getWorkflowTransportSettings, settings.getWorkflowSettings(), clientContext);
this.createWorkflowCallable =
callableFactory.createUnaryCallable(
createWorkflowTransportSettings, settings.createWorkflowSettings(), clientContext);
this.createWorkflowOperationCallable =
callableFactory.createOperationCallable(
createWorkflowTransportSettings,
settings.createWorkflowOperationSettings(),
clientContext,
operationsStub);
this.deleteWorkflowCallable =
callableFactory.createUnaryCallable(
deleteWorkflowTransportSettings, settings.deleteWorkflowSettings(), clientContext);
this.deleteWorkflowOperationCallable =
callableFactory.createOperationCallable(
deleteWorkflowTransportSettings,
settings.deleteWorkflowOperationSettings(),
clientContext,
operationsStub);
this.updateWorkflowCallable =
callableFactory.createUnaryCallable(
updateWorkflowTransportSettings, settings.updateWorkflowSettings(), clientContext);
this.updateWorkflowOperationCallable =
callableFactory.createOperationCallable(
updateWorkflowTransportSettings,
settings.updateWorkflowOperationSettings(),
clientContext,
operationsStub);
this.backgroundResources =
new BackgroundResourceAggregation(clientContext.getBackgroundResources());
}
public GrpcOperationsStub getOperationsStub() {
return operationsStub;
}
@Override
public UnaryCallable<ListWorkflowsRequest, ListWorkflowsResponse> listWorkflowsCallable() {
return listWorkflowsCallable;
}
@Override
public UnaryCallable<ListWorkflowsRequest, ListWorkflowsPagedResponse>
listWorkflowsPagedCallable() {
return listWorkflowsPagedCallable;
}
@Override
public UnaryCallable<GetWorkflowRequest, Workflow> getWorkflowCallable() {
return getWorkflowCallable;
}
@Override
public UnaryCallable<CreateWorkflowRequest, Operation> createWorkflowCallable() {
return createWorkflowCallable;
}
@Override
public OperationCallable<CreateWorkflowRequest, Workflow, OperationMetadata>
createWorkflowOperationCallable() {
return createWorkflowOperationCallable;
}
@Override
public UnaryCallable<DeleteWorkflowRequest, Operation> deleteWorkflowCallable() {
return deleteWorkflowCallable;
}
@Override
public OperationCallable<DeleteWorkflowRequest, Empty, OperationMetadata>
deleteWorkflowOperationCallable() {
return deleteWorkflowOperationCallable;
}
@Override
public UnaryCallable<UpdateWorkflowRequest, Operation> updateWorkflowCallable() {
return updateWorkflowCallable;
}
@Override
public OperationCallable<UpdateWorkflowRequest, Workflow, OperationMetadata>
updateWorkflowOperationCallable() {
return updateWorkflowOperationCallable;
}
// Lifecycle methods delegate to the aggregated background resources.
@Override
public final void close() {
try {
backgroundResources.close();
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new IllegalStateException("Failed to close resource", e);
}
}
@Override
public void shutdown() {
backgroundResources.shutdown();
}
@Override
public boolean isShutdown() {
return backgroundResources.isShutdown();
}
@Override
public boolean isTerminated() {
return backgroundResources.isTerminated();
}
@Override
public void shutdownNow() {
backgroundResources.shutdownNow();
}
@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
return backgroundResources.awaitTermination(duration, unit);
}
}
| |
package biz.paluch.logging.gelf.log4j;
import static biz.paluch.logging.gelf.LogMessageField.NamedLogField.*;
import java.util.Collections;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.spi.LoggingEvent;
import biz.paluch.logging.RuntimeContainer;
import biz.paluch.logging.gelf.LogMessageField;
import biz.paluch.logging.gelf.MdcGelfMessageAssembler;
import biz.paluch.logging.gelf.intern.*;
/**
* Logging-Handler for GELF (Graylog Extended Logging Format). This Java-Util-Logging Handler creates GELF Messages and posts
* them using UDP (default) or TCP. Following parameters are supported/needed:
* <ul>
* <li>host (Mandatory): Hostname/IP-Address of the Logstash Host
* <ul>
* <li>(the host) for UDP, e.g. 127.0.0.1 or some.host.com</li>
* <li>See docs for more details</li>
* </ul>
* </li>
* <li>port (Optional): Port, default 12201</li>
* <li>version (Optional): GELF Version 1.0 or 1.1, default 1.0</li>
* <li>originHost (Optional): Originating Hostname, default FQDN Hostname</li>
* <li>extractStackTrace (Optional): Post Stack-Trace to StackTrace field (true/false/throwable reference [0 = throwable, 1 =
* throwable.cause, -1 = root cause]), default false</li>
* <li>filterStackTrace (Optional): Perform Stack-Trace filtering (true/false), default false</li>
* <li>includeLocation (Optional): Include source code location, default true</li>
* <li>mdcProfiling (Optional): Perform Profiling (Call-Duration) based on MDC Data. See <a href="#mdcProfiling">MDC
* Profiling</a>, default false</li>
* <li>facility (Optional): Name of the Facility, default gelf-java</li>
* <li>threshold (Optional): Log-Level, default INFO</li>
* <li>filter (Optional): Class-Name of a Log-Filter, default none</li>
* <li>additionalFields(number) (Optional): Post additional fields. Eg.
* .GelfLogHandler.additionalFields=fieldName=Value,field2=value2</li>
* <li>additionalFieldTypes (Optional): Type specification for additional and MDC fields. Supported types: String, long, Long,
* double, Double and discover (default if not specified, discover field type on parseability). Eg. field=String,field2=double</li>
* <li>mdcFields (Optional): Post additional fields, pull Values from MDC. Name of the Fields are comma-separated
* mdcFields=Application,Version,SomeOtherFieldName</li>
* <li>dynamicMdcFields (Optional): Dynamic MDC Fields allows you to extract MDC values based on one or more regular
* expressions. Multiple regex are comma-separated. The name of the MDC entry is used as GELF field name.</li>
* <li>dynamicMdcFieldTypes (Optional): Pattern-based type specification for additional and MDC fields. Key-value pairs are
* comma-separated. Supported types: String, long, Long, double, Double. Eg. my_field.*=String,business\..*\.field=double</li>
* <li>includeFullMdc (Optional): Include all fields from the MDC, default false</li>
* </ul>
* <a name="mdcProfiling"></a> <h2>MDC Profiling</h2>
* <p>
* MDC Profiling allows to calculate the runtime from request start up to the time until the log message was generated. You must
* set one value in the MDC:
* <ul>
* <li>profiling.requestStart.millis: Time Millis of the Request-Start (Long or String)</li>
* </ul>
* <p>
* Two values are set by the Log Appender:
* </p>
* <ul>
* <li>profiling.requestEnd: End-Time of the Request-End in Date.toString-representation</li>
* <li>profiling.requestDuration: Duration of the request (e.g. 205ms, 16sec)</li>
* </ul>
*
* The {@link #append(LoggingEvent)} method is thread-safe and may be called by different threads at any time.
*
* @author Mark Paluch
*/
public class GelfLogAppender extends AppenderSkeleton implements ErrorReporter {

    // Sender used to ship GELF messages; created lazily in activateOptions().
    protected GelfSender gelfSender;
    // Assembles log4j LoggingEvents into GELF messages, including MDC handling.
    protected MdcGelfMessageAssembler gelfMessageAssembler;
    // Post-processes errors before delegating back to this appender's reportError().
    private final ErrorReporter errorReporter = new MessagePostprocessingErrorReporter(this);

    /**
     * Creates the appender with the default field mapping (time, severity, thread,
     * source location, logger name, NDC and server name).
     */
    public GelfLogAppender() {
        super();
        RuntimeContainer.initialize(errorReporter);
        gelfMessageAssembler = new MdcGelfMessageAssembler();
        gelfMessageAssembler.addFields(LogMessageField.getDefaultMapping(Time, Severity, ThreadName, SourceClassName,
                SourceMethodName, SourceLineNumber, SourceSimpleClassName, LoggerName, NDC, Server));
    }

    /**
     * Builds a GELF message from the event and sends it. Invalid messages and send
     * failures are reported through the log4j ErrorHandler rather than thrown, so a
     * logging problem can never break the application.
     */
    @Override
    protected void append(LoggingEvent event) {
        if (event == null) {
            return;
        }
        try {
            GelfMessage message = createGelfMessage(event);
            if (!message.isValid()) {
                reportError("GELF Message is invalid: " + message.toJson(), null);
                return;
            }
            // gelfSender stays null until activateOptions() has run successfully.
            if (null == gelfSender || !gelfSender.sendMessage(message)) {
                reportError("Could not send GELF message", null);
            }
        } catch (Exception e) {
            reportError("Could not send GELF message: " + e.getMessage(), e);
        }
    }

    /** Creates the sender from the assembler's current configuration. */
    protected GelfSender createGelfSender() {
        return GelfSenderFactory.createSender(gelfMessageAssembler, errorReporter, Collections.<String, Object>emptyMap());
    }

    /** Forwards errors to the log4j ErrorHandler installed on this appender. */
    @Override
    public void reportError(String message, Exception exception) {
        errorHandler.error(message, exception, 0);
    }

    /** This appender formats messages itself; no log4j Layout is required. */
    @Override
    public boolean requiresLayout() {
        return false;
    }

    /** Called by log4j after configuration; creates the sender if not present yet. */
    @Override
    public void activateOptions() {
        if (null == gelfSender) {
            gelfSender = createGelfSender();
        }
    }

    /** Releases the sender (and its socket resources, if any). Safe to call twice. */
    @Override
    public void close() {
        if (null != gelfSender) {
            Closer.close(gelfSender);
            gelfSender = null;
        }
    }

    /** Converts a log4j event into a GELF message via the assembler. */
    protected GelfMessage createGelfMessage(final LoggingEvent loggingEvent) {
        return gelfMessageAssembler.createGelfMessage(new Log4jLogEvent(loggingEvent));
    }

    // ---------------------------------------------------------------------
    // Configuration properties. These are bean-style accessors wired by the
    // log4j configuration machinery; all of them delegate to the assembler.
    // See the class javadoc for the meaning of each parameter.
    // ---------------------------------------------------------------------

    public void setAdditionalFields(String spec) {
        ConfigurationSupport.setAdditionalFields(spec, gelfMessageAssembler);
    }

    public void setAdditionalFieldTypes(String spec) {
        ConfigurationSupport.setAdditionalFieldTypes(spec, gelfMessageAssembler);
    }

    public void setMdcFields(String spec) {
        ConfigurationSupport.setMdcFields(spec, gelfMessageAssembler);
    }

    public void setDynamicMdcFields(String spec) {
        ConfigurationSupport.setDynamicMdcFields(spec, gelfMessageAssembler);
    }

    public void setDynamicMdcFieldTypes(String spec) {
        ConfigurationSupport.setDynamicMdcFieldTypes(spec, gelfMessageAssembler);
    }

    // Legacy aliases for host/port kept for backwards compatibility with older
    // configurations that used the "graylogHost"/"graylogPort" property names.
    public String getGraylogHost() {
        return gelfMessageAssembler.getHost();
    }

    public void setGraylogHost(String graylogHost) {
        gelfMessageAssembler.setHost(graylogHost);
    }

    public int getGraylogPort() {
        return gelfMessageAssembler.getPort();
    }

    public void setGraylogPort(int graylogPort) {
        gelfMessageAssembler.setPort(graylogPort);
    }

    public String getHost() {
        return gelfMessageAssembler.getHost();
    }

    public void setHost(String host) {
        gelfMessageAssembler.setHost(host);
    }

    public int getPort() {
        return gelfMessageAssembler.getPort();
    }

    public void setPort(int port) {
        gelfMessageAssembler.setPort(port);
    }

    public String getOriginHost() {
        return gelfMessageAssembler.getOriginHost();
    }

    public void setOriginHost(String originHost) {
        gelfMessageAssembler.setOriginHost(originHost);
    }

    public String getFacility() {
        return gelfMessageAssembler.getFacility();
    }

    public void setFacility(String facility) {
        gelfMessageAssembler.setFacility(facility);
    }

    public String getExtractStackTrace() {
        return gelfMessageAssembler.getExtractStackTrace();
    }

    public void setExtractStackTrace(String extractStacktrace) {
        gelfMessageAssembler.setExtractStackTrace(extractStacktrace);
    }

    public boolean isFilterStackTrace() {
        return gelfMessageAssembler.isFilterStackTrace();
    }

    public void setFilterStackTrace(boolean filterStackTrace) {
        gelfMessageAssembler.setFilterStackTrace(filterStackTrace);
    }

    public boolean isIncludeLocation() {
        return gelfMessageAssembler.isIncludeLocation();
    }

    public void setIncludeLocation(boolean includeLocation) {
        gelfMessageAssembler.setIncludeLocation(includeLocation);
    }

    public boolean isMdcProfiling() {
        return gelfMessageAssembler.isMdcProfiling();
    }

    public void setMdcProfiling(boolean mdcProfiling) {
        gelfMessageAssembler.setMdcProfiling(mdcProfiling);
    }

    public String getTimestampPattern() {
        return gelfMessageAssembler.getTimestampPattern();
    }

    public void setTimestampPattern(String timestampPattern) {
        gelfMessageAssembler.setTimestampPattern(timestampPattern);
    }

    public int getMaximumMessageSize() {
        return gelfMessageAssembler.getMaximumMessageSize();
    }

    public void setMaximumMessageSize(int maximumMessageSize) {
        gelfMessageAssembler.setMaximumMessageSize(maximumMessageSize);
    }

    public boolean isIncludeFullMdc() {
        return gelfMessageAssembler.isIncludeFullMdc();
    }

    public void setIncludeFullMdc(boolean includeFullMdc) {
        gelfMessageAssembler.setIncludeFullMdc(includeFullMdc);
    }

    public String getVersion() {
        return gelfMessageAssembler.getVersion();
    }

    public void setVersion(String version) {
        gelfMessageAssembler.setVersion(version);
    }
}
| |
package com.ss.editor.ui.dialog.animation;
import static com.ss.editor.util.AnimationUtils.extractAnimation;
import static com.ss.rlib.common.util.ObjectUtils.notNull;
import com.jme3.animation.AnimControl;
import com.jme3.animation.Animation;
import com.ss.editor.Messages;
import com.ss.editor.annotation.BackgroundThread;
import com.ss.editor.annotation.FromAnyThread;
import com.ss.editor.annotation.FxThread;
import com.ss.editor.manager.ExecutorManager;
import com.ss.editor.model.undo.editor.ChangeConsumer;
import com.ss.editor.ui.control.tree.NodeTree;
import com.ss.editor.model.undo.impl.animation.AddAnimationNodeOperation;
import com.ss.editor.ui.control.tree.node.impl.control.anim.AnimationTreeNode;
import com.ss.editor.ui.css.CssClasses;
import com.ss.editor.ui.dialog.AbstractSimpleEditorDialog;
import com.ss.editor.ui.util.UiUtils;
import com.ss.editor.util.AnimationUtils;
import com.ss.rlib.fx.control.input.IntegerTextField;
import com.ss.rlib.fx.util.FXUtils;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
import javafx.scene.layout.GridPane;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
/**
* The implementation of a dialog to extract a sub animation.
*
* @author JavaSaBr
*/
public class ExtractSubAnimationDialog extends AbstractSimpleEditorDialog {

    // Fixed dialog width; height 0 lets the layout compute it.
    @NotNull
    private static final Point DIALOG_SIZE = new Point(390, 0);

    @NotNull
    private static final ExecutorManager EXECUTOR_MANAGER = ExecutorManager.getInstance();

    /**
     * The node tree component.
     */
    @NotNull
    private final NodeTree<?> nodeTree;

    /**
     * The animation node.
     */
    @NotNull
    private final AnimationTreeNode node;

    /**
     * The field with a value of new animation name.
     */
    @Nullable
    private TextField nameField;

    /**
     * The field with a value of start frame.
     */
    @Nullable
    private IntegerTextField startFrameField;

    /**
     * The field with a value of end frame.
     */
    @Nullable
    private IntegerTextField endFrameField;

    /**
     * Creates the dialog for the given animation node and pre-fills the fields:
     * a free animation name, start frame 0 and end frame (frameCount - 1).
     *
     * <p>NOTE(review): the field accessors used here assume the superclass
     * constructor has already invoked {@link #createContent(GridPane)} — confirm
     * that contract in {@code AbstractSimpleEditorDialog}.
     *
     * @param nodeTree the node tree component.
     * @param node     the animation node to extract from.
     */
    public ExtractSubAnimationDialog(@NotNull final NodeTree<?> nodeTree, @NotNull final AnimationTreeNode node) {
        this.nodeTree = nodeTree;
        this.node = node;
        final Animation animation = node.getElement();
        final AnimControl control = notNull(node.getControl());
        final int frameCount = AnimationUtils.getFrameCount(animation);
        final TextField nameField = getNameField();
        nameField.setText(AnimationUtils.findFreeName(control, Messages.MANUAL_EXTRACT_ANIMATION_DIALOG_NAME_EXAMPLE));
        // Start frame must leave room for at least one following frame.
        final IntegerTextField startFrameField = getStartFrameField();
        startFrameField.setMinMax(0, frameCount - 2);
        startFrameField.setValue(0);
        final IntegerTextField endFrameField = getEndFrameField();
        endFrameField.setMinMax(1, frameCount - 1);
        endFrameField.setValue(frameCount - 1);
    }

    /**
     * Get the node tree.
     *
     * @return the node tree component.
     */
    @FxThread
    protected @NotNull NodeTree<?> getNodeTree() {
        return nodeTree;
    }

    /**
     * Get the animation node.
     *
     * @return the animation node.
     */
    @FxThread
    protected @NotNull AnimationTreeNode getNode() {
        return node;
    }

    /** @return the localized dialog title. */
    @Override
    @FromAnyThread
    protected @NotNull String getTitleText() {
        return Messages.MANUAL_EXTRACT_ANIMATION_DIALOG_TITLE;
    }

    /**
     * Builds the dialog content: three labeled rows for the new animation name,
     * the start frame and the end frame.
     *
     * @param root the grid the controls are added to.
     */
    @Override
    @FxThread
    protected void createContent(@NotNull final GridPane root) {
        super.createContent(root);
        final Label nameLabel = new Label(Messages.MANUAL_EXTRACT_ANIMATION_DIALOG_NAME + ":");
        nameLabel.prefWidthProperty().bind(root.widthProperty().multiply(DEFAULT_LABEL_W_PERCENT));
        nameField = new TextField();
        nameField.prefWidthProperty().bind(root.widthProperty().multiply(DEFAULT_FIELD_W_PERCENT));
        final Label startFrameLabel = new Label(Messages.MANUAL_EXTRACT_ANIMATION_DIALOG_START_FRAME + ":");
        startFrameLabel.prefWidthProperty().bind(root.widthProperty().multiply(DEFAULT_LABEL_W_PERCENT));
        startFrameField = new IntegerTextField();
        startFrameField.prefWidthProperty().bind(root.widthProperty().multiply(DEFAULT_FIELD_W_PERCENT));
        final Label endFrameLabel = new Label(Messages.MANUAL_EXTRACT_ANIMATION_DIALOG_END_FRAME + ":");
        endFrameLabel.prefWidthProperty().bind(root.widthProperty().multiply(DEFAULT_LABEL_W_PERCENT));
        endFrameField = new IntegerTextField();
        endFrameField.prefWidthProperty().bind(root.widthProperty().multiply(DEFAULT_FIELD_W_PERCENT));
        root.add(nameLabel, 0, 0);
        root.add(nameField, 1, 0);
        root.add(startFrameLabel, 0, 1);
        root.add(startFrameField, 1, 1);
        root.add(endFrameLabel, 0, 2);
        root.add(endFrameField, 1, 2);
        FXUtils.addClassTo(nameLabel, startFrameLabel, endFrameLabel, CssClasses.DIALOG_DYNAMIC_LABEL);
        FXUtils.addClassTo(nameField, endFrameField, startFrameField, CssClasses.DIALOG_FIELD);
    }

    /** @return true: this dialog lays out its content in a grid. */
    @Override
    @FromAnyThread
    protected boolean isGridStructure() {
        return true;
    }

    /**
     * Get the field with a value of new animation name.
     *
     * @return the field with a value of new animation name.
     */
    @FxThread
    private @NotNull TextField getNameField() {
        return notNull(nameField);
    }

    /**
     * Get the field with a value of start frame.
     *
     * @return the field with a value of start frame.
     */
    @FxThread
    private @NotNull IntegerTextField getStartFrameField() {
        return notNull(startFrameField);
    }

    /**
     * Get the field with a value of end frame.
     *
     * @return the field with a value of end frame.
     */
    @FxThread
    private @NotNull IntegerTextField getEndFrameField() {
        return notNull(endFrameField);
    }

    /** Shows the loading indicator and runs the extraction on a background thread. */
    @Override
    @FxThread
    protected void processOk() {
        UiUtils.incrementLoading();
        EXECUTOR_MANAGER.addBackgroundTask(this::processExtract);
        super.processOk();
    }

    /**
     * Process of extraction a sub animation.
     */
    @BackgroundThread
    private void processExtract() {
        final AnimationTreeNode node = getNode();
        final AnimControl control = notNull(node.getControl());
        final Animation animation = node.getElement();
        final TextField nameField = getNameField();
        final IntegerTextField startFrameField = getStartFrameField();
        final IntegerTextField endFrameField = getEndFrameField();
        int startFrame = startFrameField.getValue();
        int endFrame = endFrameField.getValue();
        // Clamp so the extracted range always spans at least one frame.
        if (startFrame >= endFrame) {
            startFrame = endFrame - 1;
        }
        final Animation subAnimation = extractAnimation(animation, nameField.getText(), startFrame, endFrame);
        final NodeTree<?> nodeTree = getNodeTree();
        final ChangeConsumer changeConsumer = notNull(nodeTree.getChangeConsumer());
        // Register the new animation as an undoable operation.
        changeConsumer.execute(new AddAnimationNodeOperation(subAnimation, control));
        EXECUTOR_MANAGER.addFxTask(UiUtils::decrementLoading);
    }

    /** @return the localized OK-button caption. */
    @Override
    @FromAnyThread
    protected @NotNull String getButtonOkText() {
        return Messages.MANUAL_EXTRACT_ANIMATION_DIALOG_BUTTON_OK;
    }

    /** @return the fixed dialog size (width only; height computed by layout). */
    @Override
    @FromAnyThread
    protected @NotNull Point getSize() {
        return DIALOG_SIZE;
    }
}
| |
package ua.dudeweather;
import ua.dudeweather.dressup.Dude;
import javax.swing.*;
import java.awt.event.*;
import java.io.IOException;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
public class Form extends JDialog {
    private JPanel contentPane;
    private JButton buttonOK;
    private JButton buttonCancel;
    private JTextArea textArea1;
    private JButton nextButton;
    private JButton previousButton;
    private JLabel timeLabel;
    WeatherChanged weatherChanged1;
    Navigator navigator1;
    Forecaster forecaster1;
    Location currentPos1;
    Weather weather1;
    Info info1;
    // Dude that is (re)dressed according to the weather of the displayed period.
    Dude dude = new Dude();
    // Set when stepping past the last (noPeriodN) / first (noPeriodP) known
    // period, so the next click in the opposite direction restores the current
    // period instead of moving again.
    boolean noPeriodN = false;
    boolean noPeriodP = false;
    // NOTE(review): the deprecated java.util.Date constructor treats the first
    // argument as "years since 1900", so 2012 here actually means the year 3912
    // — confirm the intended date. Month 5 is June (months are 0-based).
    int curDay = 6; int curHours = 16; int curMin = 30;
    Date curDate = new Date(2012, 5, curDay, curHours, curMin);
    // The weather period currently shown in the dialog.
    SinglePeriod curPeriod;

    /**
     * Builds the dialog, loads the weather data file, resolves the period for a
     * hard-coded date and wires the button/keyboard handlers.
     *
     * <p>NOTE(review): if reading the file or resolving the period fails, the
     * constructor returns early and leaves the dialog only partially wired up.
     */
    public Form() throws ClassNotFoundException{
        setContentPane(contentPane);
        setModal(true);
        getRootPane().setDefaultButton(buttonOK);
        weatherChanged1 = new WeatherChanged();
        currentPos1 = new Location();
        weather1 = new Weather(50, 15, 3, 760);
        info1 = new Info();
        try {
            info1.readWeatherInfo("D:\\weathersource.ddw");
        } catch (IOException s) {
            return;
        }
        System.out.println(info1.getKnownPeriods().size());
        // NOTE(review): these locals shadow the fields of the same names, so the
        // field curDate (16:30) is never used here — confirm which is intended.
        int curDay = 6; int curHours = 17; int curMin = 0;
        Date curDate = new Date(2012, 5, curDay, curHours, curMin);
        try {
            curPeriod = info1.getCurrentPeriod(info1.getKnownPeriods(), curDate);
        } catch (NoSuchPeriodException e) {
            System.out.println(e.toString());
            return;
        }
        drawPeriod(curPeriod);
        dude.choose_cloth(info1.getKnownPeriods().get(curPeriod));
        dude.draw(textArea1);
        buttonOK.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                onOK();
            }
        });
        // call onCancel() when cross is clicked
        setDefaultCloseOperation(DO_NOTHING_ON_CLOSE);
        addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                onCancel();
            }
        });
        // call onCancel() on ESCAPE
        contentPane.registerKeyboardAction(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                onCancel();
            }
        }, KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
        previousButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                try{
                    onPrevious();
                } catch (NoSuchPeriodException ex) {
                    textArea1.setText("");
                    textArea1.append("There's no information on this period");
                }
            }
        });
        nextButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                try {
                    onNext();
                } catch (NoSuchPeriodException ex) {
                    textArea1.setText("");
                    textArea1.append("There's no information on this period");
                }
            }
        });
    }

    /**
     * Steps to the period before the current one and redraws the dude/time label.
     *
     * @throws NoSuchPeriodException if there is no earlier period.
     */
    private void onPrevious() throws NoSuchPeriodException {
        SinglePeriod prevPeriod = null;
        int n = info1.getKnownPeriods().size();
        int i = 0;
        // A previous click on "next" ran past the end: restore the current period.
        if (noPeriodN) {
            dude.draw(textArea1);
            drawPeriod(curPeriod);
            noPeriodN = false;
            return;
        }
        noPeriodP = false;
        textArea1.setText("");
        // Locate the current period in the ordered list.
        // NOTE(review): == compares Date references, not values — this only works
        // if the period lists share the same Date instances; confirm, else use equals().
        while (i<n) {
            SinglePeriod chkPeriod = info1.getTimePeriods().get(i);
            if (chkPeriod.getTimeBegin() == curPeriod.getTimeBegin() && chkPeriod.getTimeEnd() == curPeriod.getTimeEnd()) {
                try {
                    // i == 0 yields index -1; the catch turns that into "no previous".
                    prevPeriod = info1.getTimePeriods().get(i-1);
                } catch (IndexOutOfBoundsException e) {
                    prevPeriod = null;
                }
                break;
            }
            i++;
        }
        if (prevPeriod == null) {
            noPeriodP = true;
            timeLabel.setText("... - "+curPeriod.getTimeBegin().getHours()+":"+curPeriod.getTimeBegin().getMinutes());
            throw new NoSuchPeriodException();
        }
        curPeriod = prevPeriod;
        Weather prevWeather = info1.getKnownPeriods().get(prevPeriod);
        dude.choose_cloth(prevWeather);
        dude.draw(textArea1);
        this.drawPeriod(prevPeriod);
    }

    /**
     * Steps to the period after the current one and redraws the dude/time label.
     * Mirror image of {@link #onPrevious()}.
     *
     * @throws NoSuchPeriodException if there is no later period.
     */
    private void onNext() throws NoSuchPeriodException{
        SinglePeriod nextPeriod = null;
        int n = info1.getKnownPeriods().size();
        int i = 0;
        // A previous click on "previous" ran past the start: restore the current period.
        if (noPeriodP) {
            dude.draw(textArea1);
            drawPeriod(curPeriod);
            noPeriodP = false;
            return;
        }
        noPeriodN = false;
        textArea1.setText("");
        while (i<n) {
            SinglePeriod chkPeriod = info1.getTimePeriods().get(i);
            if (chkPeriod.getTimeBegin() == curPeriod.getTimeBegin() && chkPeriod.getTimeEnd() == curPeriod.getTimeEnd()) {
                try {
                    nextPeriod = info1.getTimePeriods().get(i+1);
                } catch (IndexOutOfBoundsException e) {
                    nextPeriod = null;
                }
                break;
            }
            i++;
        }
        if (nextPeriod == null) {
            noPeriodN = true;
            timeLabel.setText(curPeriod.getTimeEnd().getHours()+":"+curPeriod.getTimeEnd().getMinutes()+" - ...");
            throw new NoSuchPeriodException();
        }
        curPeriod = nextPeriod;
        Weather prevWeather = info1.getKnownPeriods().get(nextPeriod);
        dude.choose_cloth(prevWeather);
        dude.draw(textArea1);
        this.drawPeriod(nextPeriod);
    }

    // Renders "HH:MM - HH:MM" for the period in the time label.
    // NOTE(review): minutes are not zero-padded, so 17:05 renders as "17:5".
    private void drawPeriod(SinglePeriod currentPeriod) {
        this.timeLabel.setText(String.valueOf(currentPeriod.getTimeBegin().getHours())+":"+String.valueOf(currentPeriod.getTimeBegin().getMinutes())+" - "+String.valueOf(currentPeriod.getTimeEnd().getHours())+":"+String.valueOf(currentPeriod.getTimeEnd().getMinutes()));
    }

    private void onOK() {
        // add your code here
        dispose();
    }

    private void onCancel() {
        // add your code here if necessary
        dispose();
    }

    /** Entry point: shows the dialog modally, then exits the JVM. */
    public static void main(String[] args) throws ClassNotFoundException{
        Form dialog = new Form();
        dialog.pack();
        dialog.setVisible(true);
        System.exit(0);
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest.model;
import java.io.IOException;
import java.io.Serializable;
import javax.servlet.ServletContext;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.RESTServlet;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version;
import org.glassfish.jersey.servlet.ServletContainer;
/**
* A representation of the collection of versions of the REST gateway software
* components.
* <ul>
* <li>restVersion: REST gateway revision</li>
* <li>jvmVersion: the JVM vendor and version information</li>
* <li>osVersion: the OS type, version, and hardware architecture</li>
* <li>serverVersion: the name and version of the servlet container</li>
* <li>jerseyVersion: the version of the embedded Jersey framework</li>
* </ul>
*/
@XmlRootElement(name="Version")
@InterfaceAudience.Private
public class VersionModel implements Serializable, ProtobufMessageHandler {
private static final long serialVersionUID = 1L;
private String restVersion;
private String jvmVersion;
private String osVersion;
private String serverVersion;
private String jerseyVersion;
/**
* Default constructor. Do not use.
*/
public VersionModel() {}
/**
* Constructor
* @param context the servlet context
*/
public VersionModel(ServletContext context) {
restVersion = RESTServlet.VERSION_STRING;
jvmVersion = System.getProperty("java.vm.vendor") + ' ' +
System.getProperty("java.version") + '-' +
System.getProperty("java.vm.version");
osVersion = System.getProperty("os.name") + ' ' +
System.getProperty("os.version") + ' ' +
System.getProperty("os.arch");
serverVersion = context.getServerInfo();
jerseyVersion = ServletContainer.class.getClass().getPackage()
.getImplementationVersion();
}
/**
* @return the REST gateway version
*/
@XmlAttribute(name="REST")
public String getRESTVersion() {
return restVersion;
}
/**
* @return the JVM vendor and version
*/
@XmlAttribute(name="JVM")
public String getJVMVersion() {
return jvmVersion;
}
/**
* @return the OS name, version, and hardware architecture
*/
@XmlAttribute(name="OS")
public String getOSVersion() {
return osVersion;
}
/**
* @return the servlet container version
*/
@XmlAttribute(name="Server")
public String getServerVersion() {
return serverVersion;
}
/**
* @return the version of the embedded Jersey framework
*/
@XmlAttribute(name="Jersey")
public String getJerseyVersion() {
return jerseyVersion;
}
/**
* @param version the REST gateway version string
*/
public void setRESTVersion(String version) {
this.restVersion = version;
}
/**
* @param version the OS version string
*/
public void setOSVersion(String version) {
this.osVersion = version;
}
/**
* @param version the JVM version string
*/
public void setJVMVersion(String version) {
this.jvmVersion = version;
}
/**
* @param version the servlet container version string
*/
public void setServerVersion(String version) {
this.serverVersion = version;
}
/**
* @param version the Jersey framework version string
*/
public void setJerseyVersion(String version) {
this.jerseyVersion = version;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("rest ");
sb.append(restVersion);
sb.append(" [JVM: ");
sb.append(jvmVersion);
sb.append("] [OS: ");
sb.append(osVersion);
sb.append("] [Server: ");
sb.append(serverVersion);
sb.append("] [Jersey: ");
sb.append(jerseyVersion);
sb.append("]\n");
return sb.toString();
}
@Override
public byte[] createProtobufOutput() {
Version.Builder builder = Version.newBuilder();
builder.setRestVersion(restVersion);
builder.setJvmVersion(jvmVersion);
builder.setOsVersion(osVersion);
builder.setServerVersion(serverVersion);
builder.setJerseyVersion(jerseyVersion);
return builder.build().toByteArray();
}
@Override
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
Version.Builder builder = Version.newBuilder();
ProtobufUtil.mergeFrom(builder, message);
if (builder.hasRestVersion()) {
restVersion = builder.getRestVersion();
}
if (builder.hasJvmVersion()) {
jvmVersion = builder.getJvmVersion();
}
if (builder.hasOsVersion()) {
osVersion = builder.getOsVersion();
}
if (builder.hasServerVersion()) {
serverVersion = builder.getServerVersion();
}
if (builder.hasJerseyVersion()) {
jerseyVersion = builder.getJerseyVersion();
}
return this;
}
}
| |
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.mediarouter.media;
import static androidx.test.platform.app.InstrumentationRegistry.getInstrumentation;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import android.content.Context;
import android.media.MediaRoute2ProviderService;
import android.media.RoutingSessionInfo;
import android.os.Build;
import android.os.Bundle;
import android.os.Messenger;
import android.support.mediacompat.testlib.util.PollingCheck;
import android.text.TextUtils;
import androidx.annotation.NonNull;
import androidx.mediarouter.media.MediaRouter.RouteInfo;
import androidx.mediarouter.testing.MediaRouterTestHelper;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.MediumTest;
import androidx.test.filters.SdkSuppress;
import androidx.test.filters.SmallTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/**
* Tests features related to {@link android.media.MediaRouter2}.
*/
@RunWith(AndroidJUnit4.class)
@SdkSuppress(minSdkVersion = Build.VERSION_CODES.R)
public class MediaRouter2Test {
private static final String TAG = "MR2Test";
private static final int TIMEOUT_MS = 5_000;
private Context mContext;
private MediaRouter mRouter;
private MediaRouter.Callback mPlaceholderCallback = new MediaRouter.Callback() { };
StubMediaRouteProviderService mService;
StubMediaRouteProviderService.StubMediaRouteProvider mProvider;
MediaRouteProviderService.MediaRouteProviderServiceImplApi30 mServiceImpl;
MediaRoute2ProviderServiceAdapter mMr2ProviderServiceAdapter;
List<MediaRouter.Callback> mCallbacks;
MediaRouteSelector mSelector;
// Maps descriptor ID to RouteInfo for convenience.
Map<String, RouteInfo> mRoutes;
    /**
     * Binds the stub route provider service, waits until its MediaRoute2 adapter
     * is available, then initializes and publishes the stub routes.
     */
    @Before
    public void setUp() {
        mContext = ApplicationProvider.getApplicationContext();
        getInstrumentation().runOnMainSync(() -> mRouter = MediaRouter.getInstance(mContext));
        mCallbacks = new ArrayList<>();
        // Set a default selector.
        mSelector = new MediaRouteSelector.Builder()
                .addControlCategory(StubMediaRouteProviderService.CATEGORY_TEST)
                .build();
        MediaRouter2TestActivity.startActivity(mContext);
        getInstrumentation().runOnMainSync(() -> {
            // The placeholder callback with active scanning keeps discovery (and
            // thus the service binding) alive for the duration of each test.
            MediaRouteSelector placeholderSelector = new MediaRouteSelector.Builder()
                    .addControlCategory("placeholder category").build();
            mRouter.addCallback(placeholderSelector, mPlaceholderCallback,
                    MediaRouter.CALLBACK_FLAG_PERFORM_ACTIVE_SCAN);
        });
        // Poll until the service is bound and the MR2 adapter is reachable.
        new PollingCheck(TIMEOUT_MS) {
            @Override
            protected boolean check() {
                mService = StubMediaRouteProviderService.getInstance();
                if (mService != null && mService.getMediaRouteProvider() != null) {
                    mProvider = (StubMediaRouteProviderService.StubMediaRouteProvider)
                            mService.getMediaRouteProvider();
                    mServiceImpl = (MediaRouteProviderService.MediaRouteProviderServiceImplApi30)
                            mService.mImpl;
                    mMr2ProviderServiceAdapter = mServiceImpl.mMR2ProviderServiceAdapter;
                    return mMr2ProviderServiceAdapter != null;
                }
                return false;
            }
        }.run();
        getInstrumentation().runOnMainSync(() -> {
            mProvider.initializeRoutes();
            mProvider.publishRoutes();
        });
    }
    /** Removes every callback registered during the test and resets global router state. */
    @After
    public void tearDown() {
        getInstrumentation().runOnMainSync(() -> {
            mRouter.removeCallback(mPlaceholderCallback);
            for (MediaRouter.Callback callback : mCallbacks) {
                mRouter.removeCallback(callback);
            }
            mCallbacks.clear();
            MediaRouterTestHelper.resetMediaRouter();
        });
        MediaRouter2TestActivity.finishActivity();
    }
    /**
     * Selects a route through the MR1 API, then releases the resulting routing
     * session from the system (MR2) side, and verifies the route is unselected
     * with UNSELECT_REASON_STOPPED and becomes enabled again afterwards.
     */
    @Test
    @MediumTest
    public void selectFromMr1AndStopFromSystem_unselect() throws Exception {
        CountDownLatch onRouteSelectedLatch = new CountDownLatch(1);
        CountDownLatch onRouteUnselectedLatch = new CountDownLatch(1);
        CountDownLatch onRouteEnabledLatch = new CountDownLatch(1);
        String descriptorId = StubMediaRouteProviderService.ROUTE_ID1;
        addCallback(new MediaRouter.Callback() {
            @Override
            public void onRouteSelected(@NonNull MediaRouter router,
                    @NonNull RouteInfo selectedRoute, int reason,
                    @NonNull RouteInfo requestedRoute) {
                if (TextUtils.equals(selectedRoute.getDescriptorId(), descriptorId)
                        && reason == MediaRouter.UNSELECT_REASON_ROUTE_CHANGED) {
                    onRouteSelectedLatch.countDown();
                }
            }
            @Override
            public void onRouteUnselected(MediaRouter router, RouteInfo route, int reason) {
                if (TextUtils.equals(route.getDescriptorId(), descriptorId)
                        && reason == MediaRouter.UNSELECT_REASON_STOPPED) {
                    onRouteUnselectedLatch.countDown();
                }
            }
            @Override
            public void onRouteChanged(MediaRouter router, RouteInfo route) {
                // Only count "enabled" changes that happen after the unselect.
                if (onRouteUnselectedLatch.getCount() == 0
                        && TextUtils.equals(route.getDescriptorId(), descriptorId)
                        && route.isEnabled()) {
                    onRouteEnabledLatch.countDown();
                }
            }
        });
        waitForRoutesAdded();
        assertNotNull(mRoutes);
        RouteInfo routeToSelect = mRoutes.get(descriptorId);
        assertNotNull(routeToSelect);
        getInstrumentation().runOnMainSync(() -> mRouter.selectRoute(routeToSelect));
        assertTrue(onRouteSelectedLatch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
        // Wait for a session being created.
        PollingCheck.waitFor(TIMEOUT_MS,
                () -> !mMr2ProviderServiceAdapter.getAllSessionInfo().isEmpty());
        //TODO: Find a correct session info
        for (RoutingSessionInfo sessionInfo : mMr2ProviderServiceAdapter.getAllSessionInfo()) {
            getInstrumentation().runOnMainSync(() ->
                    mMr2ProviderServiceAdapter.onReleaseSession(
                            MediaRoute2ProviderService.REQUEST_ID_NONE,
                            sessionInfo.getId()));
        }
        assertTrue(onRouteUnselectedLatch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
        // Make sure the route is enabled
        assertTrue(onRouteEnabledLatch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
    }
    /**
     * Simulates the death of every client binder after a route is selected and
     * verifies that all routing sessions are released as a result.
     */
    @SmallTest
    @Test
    public void onBinderDied_releaseRoutingSessions() throws Exception {
        String descriptorId = StubMediaRouteProviderService.ROUTE_ID1;
        waitForRoutesAdded();
        assertNotNull(mRoutes);
        RouteInfo routeToSelect = mRoutes.get(descriptorId);
        assertNotNull(routeToSelect);
        getInstrumentation().runOnMainSync(() -> mRouter.selectRoute(routeToSelect));
        // Wait for a session being created.
        PollingCheck.waitFor(TIMEOUT_MS,
                () -> !mMr2ProviderServiceAdapter.getAllSessionInfo().isEmpty());
        try {
            // Snapshot the messengers first: onBinderDied mutates mClients.
            List<Messenger> messengers =
                    mServiceImpl.mClients.stream().map(client -> client.mMessenger)
                            .collect(Collectors.toList());
            getInstrumentation().runOnMainSync(() ->
                    messengers.forEach(mServiceImpl::onBinderDied));
            // It should have no session info.
            PollingCheck.waitFor(TIMEOUT_MS,
                    () -> mMr2ProviderServiceAdapter.getAllSessionInfo().isEmpty());
        } finally {
            // Rebind for future tests
            getInstrumentation().runOnMainSync(
                    () -> {
                        MediaRouter.sGlobal.mRegisteredProviderWatcher.stop();
                        MediaRouter.sGlobal.mRegisteredProviderWatcher.start();
                    });
        }
    }
@SmallTest
@Test
public void setRouterParams_onRouteParamsChangedCalled() throws Exception {
    // Captures the params delivered to the callback; a one-element array is
    // used so the anonymous callback can write into an effectively-final slot.
    MediaRouterParams[] receivedParams = new MediaRouterParams[1];
    CountDownLatch paramsChangedLatch = new CountDownLatch(1);
    addCallback(new MediaRouter.Callback() {
        @Override
        public void onRouterParamsChanged(MediaRouter router, MediaRouterParams params) {
            receivedParams[0] = params;
            paramsChangedLatch.countDown();
        }
    });

    // Build params carrying a marker extra, then set them on the main thread.
    Bundle extras = new Bundle();
    extras.putString("test-key", "test-value");
    MediaRouterParams paramsToSet = new MediaRouterParams.Builder().setExtras(extras).build();
    getInstrumentation().runOnMainSync(() -> mRouter.setRouterParams(paramsToSet));

    // The callback must fire and report the same extras we set.
    assertTrue(paramsChangedLatch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
    Bundle reportedExtras = receivedParams[0].getExtras();
    assertNotNull(reportedExtras);
    assertEquals("test-value", reportedExtras.getString("test-key"));
}
/** Registers the callback with discovery + active-scan flags and remembers it for teardown. */
void addCallback(MediaRouter.Callback callback) {
    int flags = MediaRouter.CALLBACK_FLAG_REQUEST_DISCOVERY
            | MediaRouter.CALLBACK_FLAG_PERFORM_ACTIVE_SCAN;
    getInstrumentation().runOnMainSync(() -> mRouter.addCallback(mSelector, callback, flags));
    mCallbacks.add(callback);
}
/**
 * Blocks (up to TIMEOUT_MS) until a non-default, non-Bluetooth route has been
 * added, then snapshots the router's routes into {@code mRoutes} keyed by
 * descriptor id.
 */
void waitForRoutesAdded() throws Exception {
    CountDownLatch routeAddedLatch = new CountDownLatch(1);
    addCallback(new MediaRouter.Callback() {
        @Override
        public void onRouteAdded(MediaRouter router, RouteInfo route) {
            if (!route.isDefaultOrBluetooth()) {
                routeAddedLatch.countDown();
            }
        }
    });
    routeAddedLatch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS);
    // Collect the route map on the main thread, where the router is owned.
    getInstrumentation().runOnMainSync(() ->
            mRoutes = mRouter.getRoutes().stream()
                    .collect(Collectors.toMap(RouteInfo::getDescriptorId, route -> route)));
}
}
| |
/*
* Copyright (C) 2011 Thomas Akehurst
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.tomakehurst.wiremock.verification;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.github.tomakehurst.wiremock.common.Dates;
import com.github.tomakehurst.wiremock.common.Json;
import com.github.tomakehurst.wiremock.http.*;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import java.net.URI;
import java.util.Collection;
import java.util.Date;
import java.util.Map;
import java.util.Set;
import java.nio.charset.Charset;
import static com.google.common.base.Charsets.UTF_8;
import static com.github.tomakehurst.wiremock.common.Encoding.decodeBase64;
import static com.github.tomakehurst.wiremock.common.Encoding.encodeBase64;
import static com.github.tomakehurst.wiremock.common.Strings.stringFromBytes;
import static com.github.tomakehurst.wiremock.common.Urls.splitQuery;
import static com.github.tomakehurst.wiremock.http.HttpHeaders.copyOf;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.collect.FluentIterable.from;
/**
 * An immutable, JSON-serializable snapshot of a received HTTP request.
 * Created either from a live {@link Request} via {@link #createFrom(Request)}
 * or deserialized from JSON (the body travels base64-encoded).
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class LoggedRequest implements Request {

    private final String url;
    private final String absoluteUrl;
    private final String clientIp;
    private final RequestMethod method;
    private final HttpHeaders headers;
    private final Map<String, Cookie> cookies;
    private final Map<String, QueryParameter> queryParams;
    private final byte[] body;
    private final boolean isBrowserProxyRequest;
    private final Date loggedDate;
    private final Collection<Part> multiparts;

    /**
     * Captures an immutable snapshot of the given request, stamped with the
     * current date.
     */
    public static LoggedRequest createFrom(Request request) {
        return new LoggedRequest(request.getUrl(),
                request.getAbsoluteUrl(),
                request.getMethod(),
                request.getClientIp(),
                copyOf(request.getHeaders()),
                ImmutableMap.copyOf(request.getCookies()),
                request.isBrowserProxyRequest(),
                new Date(),
                request.getBody(),
                request.getParts()
        );
    }

    /**
     * JSON deserialization constructor. The body is taken from the
     * "bodyAsBase64" property; the plain "body" property is accepted for
     * binding compatibility but ignored (it is derived on serialization).
     */
    @JsonCreator
    public LoggedRequest(
            @JsonProperty("url") String url,
            @JsonProperty("absoluteUrl") String absoluteUrl,
            @JsonProperty("method") RequestMethod method,
            @JsonProperty("clientIp") String clientIp,
            @JsonProperty("headers") HttpHeaders headers,
            @JsonProperty("cookies") Map<String, Cookie> cookies,
            @JsonProperty("browserProxyRequest") boolean isBrowserProxyRequest,
            @JsonProperty("loggedDate") Date loggedDate,
            @JsonProperty("bodyAsBase64") String bodyAsBase64,
            @JsonProperty("body") String ignoredBodyOnlyUsedForBinding,
            @JsonProperty("multiparts") Collection<Part> multiparts) {
        this(url, absoluteUrl, method, clientIp, headers, cookies, isBrowserProxyRequest, loggedDate, decodeBase64(bodyAsBase64), multiparts);
    }

    public LoggedRequest(
            @JsonProperty("url") String url,
            @JsonProperty("absoluteUrl") String absoluteUrl,
            @JsonProperty("method") RequestMethod method,
            @JsonProperty("clientIp") String clientIp,
            @JsonProperty("headers") HttpHeaders headers,
            @JsonProperty("cookies") Map<String, Cookie> cookies,
            @JsonProperty("browserProxyRequest") boolean isBrowserProxyRequest,
            @JsonProperty("loggedDate") Date loggedDate,
            @JsonProperty("body") byte[] body,
            @JsonProperty("multiparts") Collection<Part> multiparts) {
        this.url = url;
        this.absoluteUrl = absoluteUrl;
        this.clientIp = clientIp;
        this.method = method;
        this.body = body;
        this.headers = headers;
        this.cookies = cookies;
        // Query parameters are derived from the URL rather than stored.
        this.queryParams = splitQuery(URI.create(url));
        this.isBrowserProxyRequest = isBrowserProxyRequest;
        this.loggedDate = loggedDate;
        this.multiparts = multiparts;
    }

    @Override
    public String getUrl() {
        return url;
    }

    @Override
    public String getAbsoluteUrl() {
        return absoluteUrl;
    }

    @Override
    public RequestMethod getMethod() {
        return method;
    }

    @Override
    public String getClientIp() {
        return clientIp;
    }

    /** Returns the first value of the named header, or null when absent. */
    @Override
    @JsonIgnore
    public String getHeader(String key) {
        HttpHeader header = header(key);
        if (header.isPresent()) {
            return header.firstValue();
        }

        return null;
    }

    @Override
    public HttpHeader header(String key) {
        return headers.getHeader(key);
    }

    @Override
    public ContentTypeHeader contentTypeHeader() {
        if (headers != null) {
            return headers.getContentTypeHeader();
        }
        return null;
    }

    /** Charset from the Content-Type header when present, otherwise UTF-8. */
    private Charset encodingFromContentTypeHeaderOrUtf8() {
        ContentTypeHeader contentTypeHeader = contentTypeHeader();
        if (contentTypeHeader != null) {
            return contentTypeHeader.charset();
        }
        return UTF_8;
    }

    @Override
    public boolean containsHeader(String key) {
        return getHeader(key) != null;
    }

    @Override
    public Map<String, Cookie> getCookies() {
        return cookies;
    }

    @Override
    public byte[] getBody() {
        return body;
    }

    /** Body decoded as text, serialized to JSON under "body". */
    @Override
    @JsonProperty("body")
    public String getBodyAsString() {
        return stringFromBytes(body, encodingFromContentTypeHeaderOrUtf8());
    }

    /** Raw body, serialized to JSON base64-encoded under "bodyAsBase64". */
    @Override
    @JsonProperty("bodyAsBase64")
    public String getBodyAsBase64() {
        return encodeBase64(body);
    }

    @Override
    @JsonIgnore
    public Set<String> getAllHeaderKeys() {
        return headers.keys();
    }

    /** Returns the named query parameter, or an absent placeholder (never null). */
    @Override
    public QueryParameter queryParameter(String key) {
        return firstNonNull(queryParams.get(key), QueryParameter.absent(key));
    }

    @JsonProperty("queryParams")
    public Map<String, QueryParameter> getQueryParams() {
        return queryParams;
    }

    public HttpHeaders getHeaders() {
        return headers;
    }

    @Override
    public boolean isBrowserProxyRequest() {
        return isBrowserProxyRequest;
    }

    /** A logged request is never a proxy re-issue, so there is no original. */
    @JsonIgnore
    @Override
    public Optional<Request> getOriginalRequest() {
        return Optional.absent();
    }

    public Date getLoggedDate() {
        return loggedDate;
    }

    public String getLoggedDateString() {
        return Dates.format(loggedDate);
    }

    @Override
    public String toString() {
        return Json.write(this);
    }

    @JsonIgnore
    @Override
    public boolean isMultipart() {
        return (multiparts != null && !multiparts.isEmpty());
    }

    @JsonIgnore
    @Override
    public Collection<Part> getParts() {
        return multiparts;
    }

    /**
     * Returns the multipart part with the given name, or null when there is no
     * such part (or no parts at all).
     */
    @JsonIgnore
    @Override
    public Part getPart(final String name) {
        if (multiparts == null || name == null) {
            return null;
        }
        // BUGFIX: use orNull() instead of get() — get() throws
        // IllegalStateException when no part with this name exists.
        return from(multiparts).firstMatch(new Predicate<Part>() {
            @Override
            public boolean apply(Part input) {
                return (name.equals(input.getName()));
            }
        }).orNull();
    }
}
| |
/*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.xslt;
import static org.pentaho.di.job.entry.validator.AbstractFileValidator.putVariableSpace;
import static org.pentaho.di.job.entry.validator.AndValidator.putValidators;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.andValidator;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.fileExistsValidator;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.notBlankValidator;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import java.util.Properties;
import javax.xml.transform.Source;
import javax.xml.transform.Templates;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.apache.commons.vfs.FileObject;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.job.entry.validator.ValidatorContext;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceEntry;
import org.pentaho.di.resource.ResourceEntry.ResourceType;
import org.pentaho.di.resource.ResourceReference;
import org.w3c.dom.Node;
/**
* This defines a 'xslt' job entry.
*
* @author Samatar Hassan
* @since 02-03-2007
*
*/
public class JobEntryXSLT extends JobEntryBase implements Cloneable, JobEntryInterface
{
private static Class<?> PKG = JobEntryXSLT.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$
public static String FACTORY_JAXP="JAXP";
public static String FACTORY_SAXON="SAXON";
private String xmlfilename;
private String xslfilename;
private String outputfilename;
public int iffileexists;
private boolean addfiletoresult;
private String xsltfactory;
private boolean filenamesfromprevious;
/** output property name*/
private String outputPropertyName[];
/** output property value */
private String outputPropertyValue[];
/** parameter name*/
private String parameterName[];
/** parameter field */
private String parameterField[];
private int nrParams;
private String nameOfParams[];
private String valueOfParams[];
private boolean useParameters;
private Properties outputProperties;
private boolean setOutputProperties;
/**
 * Creates a job entry with the given name and default settings: JAXP
 * transformer factory, "do nothing" when the output file exists
 * (iffileexists == 1), no parameters and no output properties.
 *
 * @param n the name of the job entry
 */
public JobEntryXSLT(String n)
{
    super(n, "");
    xmlfilename = null;
    xslfilename = null;
    outputfilename = null;
    iffileexists = 1; // 1 = skip (do nothing) when the output file already exists
    addfiletoresult = false;
    filenamesfromprevious = false;
    xsltfactory = FACTORY_JAXP;
    // Start with empty parameter/output-property arrays. The original
    // per-element initialization loops were dead code: both counts were
    // hard-coded to zero, so the loop bodies never ran.
    allocate(0, 0);
    setID(-1L);
}
/**
 * (Re)creates the parameter and output-property arrays with the given sizes.
 * Any previous contents are discarded.
 *
 * @param nrParameters number of XSL parameter entries
 * @param outputProps  number of transformer output properties
 */
public void allocate(int nrParameters, int outputProps)
{
    parameterName = new String[nrParameters];
    parameterField = new String[nrParameters];
    outputPropertyName = new String[outputProps];
    outputPropertyValue = new String[outputProps];
}

/** Creates an unnamed job entry with default settings. */
public JobEntryXSLT()
{
    this("");
}
/**
 * Deep-copies this job entry: the base clone is shallow, so the parameter
 * and output-property arrays are re-allocated and copied element-wise.
 */
public Object clone()
{
    JobEntryXSLT copy = (JobEntryXSLT) super.clone();
    int paramCount = parameterName.length;
    int propCount = outputPropertyName.length;
    copy.allocate(paramCount, propCount);
    System.arraycopy(parameterName, 0, copy.parameterName, 0, paramCount);
    System.arraycopy(parameterField, 0, copy.parameterField, 0, paramCount);
    System.arraycopy(outputPropertyName, 0, copy.outputPropertyName, 0, propCount);
    System.arraycopy(outputPropertyValue, 0, copy.outputPropertyValue, 0, propCount);
    return copy;
}
/**
 * Serializes this entry's settings to the XML fragment stored inside the
 * job definition, including the parameter and output-property lists.
 *
 * @return the XML representation of this entry's configuration
 */
public String getXML()
{
    StringBuffer retval = new StringBuffer(50);

    // Base attributes (name, description, ...) come first.
    retval.append(super.getXML());
    retval.append(" ").append(XMLHandler.addTagValue("xmlfilename", xmlfilename));
    retval.append(" ").append(XMLHandler.addTagValue("xslfilename", xslfilename));
    retval.append(" ").append(XMLHandler.addTagValue("outputfilename", outputfilename));
    retval.append(" ").append(XMLHandler.addTagValue("iffileexists", iffileexists));
    retval.append(" ").append(XMLHandler.addTagValue("addfiletoresult", addfiletoresult));
    retval.append(" ").append(XMLHandler.addTagValue("filenamesfromprevious", filenamesfromprevious));
    retval.append(" ").append(XMLHandler.addTagValue("xsltfactory", xsltfactory));

    // One <parameter> element per field/name pair.
    retval.append(" <parameters>").append(Const.CR); //$NON-NLS-1$
    for (int i = 0; i < parameterName.length; i++)
    {
        retval.append(" <parameter>").append(Const.CR); //$NON-NLS-1$
        retval.append(" ").append(XMLHandler.addTagValue("field", parameterField[i])); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("name", parameterName[i])); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" </parameter>").append(Const.CR); //$NON-NLS-1$
    }
    retval.append(" </parameters>").append(Const.CR); //$NON-NLS-1$

    // One <outputproperty> element per name/value pair.
    retval.append(" <outputproperties>").append(Const.CR); //$NON-NLS-1$
    for (int i = 0; i < outputPropertyName.length; i++)
    {
        retval.append(" <outputproperty>").append(Const.CR); //$NON-NLS-1$
        retval.append(" ").append(XMLHandler.addTagValue("name", outputPropertyName[i])); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("value", outputPropertyValue[i])); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" </outputproperty>").append(Const.CR); //$NON-NLS-1$
    }
    retval.append(" </outputproperties>").append(Const.CR); //$NON-NLS-1$

    return retval.toString();
}
/**
 * Restores this entry's settings from its XML job-definition node
 * (the counterpart of {@link #getXML()}).
 *
 * @throws KettleXMLException when the node cannot be parsed
 */
public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException
{
    try
    {
        super.loadXML(entrynode, databases, slaveServers);
        xmlfilename = XMLHandler.getTagValue(entrynode, "xmlfilename");
        xslfilename = XMLHandler.getTagValue(entrynode, "xslfilename");
        outputfilename = XMLHandler.getTagValue(entrynode, "outputfilename");
        iffileexists = Const.toInt(XMLHandler.getTagValue(entrynode, "iffileexists"), -1);
        addfiletoresult = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "addfiletoresult"));
        filenamesfromprevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "filenamesfromprevious"));
        xsltfactory = XMLHandler.getTagValue(entrynode, "xsltfactory");
        // Older job definitions have no factory tag; default to JAXP.
        if (xsltfactory == null) xsltfactory = FACTORY_JAXP;

        Node parametersNode = XMLHandler.getSubNode(entrynode, "parameters"); //$NON-NLS-1$
        int nrparams = XMLHandler.countNodes(parametersNode, "parameter"); //$NON-NLS-1$

        Node parametersOutputProps = XMLHandler.getSubNode(entrynode, "outputproperties"); //$NON-NLS-1$
        int nroutputprops = XMLHandler.countNodes(parametersOutputProps, "outputproperty");
        allocate(nrparams, nroutputprops);

        for (int i = 0; i < nrparams; i++)
        {
            Node anode = XMLHandler.getSubNodeByNr(parametersNode, "parameter", i); //$NON-NLS-1$
            parameterField[i] = XMLHandler.getTagValue(anode, "field"); //$NON-NLS-1$
            parameterName[i] = XMLHandler.getTagValue(anode, "name"); //$NON-NLS-1$
        }
        for (int i = 0; i < nroutputprops; i++)
        {
            Node anode = XMLHandler.getSubNodeByNr(parametersOutputProps, "outputproperty", i); //$NON-NLS-1$
            outputPropertyName[i] = XMLHandler.getTagValue(anode, "name"); //$NON-NLS-1$
            outputPropertyValue[i] = XMLHandler.getTagValue(anode, "value"); //$NON-NLS-1$
        }

    }
    catch (KettleXMLException xe)
    {
        throw new KettleXMLException("Unable to load job entry of type 'xslt' from XML node", xe);
    }
}
/**
 * Restores this entry's settings from the repository (the counterpart of
 * {@link #saveRep(Repository, ObjectId)}).
 *
 * @throws KettleException when the attributes cannot be read
 */
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException
{
    try
    {
        xmlfilename = rep.getJobEntryAttributeString(id_jobentry, "xmlfilename");
        xslfilename = rep.getJobEntryAttributeString(id_jobentry, "xslfilename");
        outputfilename = rep.getJobEntryAttributeString(id_jobentry, "outputfilename");
        iffileexists = (int) rep.getJobEntryAttributeInteger(id_jobentry, "iffileexists");
        addfiletoresult = rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
        filenamesfromprevious = rep.getJobEntryAttributeBoolean(id_jobentry, "filenamesfromprevious");
        xsltfactory = rep.getJobEntryAttributeString(id_jobentry, "xsltfactory");
        // Older repository entries have no factory attribute; default to JAXP.
        if (xsltfactory == null) xsltfactory = FACTORY_JAXP;

        // Indexed attributes: one row per parameter / output property.
        int nrparams = rep.countNrJobEntryAttributes(id_jobentry, "param_name"); //$NON-NLS-1$
        int nroutputprops = rep.countNrJobEntryAttributes(id_jobentry, "output_property_name"); //$NON-NLS-1$
        allocate(nrparams, nroutputprops);

        for (int i = 0; i < nrparams; i++)
        {
            parameterField[i] = rep.getJobEntryAttributeString(id_jobentry, i, "param_field"); //$NON-NLS-1$
            parameterName[i] = rep.getJobEntryAttributeString(id_jobentry, i, "param_name"); //$NON-NLS-1$
        }
        for (int i = 0; i < nroutputprops; i++)
        {
            outputPropertyName[i] = rep.getJobEntryAttributeString(id_jobentry, i, "output_property_name"); //$NON-NLS-1$
            outputPropertyValue[i] = rep.getJobEntryAttributeString(id_jobentry, i, "output_property_value"); //$NON-NLS-1$
        }
    }
    catch (KettleException dbe)
    {
        throw new KettleException("Unable to load job entry of type 'xslt' from the repository for id_jobentry=" + id_jobentry, dbe);
    }
}
/**
 * Persists this entry's settings to the repository, including the indexed
 * parameter and output-property attributes.
 *
 * @throws KettleException when the attributes cannot be written
 */
public void saveRep(Repository rep, ObjectId id_job)
        throws KettleException
{
    try
    {
        rep.saveJobEntryAttribute(id_job, getObjectId(), "xmlfilename", xmlfilename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "xslfilename", xslfilename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "outputfilename", outputfilename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "iffileexists", iffileexists);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "addfiletoresult", addfiletoresult);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "filenamesfromprevious", filenamesfromprevious);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "xsltfactory", xsltfactory);

        // Indexed attributes: one row per parameter / output property.
        for (int i = 0; i < parameterName.length; i++)
        {
            rep.saveJobEntryAttribute(id_job, getObjectId(), i, "param_field", parameterField[i]); //$NON-NLS-1$
            rep.saveJobEntryAttribute(id_job, getObjectId(), i, "param_name", parameterName[i]); //$NON-NLS-1$
        }
        for (int i = 0; i < outputPropertyName.length; i++)
        {
            rep.saveJobEntryAttribute(id_job, getObjectId(), i, "output_property_name", outputPropertyName[i]); //$NON-NLS-1$
            rep.saveJobEntryAttribute(id_job, getObjectId(), i, "output_property_value", outputPropertyValue[i]); //$NON-NLS-1$
        }
    }
    catch (KettleDatabaseException dbe)
    {
        throw new KettleException("Unable to save job entry of type 'xslt' to the repository for id_job=" + id_job, dbe);
    }
}
/** @return the configured transformer factory id (FACTORY_JAXP or FACTORY_SAXON). */
public String getXSLTFactory()
{
    return xsltfactory;
}

/** @param xsltfactoryin the transformer factory id to use (FACTORY_JAXP or FACTORY_SAXON). */
public void setXSLTFactory(String xsltfactoryin)
{
    xsltfactory = xsltfactoryin;
}

/** @return the XML filename with environment variables resolved. */
public String getRealxmlfilename()
{
    return environmentSubstitute(getxmlFilename());
}

/** @return the output filename with environment variables resolved. */
public String getoutputfilename()
{
    return environmentSubstitute(getoutputFilename());
}

/** @return true when the xml/xsl/output filenames come from previous result rows. */
public boolean isFilenamesFromPrevious()
{
    return filenamesfromprevious;
}

/** @param filenamesfromprevious whether to take filenames from previous result rows. */
public void setFilenamesFromPrevious(boolean filenamesfromprevious)
{
    this.filenamesfromprevious = filenamesfromprevious;
}

/** @return the XSL filename with environment variables resolved. */
public String getRealxslfilename()
{
    return environmentSubstitute(getxslFilename());
}
/**
 * Runs the XSL transformation(s). Either processes the single configured
 * xml/xsl/output triple, or — when "filenames from previous" is enabled —
 * one triple per result row from the previous entry (columns: xml file,
 * xsl file, output file).
 *
 * @param previousResult the result of the previous job entry execution
 * @param nr the job entry number (unused here)
 * @return the (same) Result, updated with the success flag, error count and
 *         number of files written
 * @throws KettleException when a configured parameter has no value
 */
public Result execute(Result previousResult, int nr) throws KettleException
{
    Result result = previousResult;
    int NrErrors = 0;
    int NrSuccess = 0;

    // Check output parameters: resolve variables and cache them for
    // processOneXMLFile().
    int nrOutputProps = getOutputPropertyName() == null ? 0 : getOutputPropertyName().length;
    if (nrOutputProps > 0) {
        outputProperties = new Properties();
        for (int i = 0; i < nrOutputProps; i++) {
            outputProperties.put(getOutputPropertyName()[i], environmentSubstitute(getOutputPropertyValue()[i]));
        }
        setOutputProperties = true;
    }

    // Check parameters: resolve variables; a blank value is a configuration error.
    nrParams = getParameterField() == null ? 0 : getParameterField().length;
    if (nrParams > 0) {
        nameOfParams = new String[nrParams];
        valueOfParams = new String[nrParams];
        for (int i = 0; i < nrParams; i++) {
            String name = environmentSubstitute(getParameterName()[i]);
            String value = environmentSubstitute(getParameterField()[i]);
            if (Const.isEmpty(value)) {
                throw new KettleStepException(BaseMessages.getString(PKG, "Xslt.Exception.ParameterFieldMissing", name, i));
            }
            nameOfParams[i] = name;
            valueOfParams[i] = value;
        }
        useParameters = true;
    }

    List<RowMetaAndData> rows = result.getRows();
    if (isFilenamesFromPrevious())
    {
        if (log.isDetailed())
            logDetailed(BaseMessages.getString(PKG, "JobEntryXSLT.Log.ArgFromPrevious.Found", (rows != null ? rows.size() : 0) + ""));
    }

    if (isFilenamesFromPrevious() && rows != null) // Copy the input row to the (command line) arguments
    {
        RowMetaAndData resultRow = null;
        for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++)
        {
            resultRow = rows.get(iteration);

            // Get filenames (xml, xsl, output filename)
            String xmlfilename_previous = resultRow.getString(0, null);
            String xslfilename_previous = resultRow.getString(1, null);
            String ouputfilename_previous = resultRow.getString(2, null);

            if (!Const.isEmpty(xmlfilename_previous) && !Const.isEmpty(xslfilename_previous) && !Const.isEmpty(ouputfilename_previous))
            {
                if (processOneXMLFile(xmlfilename_previous, xslfilename_previous, ouputfilename_previous, result, parentJob))
                    NrSuccess++;
                else
                    NrErrors++;
            } else
            {
                // We failed!
                logError(BaseMessages.getString(PKG, "JobEntryXSLT.AllFilesNotNull.Label"));
                NrErrors++;
            }
        }
    } else
    {
        String realxmlfilename = getRealxmlfilename();
        String realxslfilename = getRealxslfilename();
        String realoutputfilename = getoutputfilename();
        if (!Const.isEmpty(realxmlfilename) && !Const.isEmpty(realxslfilename) && !Const.isEmpty(realoutputfilename))
        {
            if (processOneXMLFile(realxmlfilename, realxslfilename, realoutputfilename, result, parentJob))
                NrSuccess++;
            else
                NrErrors++;
        }
        else
        {
            // We failed!
            logError(BaseMessages.getString(PKG, "JobEntryXSLT.AllFilesNotNull.Label"));
            NrErrors++;
        }
    }

    // The entry succeeds only when no file failed.
    result.setResult(NrErrors == 0);
    result.setNrErrors(NrErrors);
    result.setNrLinesWritten(NrSuccess);

    return result;
}
/**
 * Transforms a single XML file with an XSL stylesheet and writes the result
 * to the output file, honoring the "if file exists" policy:
 * 0 = create a new file with a unique (timestamped) name,
 * 1 = do nothing (counts as success), 2 = fail.
 *
 * @param xmlfilename    resolved path of the source XML file
 * @param xslfilename    resolved path of the XSL stylesheet
 * @param outputfilename resolved path of the output file
 * @param result         job result to register the output file in (when enabled)
 * @param parentJob      the job this entry runs in (for the result file name)
 * @return true when the transformation succeeded or was skipped per policy
 */
private boolean processOneXMLFile(String xmlfilename, String xslfilename, String outputfilename, Result result, Job parentJob)
{
    boolean retval = false;
    FileObject xmlfile = null;
    FileObject xslfile = null;
    FileObject outputfile = null;

    try
    {
        xmlfile = KettleVFS.getFileObject(xmlfilename, this);
        xslfile = KettleVFS.getFileObject(xslfilename, this);
        outputfile = KettleVFS.getFileObject(outputfilename, this);

        if (xmlfile.exists() && xslfile.exists())
        {
            if (outputfile.exists() && iffileexists == 2)
            {
                // Output file exists and the user chose to fail.
                logError(BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileExists1.Label")
                        + outputfilename + BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileExists2.Label"));
                return retval;
            }
            else if (outputfile.exists() && iffileexists == 1)
            {
                // Output file exists and the user chose to skip: do nothing, succeed.
                if (log.isDebug()) logDebug(BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileExists1.Label")
                        + outputfilename + BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileExists2.Label"));
                retval = true;
                return retval;
            }
            else
            {
                if (outputfile.exists() && iffileexists == 0)
                {
                    // The output file exists and the user wants a new unique name:
                    // insert a timestamp before a trailing 4-character ".xxx"
                    // extension when present, otherwise append it.
                    String wildcard = outputfilename.substring(outputfilename.length() - 4, outputfilename.length());
                    if (wildcard.substring(0, 1).equals("."))
                    {
                        // Find wildcard
                        outputfilename = outputfilename.substring(0, outputfilename.length() - 4) +
                                "_" + StringUtil.getFormattedDateTimeNow(true) + wildcard;
                    }
                    else
                    {
                        // did not find wildcard
                        outputfilename = outputfilename + "_" + StringUtil.getFormattedDateTimeNow(true);
                    }
                    // BUGFIX: re-resolve the output FileObject so the transformation
                    // actually writes to the new unique name; previously the stream
                    // was still opened on the original (existing) file.
                    outputfile = KettleVFS.getFileObject(outputfilename, this);
                    if (log.isDebug())
                    {
                        logDebug(BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileExists1.Label") +
                                outputfilename + BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileExists2.Label"));
                        logDebug(BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileNameChange1.Label") + outputfilename +
                                BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileNameChange2.Label"));
                    }
                }

                // Create transformer factory: JAXP default, or SAXON when configured.
                TransformerFactory factory = TransformerFactory.newInstance();
                if (xsltfactory.equals(FACTORY_SAXON))
                {
                    // Set the TransformerFactory to the SAXON implementation.
                    factory = (TransformerFactory) new net.sf.saxon.TransformerFactoryImpl();
                }
                if (log.isDetailed()) log.logDetailed(BaseMessages.getString(PKG, "JobEntryXSL.Log.TransformerFactoryInfos"), BaseMessages.getString(PKG, "JobEntryXSL.Log.TransformerFactory", factory.getClass().getName()));

                InputStream xslInputStream = KettleVFS.getInputStream(xslfile);
                InputStream xmlInputStream = KettleVFS.getInputStream(xmlfile);
                OutputStream os = null;
                try {
                    // Use the factory to create a template containing the xsl file
                    Templates template = factory.newTemplates(new StreamSource(xslInputStream));

                    // Use the template to create a transformer
                    Transformer xformer = template.newTransformer();
                    if (log.isDetailed()) log.logDetailed(BaseMessages.getString(PKG, "JobEntryXSL.Log.TransformerClassInfos"), BaseMessages.getString(PKG, "JobEntryXSL.Log.TransformerClass", xformer.getClass().getName()));

                    // Do we need to set output properties?
                    if (setOutputProperties) {
                        xformer.setOutputProperties(outputProperties);
                    }

                    // Do we need to pass parameters?
                    if (useParameters) {
                        for (int i = 0; i < nrParams; i++) {
                            xformer.setParameter(nameOfParams[i], valueOfParams[i]);
                        }
                    }

                    // Prepare the input and output files
                    Source source = new StreamSource(xmlInputStream);
                    os = KettleVFS.getOutputStream(outputfile, false);
                    StreamResult resultat = new StreamResult(os);

                    // Apply the xsl file to the source file and write the result to the output file
                    xformer.transform(source, resultat);

                    if (isAddFileToResult()) {
                        // Add output filename to output files
                        ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(outputfilename, this), parentJob.getJobname(), toString());
                        result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                    }

                    // Everything is OK
                    retval = true;
                } finally {
                    // Best-effort close of all streams; failures on close are
                    // deliberately ignored so they cannot mask a transform error.
                    try {
                        xslInputStream.close();
                    } catch (IOException ignored) {
                        // ignore IO Exception on close
                    }
                    try {
                        xmlInputStream.close();
                    } catch (IOException ignored) {
                        // ignore IO Exception on close
                    }
                    try {
                        if (os != null) os.close();
                    } catch (IOException ignored) {
                        // ignore IO Exception on close
                    }
                }
            }
        }
        else
        {
            if (!xmlfile.exists())
            {
                logError(BaseMessages.getString(PKG, "JobEntryXSLT.FileDoesNotExist1.Label") +
                        xmlfilename + BaseMessages.getString(PKG, "JobEntryXSLT.FileDoesNotExist2.Label"));
            }
            if (!xslfile.exists())
            {
                // BUGFIX: report the missing XSL file, not the XML file
                // (copy-paste error previously logged xmlfilename here).
                logError(BaseMessages.getString(PKG, "JobEntryXSLT.FileDoesNotExist1.Label") +
                        xslfilename + BaseMessages.getString(PKG, "JobEntryXSLT.FileDoesNotExist2.Label"));
            }
        }
    }
    catch (Exception e)
    {
        logError(BaseMessages.getString(PKG, "JobEntryXSLT.ErrorXLST.Label") +
                BaseMessages.getString(PKG, "JobEntryXSLT.ErrorXLSTXML1.Label") + xmlfilename +
                BaseMessages.getString(PKG, "JobEntryXSLT.ErrorXLSTXML2.Label") +
                BaseMessages.getString(PKG, "JobEntryXSLT.ErrorXLSTXSL1.Label") + xslfilename +
                BaseMessages.getString(PKG, "JobEntryXSLT.ErrorXLSTXSL2.Label") + e.getMessage());
    }
    finally
    {
        // Best-effort close of the VFS file objects.
        try
        {
            if (xmlfile != null)
                xmlfile.close();
            if (xslfile != null)
                xslfile.close();
            if (outputfile != null)
                outputfile.close();
        }
        catch (IOException e) { }
    }

    return retval;
}
/** This entry evaluates to true/false, so the job can branch on its outcome. */
public boolean evaluates()
{
    return true;
}

/** @param filename the source XML filename (may contain variables). */
public void setxmlFilename(String filename)
{
    this.xmlfilename = filename;
}

/** @return the raw (unsubstituted) source XML filename. */
public String getxmlFilename()
{
    return xmlfilename;
}

/** @return the raw (unsubstituted) output filename. */
public String getoutputFilename()
{
    return outputfilename;
}

/** @param outputfilename the output filename (may contain variables). */
public void setoutputFilename(String outputfilename)
{
    this.outputfilename = outputfilename;
}

/** @param filename the XSL stylesheet filename (may contain variables). */
public void setxslFilename(String filename)
{
    this.xslfilename = filename;
}

/** @return the raw (unsubstituted) XSL stylesheet filename. */
public String getxslFilename()
{
    return xslfilename;
}

/** @param addfiletoresultin whether to add the output file to the job result files. */
public void setAddFileToResult(boolean addfiletoresultin)
{
    this.addfiletoresult = addfiletoresultin;
}

/** @return true when the output file is added to the job result files. */
public boolean isAddFileToResult()
{
    return addfiletoresult;
}
/**
 * Declares the external files this entry depends on (the XML source and the
 * XSL stylesheet), with variables resolved, on top of the base dependencies.
 */
public List<ResourceReference> getResourceDependencies(JobMeta jobMeta) {
    List<ResourceReference> references = super.getResourceDependencies(jobMeta);
    boolean bothFilenamesSet = !Const.isEmpty(xslfilename) && !Const.isEmpty(xmlfilename);
    if (bothFilenamesSet) {
        ResourceReference reference = new ResourceReference(this);
        reference.getEntries().add(
                new ResourceEntry(jobMeta.environmentSubstitute(xmlfilename), ResourceType.FILE));
        reference.getEntries().add(
                new ResourceEntry(jobMeta.environmentSubstitute(xslfilename), ResourceType.FILE));
        references.add(reference);
    }
    return references;
}
@Override
public void check(List<CheckResultInterface> remarks, JobMeta jobMeta)
{
    ValidatorContext ctx = new ValidatorContext();
    putVariableSpace(ctx, getVariables());
    // The xml and xsl filenames must be non-blank AND point at existing files.
    putValidators(ctx, notBlankValidator(), fileExistsValidator());
    andValidator().validate(this, "xmlFilename", remarks, ctx);//$NON-NLS-1$
    andValidator().validate(this, "xslFilename", remarks, ctx);//$NON-NLS-1$
    // The output file only needs a non-blank name; it may not exist yet.
    andValidator().validate(this, "outputFilename", remarks, putValidators(notBlankValidator()));//$NON-NLS-1$
}
/**
* @return Returns the OutputPropertyName.
*/
public String[] getOutputPropertyName()
{
return outputPropertyName;
}
/**
* @param argumentDirection The OutputPropertyName to set.
*/
public void setOutputPropertyName(String[] argumentDirection)
{
this.outputPropertyName = argumentDirection;
}
/**
* @return Returns the OutputPropertyField.
*/
public String[] getOutputPropertyValue()
{
return outputPropertyValue;
}
/**
* @param argumentDirection The outputPropertyValue to set.
*/
public void setOutputPropertyValue(String[] argumentDirection)
{
this.outputPropertyValue = argumentDirection;
}
/**
* @return Returns the parameterName.
*/
public String[] getParameterName()
{
return parameterName;
}
/**
 * Sets the names of the parameters passed to the XSL transformation.
 *
 * @param argumentDirection the parameter names, parallel to the fields set
 *                          elsewhere on this entry
 */
public void setParameterName(String[] argumentDirection) {
    this.parameterName = argumentDirection;
}
/**
 * Returns the fields providing the values of the parameters passed to the
 * XSL transformation.
 *
 * @return the parameter fields, parallel to {@link #getParameterName()}
 */
public String[] getParameterField() {
    return this.parameterField;
}
}
| |
package org.keycloak.exportimport.util;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;
import org.codehaus.jackson.map.ObjectMapper;
import org.jboss.logging.Logger;
import org.keycloak.Config;
import org.keycloak.exportimport.Strategy;
import org.keycloak.models.AdminRoles;
import org.keycloak.models.ClientModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RealmProvider;
import org.keycloak.models.RoleModel;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.models.utils.RepresentationToModel;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.representations.idm.UserRepresentation;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.keycloak.exportimport.ExportImportConfig;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class ImportUtils {

    private static final Logger logger = Logger.getLogger(ImportUtils.class);

    /**
     * Fully imports a realm from its representation, saves it to the model and
     * returns the model of the newly created realm.
     *
     * @param session current Keycloak session (provides the realm provider)
     * @param rep realm representation to import
     * @param strategy whether to overwrite or ignore an existing realm of the same name
     * @return newly imported realm (or the existing realm when IGNORE_EXISTING
     *         is used and a realm of this name already exists)
     */
    public static RealmModel importRealm(KeycloakSession session, RealmRepresentation rep, Strategy strategy) {
        String realmName = rep.getRealm();
        RealmProvider model = session.realms();
        RealmModel realm = model.getRealmByName(realmName);

        if (realm != null) {
            if (strategy == Strategy.IGNORE_EXISTING) {
                logger.infof("Realm '%s' already exists. Import skipped", realmName);
                return realm;
            } else {
                logger.infof("Realm '%s' already exists. Removing it before import", realmName);
                if (Config.getAdminRealm().equals(realm.getId())) {
                    // Removing the master realm: delete all masterAdmin apps first
                    // due to foreign key constraints.
                    for (RealmModel currRealm : model.getRealms()) {
                        currRealm.setMasterAdminClient(null);
                    }
                }
                // TODO: For migration between versions, it should be possible to delete just realm but keep it's users
                model.removeRealm(realm.getId());
            }
        }

        // Preserve the original realm id when the representation carries one.
        realm = rep.getId() != null ? model.createRealm(rep.getId(), realmName) : model.createRealm(realmName);
        RepresentationToModel.importRealm(session, rep, realm);
        refreshMasterAdminApps(model, realm);

        // Only log when running as part of an explicit export/import action.
        if (System.getProperty(ExportImportConfig.ACTION) != null) {
            logger.infof("Realm '%s' imported", realmName);
        }
        return realm;
    }

    /**
     * Re-links the 'masterAdmin' client of every affected realm after an import.
     * When the master realm itself was imported, all realms need their master
     * admin app refreshed; otherwise only the imported realm does.
     */
    private static void refreshMasterAdminApps(RealmProvider model, RealmModel realm) {
        String adminRealmId = Config.getAdminRealm();
        if (adminRealmId.equals(realm.getId())) {
            // We just imported master realm. All 'masterAdminApps' need to be refreshed
            RealmModel adminRealm = realm;
            for (RealmModel currentRealm : model.getRealms()) {
                ClientModel masterApp = adminRealm.getClientByClientId(KeycloakModelUtils.getMasterRealmAdminApplicationClientId(currentRealm));
                if (masterApp != null) {
                    currentRealm.setMasterAdminClient(masterApp);
                } else {
                    setupMasterAdminManagement(model, currentRealm);
                }
            }
        } else {
            // Need to refresh masterApp for current realm only
            RealmModel adminRealm = model.getRealm(adminRealmId);
            ClientModel masterApp = adminRealm.getClientByClientId(KeycloakModelUtils.getMasterRealmAdminApplicationClientId(realm));
            if (masterApp != null) {
                realm.setMasterAdminClient(masterApp);
            } else {
                setupMasterAdminManagement(model, realm);
            }
        }
    }

    /**
     * Creates the master-realm admin application (and admin roles) for the given
     * realm and wires it up as the realm's master admin client.
     */
    // TODO: We need method here, so we are able to refresh masterAdmin applications after import. Should be RealmManager moved to model/api instead?
    public static void setupMasterAdminManagement(RealmProvider model, RealmModel realm) {
        RealmModel adminRealm;
        RoleModel adminRole;

        if (realm.getName().equals(Config.getAdminRealm())) {
            // The master realm hosts its own admin role plus the create-realm role.
            adminRealm = realm;
            adminRole = realm.addRole(AdminRoles.ADMIN);
            RoleModel createRealmRole = realm.addRole(AdminRoles.CREATE_REALM);
            adminRole.addCompositeRole(createRealmRole);
            createRealmRole.setDescription("${role_" + AdminRoles.CREATE_REALM + "}");
        } else {
            adminRealm = model.getRealmByName(Config.getAdminRealm());
            adminRole = adminRealm.getRole(AdminRoles.ADMIN);
        }
        adminRole.setDescription("${role_" + AdminRoles.ADMIN + "}");

        ClientModel realmAdminApp = KeycloakModelUtils.createClient(adminRealm, KeycloakModelUtils.getMasterRealmAdminApplicationClientId(realm));
        // No localized name for now
        realmAdminApp.setName(realm.getName() + " Realm");
        realmAdminApp.setBearerOnly(true);
        realm.setMasterAdminClient(realmAdminApp);

        // Mirror every realm-management role into the admin app and make the
        // admin role a composite of all of them.
        for (String r : AdminRoles.ALL_REALM_ROLES) {
            RoleModel role = realmAdminApp.addRole(r);
            role.setDescription("${role_" + r + "}");
            adminRole.addCompositeRole(role);
        }
    }

    /**
     * Fully imports one realm (or several realms) from a JSON stream.
     *
     * @param session current Keycloak session
     * @param mapper Jackson object mapper used to create the streaming parser
     * @param is stream containing either a single realm object or an array of realms
     * @param strategy whether to overwrite or ignore existing realms
     * @throws IOException on stream or JSON parse failure
     */
    public static void importFromStream(KeycloakSession session, ObjectMapper mapper, InputStream is, Strategy strategy) throws IOException {
        JsonFactory factory = mapper.getJsonFactory();
        JsonParser parser = factory.createJsonParser(is);
        try {
            parser.nextToken();

            if (parser.getCurrentToken() == JsonToken.START_ARRAY) {
                // Case with more realms in stream
                parser.nextToken();
                List<RealmRepresentation> realmReps = new ArrayList<RealmRepresentation>();
                while (parser.getCurrentToken() == JsonToken.START_OBJECT) {
                    RealmRepresentation realmRep = parser.readValueAs(RealmRepresentation.class);
                    parser.nextToken();

                    // Ensure that master realm is imported first
                    if (Config.getAdminRealm().equals(realmRep.getRealm())) {
                        realmReps.add(0, realmRep);
                    } else {
                        realmReps.add(realmRep);
                    }
                }
                for (RealmRepresentation realmRep : realmReps) {
                    importRealm(session, realmRep, strategy);
                }
            } else if (parser.getCurrentToken() == JsonToken.START_OBJECT) {
                // Case with single realm in stream
                RealmRepresentation realmRep = parser.readValueAs(RealmRepresentation.class);
                importRealm(session, realmRep, strategy);
            }
        } finally {
            parser.close();
        }
    }

    /**
     * Imports users from a JSON stream into the named realm. The stream's own
     * "realm" field must match {@code realmName}.
     *
     * <p>Assuming that it's invoked inside a transaction.</p>
     */
    public static void importUsersFromStream(KeycloakSession session, String realmName, ObjectMapper mapper, InputStream is) throws IOException {
        RealmProvider model = session.realms();
        JsonFactory factory = mapper.getJsonFactory();
        JsonParser parser = factory.createJsonParser(is);
        try {
            parser.nextToken();

            while (parser.nextToken() == JsonToken.FIELD_NAME) {
                if ("realm".equals(parser.getText())) {
                    parser.nextToken();
                    String currRealmName = parser.getText();
                    if (!currRealmName.equals(realmName)) {
                        // Message previously had the two names mislabelled; make the
                        // direction of the mismatch explicit.
                        throw new IllegalStateException("Trying to import users into invalid realm. Import requested for realm '" + realmName
                                + "' but the stream contains users for realm '" + currRealmName + "'");
                    }
                } else if ("users".equals(parser.getText())) {
                    parser.nextToken();

                    if (parser.getCurrentToken() == JsonToken.START_ARRAY) {
                        parser.nextToken();
                    }

                    // TODO: support for more transactions per single users file (if needed)
                    List<UserRepresentation> userReps = new ArrayList<UserRepresentation>();
                    while (parser.getCurrentToken() == JsonToken.START_OBJECT) {
                        UserRepresentation user = parser.readValueAs(UserRepresentation.class);
                        userReps.add(user);
                        parser.nextToken();
                    }

                    importUsers(session, model, realmName, userReps);

                    if (parser.getCurrentToken() == JsonToken.END_ARRAY) {
                        parser.nextToken();
                    }
                }
            }
        } finally {
            parser.close();
        }
    }

    /** Persists the given user representations into the named realm. */
    private static void importUsers(KeycloakSession session, RealmProvider model, String realmName, List<UserRepresentation> userReps) {
        RealmModel realm = model.getRealmByName(realmName);
        Map<String, ClientModel> apps = realm.getClientNameMap();
        for (UserRepresentation user : userReps) {
            RepresentationToModel.createUser(session, realm, user, apps);
        }
    }
}
| |
/*
* Copyright (c) 2016 Uber Technologies, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.uber.sdk.android.rides;
import android.content.Context;
import android.net.Uri;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import androidx.browser.customtabs.CustomTabsIntent;

import com.uber.sdk.android.core.Deeplink;
import com.uber.sdk.android.core.utils.AppProtocol;
import com.uber.sdk.android.core.utils.CustomTabsHelper;
import com.uber.sdk.core.client.SessionConfiguration;

import java.util.Locale;

import static com.uber.sdk.android.core.SupportedAppType.UBER;
import static com.uber.sdk.android.core.utils.Preconditions.checkNotNull;
/**
* A deeplink for requesting rides in the Uber application.
*
* @see <a href="https://developer.uber.com/v1/deep-linking/">Uber deeplink documentation</a>
*/
public class RideRequestDeeplink implements Deeplink {

    /** User agent appended to the deeplink so requests can be attributed to this SDK version. */
    private static final String USER_AGENT_DEEPLINK = String.format("rides-android-v%s-deeplink",
            BuildConfig.VERSION_NAME);

    @NonNull
    private final Uri uri;
    @NonNull
    private final Context context;
    @NonNull
    private final AppProtocol appProtocol;
    @NonNull
    private final CustomTabsHelper customTabsHelper;

    RideRequestDeeplink(@NonNull Context context,
            @NonNull Uri uri,
            @NonNull AppProtocol appProtocol,
            @NonNull CustomTabsHelper customTabsHelper) {
        this.uri = uri;
        this.context = context;
        this.appProtocol = appProtocol;
        this.customTabsHelper = customTabsHelper;
    }

    /**
     * Executes the deeplink by opening the URI in a Custom Tab (falling back to
     * a plain browser). The URI itself already targets the Uber app, the mobile
     * web site, or the app install page depending on how it was built.
     */
    public void execute() {
        final CustomTabsIntent intent = new CustomTabsIntent.Builder().build();
        customTabsHelper.openCustomTab(context, intent, uri, new CustomTabsHelper.BrowserFallback());
    }

    /**
     * @return true when the Uber rider app is installed on this device.
     */
    @Override
    public boolean isSupported() {
        return appProtocol.isInstalled(context, UBER);
    }

    /**
     * @return The {@link Uri} for the deeplink.
     */
    @NonNull
    public Uri getUri() {
        return uri;
    }

    /**
     * Builder for {@link RideRequestDeeplink} objects.
     */
    public static class Builder {

        public static final String AUTHORITY = "riderequest";
        public static final String ACTION = "action";
        public static final String SET_PICKUP = "setPickup";
        public static final String CLIENT_ID = "client_id";
        public static final String PRODUCT_ID = "product_id";
        public static final String MY_LOCATION = "my_location";
        // Location query-key suffixes; combined with "pickup"/"dropoff" prefixes.
        public static final String LATITUDE = "[latitude]";
        public static final String LONGITUDE = "[longitude]";
        public static final String NICKNAME = "[nickname]";
        public static final String FORMATTED_ADDRESS = "[formatted_address]";
        public static final String USER_AGENT = "user-agent";

        private RideParameters rideParameters;
        private SessionConfiguration sessionConfiguration;
        private Fallback fallback = Fallback.APP_INSTALL;
        private final Context context;
        private AppProtocol appProtocol;
        private CustomTabsHelper customTabsHelper;

        /**
         * @param context to execute the deeplink.
         */
        public Builder(Context context) {
            this.context = context;
        }

        /**
         * Sets the {@link RideParameters} for the deeplink.
         *
         * @return this instance of {@link Builder}
         */
        public RideRequestDeeplink.Builder setRideParameters(@NonNull RideParameters rideParameters) {
            this.rideParameters = rideParameters;
            return this;
        }

        /**
         * Sets the {@link SessionConfiguration}, which supplies the client Id.
         *
         * @return this instance of {@link Builder}
         */
        public RideRequestDeeplink.Builder setSessionConfiguration(@NonNull SessionConfiguration
                sessionConfiguration) {
            this.sessionConfiguration = sessionConfiguration;
            return this;
        }

        /**
         * Sets the fallback to use when the Uber app isn't installed.
         *
         * @return this instance of {@link Builder}
         */
        public RideRequestDeeplink.Builder setFallback(@NonNull Fallback fallback) {
            this.fallback = fallback;
            return this;
        }

        @VisibleForTesting
        RideRequestDeeplink.Builder setCustomTabsHelper(@NonNull CustomTabsHelper customTabsHelper) {
            this.customTabsHelper = customTabsHelper;
            return this;
        }

        @VisibleForTesting
        RideRequestDeeplink.Builder setAppProtocol(@NonNull AppProtocol appProtocol) {
            this.appProtocol = appProtocol;
            return this;
        }

        /**
         * Builds an {@link RideRequestDeeplink} object.
         *
         * @return {@link RideRequestDeeplink} generated from parameters
         * @throws NullPointerException when ride parameters, session configuration,
         *         or the configuration's client id are missing
         */
        @NonNull
        public RideRequestDeeplink build() {
            checkNotNull(rideParameters, "Must supply ride parameters.");
            checkNotNull(sessionConfiguration, "Must supply a Session Configuration");
            checkNotNull(sessionConfiguration.getClientId(), "Must supply client Id on Login Configuration");

            if (appProtocol == null) {
                appProtocol = new AppProtocol();
            }
            if (customTabsHelper == null) {
                customTabsHelper = new CustomTabsHelper();
            }

            final Uri.Builder builder = getUriBuilder(context, fallback);
            builder.appendQueryParameter(ACTION, SET_PICKUP);
            builder.appendQueryParameter(CLIENT_ID, sessionConfiguration.getClientId());

            if (rideParameters.getProductId() != null) {
                builder.appendQueryParameter(PRODUCT_ID, rideParameters.getProductId());
            }

            // Pickup: explicit coordinates take precedence; "my location" is a flag.
            if (rideParameters.getPickupLatitude() != null && rideParameters.getPickupLongitude() != null) {
                addLocation(LocationType.PICKUP, Double.toString(rideParameters.getPickupLatitude()),
                        Double.toString(rideParameters.getPickupLongitude()), rideParameters.getPickupNickname(),
                        rideParameters.getPickupAddress(), builder);
            }
            if (rideParameters.isPickupMyLocation()) {
                builder.appendQueryParameter(LocationType.PICKUP.getUriQueryKey(), MY_LOCATION);
            }

            if (rideParameters.getDropoffLatitude() != null && rideParameters.getDropoffLongitude() != null) {
                addLocation(LocationType.DROPOFF, Double.toString(rideParameters.getDropoffLatitude()),
                        Double.toString(rideParameters.getDropoffLongitude()), rideParameters.getDropoffNickname(),
                        rideParameters.getDropoffAddress(), builder);
            }

            String userAgent = rideParameters.getUserAgent();
            if (userAgent == null) {
                userAgent = USER_AGENT_DEEPLINK;
            }
            builder.appendQueryParameter(USER_AGENT, userAgent);

            return new RideRequestDeeplink(context, builder.build(), appProtocol, customTabsHelper);
        }

        /** Appends the query parameters describing one pickup/dropoff location. */
        private void addLocation(
                @NonNull LocationType locationType, @NonNull String latitude,
                @NonNull String longitude, @Nullable String nickname, @Nullable String address, Uri.Builder builder) {
            String typeQueryKey = locationType.getUriQueryKey();
            builder.appendQueryParameter(typeQueryKey + LATITUDE, latitude);
            builder.appendQueryParameter(typeQueryKey + LONGITUDE, longitude);
            if (nickname != null) {
                builder.appendQueryParameter(typeQueryKey + NICKNAME, nickname);
            }
            if (address != null) {
                builder.appendQueryParameter(typeQueryKey + FORMATTED_ADDRESS, address);
            }
        }

        /**
         * Chooses the base URI: the native deeplink (app link or custom scheme)
         * when the Uber app is installed, otherwise the mobile web or app-link
         * (install) URI depending on the configured fallback.
         */
        Uri.Builder getUriBuilder(@NonNull Context context, @NonNull Deeplink.Fallback
                fallback) {
            final Uri.Builder builder;
            if (appProtocol.isInstalled(context, UBER)) {
                if (appProtocol.isAppLinkSupported()) {
                    builder = Uri.parse(Deeplink.APP_LINK_URI).buildUpon();
                } else {
                    builder = new Uri.Builder()
                            .scheme(Deeplink.DEEPLINK_SCHEME);
                }
            } else {
                if (fallback == Deeplink.Fallback.MOBILE_WEB) {
                    builder = Uri.parse(Deeplink.MOBILE_WEB_URI).buildUpon();
                } else {
                    builder = Uri.parse(Deeplink.APP_LINK_URI).buildUpon();
                }
            }
            return builder;
        }
    }

    private enum LocationType {
        PICKUP,
        DROPOFF;

        private String getUriQueryKey() {
            // Use a fixed locale: the default-locale toLowerCase() would map
            // PICKUP to "pıckup" under a Turkish locale (dotless i), corrupting
            // the deeplink query keys.
            return name().toLowerCase(Locale.US);
        }
    }
}
| |
/**
* The MIT License (MIT)
*
* Copyright (c) 2011-2016 Incapture Technologies LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package rapture.script.reflex;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.CommonTreeNodeStream;
import org.apache.log4j.Logger;
import rapture.common.CallingContext;
import rapture.common.RaptureParameterType;
import rapture.common.RaptureScript;
import rapture.common.ScriptResult;
import rapture.common.api.ScriptingApi;
import rapture.common.impl.jackson.JacksonUtil;
import rapture.config.ConfigLoader;
import rapture.index.IndexHandler;
import rapture.kernel.Kernel;
import rapture.kernel.pipeline.PipelineReflexSuspendHandler;
import rapture.kernel.script.KernelScript;
import rapture.log.MDCService;
import rapture.script.IActivityInfo;
import rapture.script.IRaptureScript;
import rapture.script.RaptureDataContext;
import rapture.script.ScriptRunInfoCollector;
import reflex.AddingOutputReflexHandler;
import reflex.DummyReflexOutputHandler;
import reflex.IReflexHandler;
import reflex.IReflexOutputHandler;
import reflex.MetaParam;
import reflex.MetaReturn;
import reflex.MetaScriptInfo;
import reflex.ReflexException;
import reflex.ReflexExecutor;
import reflex.ReflexLexer;
import reflex.ReflexParser;
import reflex.ReflexTreeWalker;
import reflex.Scope;
import reflex.debug.NullDebugger;
import reflex.node.ReflexNode;
import reflex.value.ReflexValue;
import reflex.value.internal.ReflexNullValue;
import reflex.value.internal.ReflexVoidValue;
/**
* Provides an execution context for Rapture Reflex Scripts
*
* @author amkimian
*/
public class ReflexRaptureScript implements IRaptureScript {
private static Logger log = Logger.getLogger(ReflexRaptureScript.class);
// Audit-log category used when recording script errors.
private static final String EXCEPTION = "exception";
// Optional audit log URI; when set, runProgram writes the collected run output to it.
private String auditLogUri = null;
// Sets the audit log URI used by runProgram for debug audit entries.
public void setAuditLogUri(String uri) {
auditLogUri = uri;
}
// Binds the calling context into the walker's scope ("_ctx"), wires a
// KernelScript API bound to that context into the Reflex handler, and exposes
// the server configuration as "_cfg".
private void addContextScope(ReflexTreeWalker walker, CallingContext context) {
// "_ctx" is the calling context, or an explicit Reflex null when absent.
walker.currentScope.assign("_ctx", context == null ? new ReflexNullValue() : new ReflexValue(context));
KernelScript kh = new KernelScript();
kh.setCallingContext(context);
walker.getReflexHandler().setApi(kh);
// walker.currentScope.assign("_rk", new ReflexValue(kh));
walker.currentScope.assign("_cfg", new ReflexValue(ConfigLoader.getConf()));
// addStandard(walker, context, kh);
}
// Exposes the caller-supplied parameter map to the script as "_params".
private void addObjectExtra(ReflexTreeWalker walker, Map<String, ?> extra) {
walker.currentScope.assign("_params", new ReflexValue(extra));
}
/**
 * Builds a tree walker for the given script with the standard execution
 * context: optional "_params" (when {@code extra} is non-empty) plus the
 * "_ctx"/"_cfg" bindings and API handler from the calling context.
 *
 * @throws RecognitionException when the script fails to parse
 */
private ReflexTreeWalker getParserWithStandardContext(CallingContext context, String script, Map<String, ?> extra) throws RecognitionException {
    ReflexTreeWalker treeWalker = getStandardWalker(context, script);
    boolean hasExtras = (extra != null) && !extra.isEmpty();
    if (hasExtras) {
        addObjectExtra(treeWalker, extra);
    }
    addContextScope(treeWalker, context);
    return treeWalker;
}
/**
 * Parses the given script text and returns the resulting parser (whose state
 * carries the parsed program). Includes are resolved via the calling context.
 *
 * @throws RecognitionException when the script fails to parse
 */
public ReflexParser getParser(CallingContext ctx, String script) throws RecognitionException {
    ReflexLexer scriptLexer = new ReflexLexer();
    scriptLexer.dataHandler = new ReflexIncludeHelper(ctx);
    scriptLexer.setCharStream(new ANTLRStringStream(script));
    ReflexParser reflexParser = new ReflexParser(new CommonTokenStream(scriptLexer));
    reflexParser.parse();
    return reflexParser;
}
// Lexes and parses the script, then builds a tree walker over the resulting
// AST with the standard handler configuration (output, data, and blob-only IO
// handlers) and the script's meta info attached.
private ReflexTreeWalker getStandardWalker(CallingContext ctx, String script) throws RecognitionException {
ReflexLexer lexer = new ReflexLexer();
// Resolve script includes through the calling context.
lexer.dataHandler = new ReflexIncludeHelper(ctx);
lexer.setCharStream(new ANTLRStringStream(script));
CommonTokenStream tokens = new CommonTokenStream(lexer);
ReflexParser parser = new ReflexParser(tokens);
CommonTree tree;
tree = (CommonTree) parser.parse().getTree();
CommonTreeNodeStream nodes = new CommonTreeNodeStream(tree);
ReflexTreeWalker walker = new ReflexTreeWalker(nodes, parser.languageRegistry);
walker.setReflexHandler(new AddingOutputReflexHandler());
// NOTE(review): two output handlers are set in a row; presumably
// AddingOutputReflexHandler accumulates them rather than replacing —
// TODO confirm against AddingOutputReflexHandler's implementation.
walker.getReflexHandler().setOutputHandler(new SimpleCollectingOutputHandler());
walker.getReflexHandler().setOutputHandler(new DummyReflexOutputHandler());
walker.getReflexHandler().setDataHandler(new ReflexDataHelper(ctx));
walker.getReflexHandler().setIOHandler(new BlobOnlyIOHandler());
walker.setScriptInfo(parser.scriptInfo);
return walker;
}
/**
 * Runs a Reflex script as a filter predicate. A filter is a program whose
 * result is interpreted as a boolean; the absence of an explicit return is
 * equivalent to false.
 */
@Override
public boolean runFilter(CallingContext context, RaptureScript script, RaptureDataContext data, Map<String, Object> parameters) {
    try {
        ReflexTreeWalker treeWalker = getParserWithStandardContext(context, script.getScript(), parameters);
        return treeWalker.walk().evaluateWithoutScope(new NullDebugger()).asBoolean();
    } catch (RecognitionException e) {
        String info = getErrorInfo(e.getMessage(), script, e.line, e.charPositionInLine);
        Kernel.writeAuditEntry(EXCEPTION, 2, info);
        throw new ReflexException(e.line, info, e);
    }
}
/**
 * Runs a Reflex script used to maintain an index entry. The script sees the
 * data under "_data" (as a map) and the index handler under "_index"; its
 * return value is ignored.
 */
@Override
public void runIndexEntry(CallingContext context, RaptureScript script, IndexHandler indexHandler, RaptureDataContext data) {
    try {
        ReflexTreeWalker treeWalker = getParserWithStandardContext(context, script.getScript(), null);
        treeWalker.currentScope.assign("_data", new ReflexValue(JacksonUtil.getHashFromObject(data)));
        treeWalker.currentScope.assign("_index", new ReflexValue(indexHandler));
        treeWalker.walk().evaluateWithoutScope(new NullDebugger());
    } catch (RecognitionException e) {
        String info = getErrorInfo(e.getMessage(), script, e.line, e.charPositionInLine);
        Kernel.writeAuditEntry(EXCEPTION, 2, info);
        throw new ReflexException(e.line, info, e);
    }
}
/**
 * Runs a Reflex script as a map step. The script sees the data under "_data"
 * and must evaluate to a list; each element is unwrapped to a plain Java
 * object before being returned.
 */
@Override
public List<Object> runMap(CallingContext context, RaptureScript script, RaptureDataContext data, Map<String, Object> parameters) {
    try {
        ReflexTreeWalker treeWalker = getParserWithStandardContext(context, script.getScript(), parameters);
        treeWalker.currentScope.assign("_data", new ReflexValue(JacksonUtil.getHashFromObject(data)));
        List<ReflexValue> rawValues = treeWalker.walk().evaluateWithoutScope(new NullDebugger()).asList();
        List<Object> unwrapped = new ArrayList<>(rawValues.size());
        for (ReflexValue value : rawValues) {
            unwrapped.add(value.asObject());
        }
        return unwrapped;
    } catch (RecognitionException e) {
        String info = getErrorInfo(e.getMessage(), script, e.line, e.charPositionInLine);
        Kernel.writeAuditEntry(EXCEPTION, 2, info);
        throw new ReflexException(e.line, info, e);
    }
}
/**
 * Runs a Reflex script as a named operation. The supplied context string is
 * bound as "_ctx" (overriding the calling-context binding) and the script's
 * result is returned as its string form.
 */
@Override
public String runOperation(CallingContext context, RaptureScript script, String ctx, Map<String, Object> params) {
    try {
        ReflexTreeWalker treeWalker = getParserWithStandardContext(context, script.getScript(), params);
        treeWalker.currentScope.assign("_ctx", new ReflexValue(ctx));
        return treeWalker.walk().evaluateWithoutScope(new NullDebugger()).toString();
    } catch (RecognitionException e) {
        String info = getErrorInfo(e.getMessage(), script, e.line, e.charPositionInLine);
        Kernel.writeAuditEntry(EXCEPTION, 2, info);
        throw new ReflexException(e.line, info, e);
    }
}
// Runs a Reflex program with no timeout (delegates to the timeout overload with -1).
@Override
public Object runProgram(CallingContext context, IActivityInfo activity, RaptureScript script, Map<String, Object> extraVals) {
return runProgram(context, activity, script, extraVals, -1);
}
// how many lines to show (if possible) each side of the error
private static final int DEBUG_CONTEXT = 5;

/**
 * Builds a human-readable error report: the message, the script name, and a
 * numbered excerpt of the script around the failing line with a marker line
 * ("----^^^^") pointing at the error position.
 *
 * @param message the underlying parser/runtime error message
 * @param script the script that failed (source of name and text)
 * @param lineNum 1-based line of the error; {@code <= 0} suppresses the excerpt
 * @param posInLine 0-based column of the error within that line
 */
private String getErrorInfo(String message, RaptureScript script, int lineNum, int posInLine) {
    StringBuilder msg = new StringBuilder();
    msg.append(message).append(" in script ").append(script.getName()).append("\n");
    if (lineNum > 0) {
        String[] lines = script.getScript().split("\n");
        int start = (lineNum > DEBUG_CONTEXT) ? lineNum - DEBUG_CONTEXT : 0;
        int end = lineNum + DEBUG_CONTEXT;
        if (end > lines.length) end = lines.length;
        while (start < end) {
            String l = lines[start++];
            msg.append(start).append(": ").append(l).append("\n");
            if (start == lineNum) {
                // Dashes cover the "NN: " prefix (the -4 offset) up to the error
                // column, then carets mark the rest of the offending line.
                for (int i = -4; i < posInLine; i++)
                    msg.append("-");
                for (int i = posInLine; i < l.length(); i++)
                    msg.append("^");
                // Bug fix: terminate the marker line so the next numbered source
                // line is not appended onto it.
                msg.append("\n");
            }
        }
    }
    return msg.toString();
}
/**
 * Runs a Reflex program and returns its result value.
 *
 * @param context calling context bound into the script's scope
 * @param activity activity info used for progress reporting
 * @param script the script to run; a null script logs and returns null
 * @param extraVals values exposed to the script as "_params"
 * @param timeout positive value enables the timeout debugger; {@code <= 0} disables it
 * @return the script's return value, or null when {@code script} is null
 */
public Object runProgram(CallingContext context, IActivityInfo activity, RaptureScript script, Map<String, Object> extraVals, int timeout) {
    if (script == null) {
        log.info("in runProgram: RaptureScript is null");
        return null;
    }
    try {
        MDCService.INSTANCE.setReflexMDC(script.getName());
        ScriptRunInfoCollector collector = ScriptRunInfoCollector.createServerCollector(context, script.getAddressURI().getFullPath());
        log.info("Running script " + getScriptName(script));
        // Bug fix: the timeout parameter was previously ignored (-1 was passed)
        // and a throwaway "remote" collector was used, leaving 'collector'
        // empty when written to the audit log below.
        ScriptResult res = _doRunProgram(context, activity, script, extraVals, timeout, collector);
        if (auditLogUri != null) {
            Kernel.getAudit().writeAuditEntry(context, auditLogUri, "debug", 1, collector.getBlobContent());
        }
        return res.getReturnValue();
    } catch (RecognitionException e) {
        String message = getErrorInfo(e.getMessage(), script, e.line, e.charPositionInLine);
        Kernel.writeAuditEntry(EXCEPTION, 2, message);
        throw new ReflexException(e.line, message, e);
    } catch (ReflexException e) {
        throw new ReflexException(e.getLineNumber(), getErrorInfo(e.getMessage(), script, e.getLineNumber(), 0), e);
    } finally {
        MDCService.INSTANCE.clearReflexMDC();
    }
}
/**
 * Returns a display name for a script: its storage location when it has an
 * authority, otherwise its bare name.
 */
private String getScriptName(RaptureScript script) {
    return (script.getAuthority() == null)
            ? script.getName()
            : script.getStorageLocation().toString();
}
/**
 * Runs a Reflex program and returns an extended result. Parse errors are
 * rethrown as {@link ReflexException}; runtime ReflexExceptions are captured
 * in the returned {@link ScriptResult} with the error flag set.
 */
@Override
public ScriptResult runProgramExtended(CallingContext context, IActivityInfo activity, RaptureScript script, Map<String, Object> params) {
    try {
        return _doRunProgram(context, activity, script, params, -1, ScriptRunInfoCollector.createServerCollector(context, "remote"));
    } catch (RecognitionException e) {
        String info = getErrorInfo(e.getMessage(), script, e.line, e.charPositionInLine);
        Kernel.writeAuditEntry(EXCEPTION, 2, info);
        throw new ReflexException(e.line, info, e);
    } catch (ReflexException e) {
        ScriptResult failure = new ScriptResult();
        failure.setInError(true);
        failure.setReturnValue(getErrorInfo(e.getMessage(), script, e.getLineNumber(), 0));
        failure.getOutput().add("Error when running script");
        return failure;
    }
}
// Core execution path: parses the script, binds parameters (coerced per the
// script's meta-parameter declarations), runs it, records instrumentation into
// the collector, and coerces the return value per the meta return type.
private ScriptResult _doRunProgram(CallingContext context, IActivityInfo activity, RaptureScript script, Map<String, Object> params, int timeout,
ScriptRunInfoCollector collector) throws RecognitionException, ReflexException {
ScriptResult res = new ScriptResult();
ReflexTreeWalker walker = getParserWithStandardContext(context, script.getScript(), params);
// A positive timeout selects the timeout-enforcing debugger.
ProgressDebugger progress = (timeout > 0) ? new TimeoutReflexDebugger(activity, script.getScript(), timeout)
: new ProgressDebugger(activity, script.getScript());
// Setup an alternate output handler, and a standard data handler
walker.getReflexHandler().setDataHandler(new ReflexDataHelper(context));
walker.getReflexHandler().setOutputHandler(new ScriptResultOutputHandler(res));
ReflexNode execRes = walker.walk();
ReflexExecutor.injectSystemIntoScope(walker.currentScope);
MetaScriptInfo scriptInfo = walker.getScriptInfo();
// Bind each caller parameter into the script scope, coercing the (stringified)
// value to the type declared in the script's meta block.
// NOTE(review): entry.getValue().toString() will NPE on a null parameter
// value — presumably callers never pass nulls; confirm before relying on it.
for (Map.Entry<String, Object> entry : params.entrySet()) {
String key = entry.getKey();
String value = entry.getValue().toString();
ReflexValue v;
MetaParam param = scriptInfo.getParameter(key);
// If a parameter type is not defined then it's a String
RaptureParameterType type = (param == null) ? RaptureParameterType.STRING : param.getParameterType();
switch (type) {
case STRING:
default:
v = new ReflexValue(value);
break;
case VOID:
v = new ReflexVoidValue(-1);
break;
case NUMBER:
v = new ReflexValue(new BigDecimal(value.toString()));
break;
case INTEGER:
v = new ReflexValue(new BigDecimal(value.toString()).intValue());
break;
case BOOLEAN:
v = new ReflexValue(Boolean.valueOf(value.toString()));
break;
case MAP:
v = new ReflexValue(JacksonUtil.getMapFromJson(value.toString()));
break;
case LIST:
List<ReflexValue> list = JacksonUtil.objectFromJson(value.toString(), ArrayList.class);
v = new ReflexValue(list);
break;
}
walker.currentScope.assign(key, v);
}
// TODO replace this with abortable invocation
if (timeout > 0) log.info("Warning: script is not abortable");
ReflexValue val = execRes.evaluateWithoutScope(progress);
progress.getInstrumenter().log();
// Collect any output captured by a SimpleCollectingOutputHandler, if present.
if (walker.getReflexHandler() instanceof AddingOutputReflexHandler) {
AddingOutputReflexHandler aorf = (AddingOutputReflexHandler) walker.getReflexHandler();
SimpleCollectingOutputHandler sc = aorf.getOutputHandlerLike(SimpleCollectingOutputHandler.class);
collector.addOutput(sc.getLog());
}
collector.addInstrumentationLog(progress.getInstrumenter().getTextLog());
// Now record collector
ScriptCollectorHelper.writeCollector(context, collector);
// Meta block specifies the return type
MetaReturn mri = scriptInfo.getReturnInfo();
RaptureParameterType type = (mri == null) ? RaptureParameterType.STRING : mri.getType();
// Coerce the raw return value to the declared return type, converting from
// the value's string/JSON form when the runtime type doesn't already match.
switch (type) {
case STRING:
default:
res.setReturnValue(val.asString());
break;
case VOID:
res.setReturnValue(null);
break;
case NUMBER:
if (val.isNumber()) res.setReturnValue(val.asBigDecimal());
else res.setReturnValue(new BigDecimal(val.asString()));
break;
case INTEGER:
if (val.isInteger()) res.setReturnValue(val.asInt());
else res.setReturnValue(new BigDecimal(val.asString()).intValue());
break;
case BOOLEAN:
if (val.isBoolean()) res.setReturnValue(val.asBoolean());
else res.setReturnValue(Boolean.valueOf(val.asString()));
break;
case MAP:
if (val.isMap()) res.setReturnValue(val.asMap());
else res.setReturnValue(JacksonUtil.getMapFromJson(val.asString()));
break;
case LIST:
if (val.isList()) res.setReturnValue(val.asList());
else res.setReturnValue(JacksonUtil.objectFromJson(val.asString(), ArrayList.class));
break;
}
return res;
}
/**
 * Runs a script within the supplied scope and returns everything the script
 * printed (log and output) as a single string.
 *
 * @param context calling context used by the Reflex handler
 * @param script the script text to run
 * @param s the scope the script is evaluated in
 * @return the concatenated log/output text produced by the script
 * @throws RecognitionException when the script fails to parse
 */
public String runProgramWithScope(CallingContext context, String script, Scope s) throws RecognitionException {
    IReflexHandler handler = new ReflexHandler(context);
    ReflexTreeWalker walker = ReflexExecutor.getWalkerForProgram(script, handler);
    walker.setReflexHandler(handler);
    final StringBuilder sb = new StringBuilder();
    // Capture both log and regular output into the same buffer.
    walker.getReflexHandler().setOutputHandler(new IReflexOutputHandler() {
        @Override
        public boolean hasCapability() {
            return true;
        }

        @Override
        public void printLog(String text) {
            sb.append(text);
        }

        @Override
        public void printOutput(String text) {
            sb.append(text);
        }

        @Override
        public void setApi(ScriptingApi api) {
        }
    });
    ReflexNode res = walker.walk();
    res.evaluate(new NullDebugger(), s);
    // Note: the original reassigned the 's' parameter to walker.currentScope
    // here; that assignment was dead code (a Java parameter reassignment is
    // invisible to the caller) and has been removed.
    return sb.toString();
}
/**
 * Executes a pre-parsed Reflex program, exposing the supplied values to the
 * script as the "_params" variable and returning the script's result value.
 *
 * @param context   calling context used to build the data access helper
 * @param walker    tree walker already primed with the program to run
 * @param activity  activity descriptor used for progress reporting
 * @param extraVals values published to the script under "_params"
 * @param script    originating script, used only to format parse-error messages
 * @return the value the Reflex program evaluates to
 * @throws ReflexException wrapping any {@link RecognitionException} from parsing,
 *                         after the failure has been written to the audit log
 */
public ReflexValue runProgram(CallingContext context, ReflexTreeWalker walker, IActivityInfo activity, Map<String, Object> extraVals,
        RaptureScript script) {
    // Wire data access and publish the caller-supplied parameter map.
    walker.getReflexHandler().setDataHandler(new ReflexDataHelper(context));
    walker.currentScope.assign("_params", new ReflexValue(extraVals));
    try {
        ReflexNode program = walker.walk();
        return program.evaluateWithoutScope(new ProgressDebugger(activity, ""));
    } catch (RecognitionException parseError) {
        String details = getErrorInfo(parseError.getMessage(), script, parseError.line, parseError.charPositionInLine);
        Kernel.writeAuditEntry(EXCEPTION, 2, details);
        throw new ReflexException(parseError.line, details, parseError);
    }
}
/**
 * Runs a Reflex script that may suspend itself (e.g. as a workflow/pipeline step).
 * Output goes to the supplied handler if present, otherwise into a ScriptResult;
 * values the script assigns to entries of {@code extraVals} are copied back into
 * that map so the caller can observe them.
 *
 * @param context        calling context used to build the standard script environment
 * @param script         the script to parse and execute
 * @param activity       activity descriptor used for progress reporting
 * @param extraVals      in/out parameter map shared with the script
 * @param suspendHandler handler invoked if the script suspends
 * @param outputHandler  optional sink for script output; may be null
 * @return the string form of the script's return value
 * @throws ReflexException wrapping any {@link RecognitionException} from parsing,
 *                         after the failure has been written to the audit log
 */
public String runProgramWithSuspend(CallingContext context, RaptureScript script, IActivityInfo activity, Map<String, Object> extraVals,
PipelineReflexSuspendHandler suspendHandler, IReflexOutputHandler outputHandler) {
try {
ScriptResult result = new ScriptResult();
ReflexTreeWalker walker = getParserWithStandardContext(context, script.getScript(), extraVals);
walker.getReflexHandler().setSuspendHandler(suspendHandler);
if (outputHandler != null) {
walker.getReflexHandler().setOutputHandler(outputHandler);
} else {
// No caller-supplied sink: collect output into the ScriptResult instead.
walker.getReflexHandler().setOutputHandler(new ScriptResultOutputHandler(result));
}
ReflexNode res = walker.walk();
ProgressDebugger progress = new ProgressDebugger(activity, script.getScript());
String scriptName = getScriptName(script);
if (scriptName == null) {
log.info("Running anonymous Reflex script");
} else {
log.info(String.format("Running script with name '%s'", scriptName));
}
ReflexValue retVal = res.evaluateWithoutScope(progress);
// Copy script-side changes back into the caller's map, skipping the
// internal VOID/NULL sentinels that mean "no usable value".
for (Map.Entry<String, Object> val : extraVals.entrySet()) {
ReflexValue v = walker.currentScope.resolve(val.getKey());
if (v != null && v.getValue() != ReflexValue.Internal.VOID && v.getValue() != ReflexValue.Internal.NULL) {
val.setValue(v.asObject());
}
}
progress.getInstrumenter().log();
return retVal.toString();
} catch (RecognitionException e) {
String message = getErrorInfo(e.getMessage(), script, e.line, e.charPositionInLine);
Kernel.writeAuditEntry(EXCEPTION, 2, message);
throw new ReflexException(e.line, message, e);
}
}
/**
 * Resumes a previously suspended Reflex script from a serialized scope snapshot.
 * As with {@code runProgramWithSuspend}, values the script assigns to entries of
 * {@code extraVals} are copied back into that map for the caller.
 *
 * @param context        calling context used to build the standard script environment
 * @param script         the script to parse and resume
 * @param activity       activity descriptor used for progress reporting
 * @param extraVals      in/out parameter map shared with the script
 * @param suspendHandler handler invoked if the script suspends again
 * @param outputHandler  sink for script output
 * @param scopeContext   JSON-serialized {@link Scope} captured at suspension time
 * @return the JSON serialization of the node's scope after evaluation
 * @throws ReflexException wrapping any {@link RecognitionException} from parsing,
 *                         after the failure has been written to the audit log
 */
public String runProgramWithResume(CallingContext context, RaptureScript script, IActivityInfo activity, Map<String, Object> extraVals,
PipelineReflexSuspendHandler suspendHandler, IReflexOutputHandler outputHandler, String scopeContext) {
try {
ReflexTreeWalker walker = getParserWithStandardContext(context, script.getScript(), extraVals);
walker.getReflexHandler().setSuspendHandler(suspendHandler);
walker.getReflexHandler().setOutputHandler(outputHandler);
ReflexNode res = walker.walk();
// Rehydrate the suspended scope and continue evaluation from it.
Scope scope = JacksonUtil.objectFromJson(scopeContext, Scope.class);
ProgressDebugger progress = new ProgressDebugger(activity, script.getScript());
log.info("Running script " + getScriptName(script));
res.evaluateWithResume(progress, scope);
// Copy script-side changes back into the caller's map, skipping the
// internal VOID/NULL sentinels that mean "no usable value".
for (Map.Entry<String, Object> val : extraVals.entrySet()) {
ReflexValue v = walker.currentScope.resolve(val.getKey());
if (v != null && v.getValue() != ReflexValue.Internal.VOID && v.getValue() != ReflexValue.Internal.NULL) {
val.setValue(v.asObject());
}
}
progress.getInstrumenter().log();
// NOTE(review): this serializes res.getScope(), not the 'scope' object that
// was resumed — presumably the node's scope is the updated continuation
// state, but confirm that res.getScope() is the intended source here.
return JacksonUtil.jsonFromObject(res.getScope());
} catch (RecognitionException e) {
String message = getErrorInfo(e.getMessage(), script, e.line, e.charPositionInLine);
Kernel.writeAuditEntry(EXCEPTION, 2, message);
throw new ReflexException(e.line, message, e);
}
}
/**
 * Checks that a Reflex script parses cleanly.
 *
 * @param context the calling context used to build the parser environment
 * @param script  the script whose source should be validated
 * @return an empty string when the script parses, otherwise the parse failure's
 *         message (or the exception's class name when it carries no message)
 */
@Override
public String validateProgram(CallingContext context, RaptureScript script) {
    try {
        // Building the walker parses the program; an unparseable script throws.
        getStandardWalker(context, script.getScript());
        return "";
    } catch (Exception e) {
        e.printStackTrace();
        String message = e.getMessage();
        return (message != null) ? message : e.getClass().toString();
    }
}
}
| |
/*
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.cas.ticket.registry.support.kryo;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.kryo.serializers.DefaultSerializers;
import de.javakaffee.kryoserializers.UnmodifiableCollectionsSerializer;
import de.javakaffee.kryoserializers.jodatime.JodaDateTimeSerializer;
import net.spy.memcached.CachedData;
import net.spy.memcached.transcoders.Transcoder;
import org.apache.commons.io.IOUtils;
import org.jasig.cas.authentication.BasicCredentialMetaData;
import org.jasig.cas.authentication.HandlerResult;
import org.jasig.cas.authentication.ImmutableAuthentication;
import org.jasig.cas.authentication.principal.SimpleWebApplicationServiceImpl;
import org.jasig.cas.services.RegexRegisteredService;
import org.jasig.cas.services.RegisteredServiceImpl;
import org.jasig.cas.ticket.ServiceTicketImpl;
import org.jasig.cas.ticket.TicketGrantingTicketImpl;
import org.jasig.cas.ticket.registry.support.kryo.serial.RegisteredServiceSerializer;
import org.jasig.cas.ticket.registry.support.kryo.serial.SimpleWebApplicationServiceSerializer;
import org.jasig.cas.ticket.registry.support.kryo.serial.URLSerializer;
import org.jasig.cas.ticket.support.HardTimeoutExpirationPolicy;
import org.jasig.cas.ticket.support.MultiTimeUseOrTimeoutExpirationPolicy;
import org.jasig.cas.ticket.support.NeverExpiresExpirationPolicy;
import org.jasig.cas.ticket.support.RememberMeDelegatingExpirationPolicy;
import org.jasig.cas.ticket.support.ThrottledUseAndTimeoutExpirationPolicy;
import org.jasig.cas.ticket.support.TicketGrantingTicketExpirationPolicy;
import org.jasig.cas.ticket.support.TimeoutExpirationPolicy;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
* {@link net.spy.memcached.MemcachedClient} transcoder implementation based on Kryo fast serialization framework
* suited for efficient serialization of tickets.
*
* @author Marvin S. Addison
*/
@SuppressWarnings("rawtypes")
public class KryoTranscoder implements Transcoder<Object> {
/** Kryo serializer. */
private final Kryo kryo = new Kryo();
/** Logging instance. */
private final Logger logger = LoggerFactory.getLogger(getClass());
/** Map of class to serializer that handles it. */
private Map<Class<?>, Serializer> serializerMap;
/**
* Creates a Kryo-based transcoder.
*/
public KryoTranscoder() {
}
/**
* @deprecated
* Creates a Kryo-based transcoder.
*
* @param initialBufferSize Initial size for buffer holding encoded object data.
*/
@Deprecated
public KryoTranscoder(final int initialBufferSize) {
logger.warn("It's no longer necessary to define the initialBufferSize. Use the empty constructor.");
}
/**
* Sets a map of additional types that should be regisetered with Kryo,
* for example GoogleAccountsService and OpenIdService.
*
* @param map Map of class to the serializer instance that handles it.
*/
public void setSerializerMap(final Map<Class<?>, Serializer> map) {
this.serializerMap = map;
}
/**
* Initialize and register classes with kryo.
*/
public void initialize() {
// Register types we know about and do not require external configuration
kryo.register(ArrayList.class);
kryo.register(BasicCredentialMetaData.class);
kryo.register(Class.class, new DefaultSerializers.ClassSerializer());
kryo.register(Date.class, new DefaultSerializers.DateSerializer());
kryo.register(HardTimeoutExpirationPolicy.class);
kryo.register(HashMap.class);
kryo.register(HandlerResult.class);
kryo.register(ImmutableAuthentication.class);
kryo.register(MultiTimeUseOrTimeoutExpirationPolicy.class);
kryo.register(NeverExpiresExpirationPolicy.class);
kryo.register(RememberMeDelegatingExpirationPolicy.class);
kryo.register(ServiceTicketImpl.class);
kryo.register(SimpleWebApplicationServiceImpl.class, new SimpleWebApplicationServiceSerializer());
kryo.register(ThrottledUseAndTimeoutExpirationPolicy.class);
kryo.register(TicketGrantingTicketExpirationPolicy.class);
kryo.register(TicketGrantingTicketImpl.class);
kryo.register(TimeoutExpirationPolicy.class);
kryo.register(URL.class, new URLSerializer());
// we add these ones for tests only
kryo.register(RegisteredServiceImpl.class, new RegisteredServiceSerializer());
kryo.register(RegexRegisteredService.class, new RegisteredServiceSerializer());
// new serializers to manage Joda dates and immutable collections
kryo.register(DateTime.class, new JodaDateTimeSerializer());
// from the kryo-serializers library (https://github.com/magro/kryo-serializers)
UnmodifiableCollectionsSerializer.registerSerializers(kryo);
// Register other types
if (serializerMap != null) {
for (final Map.Entry<Class<?>, Serializer> clazz : serializerMap.entrySet()) {
kryo.register(clazz.getKey(), clazz.getValue());
}
}
// don't reinit the registered classes after every write or read
kryo.setAutoReset(false);
// don't replace objects by references
kryo.setReferences(false);
// Catchall for any classes not explicitly registered
kryo.setRegistrationRequired(false);
}
/**
* Asynchronous decoding is not supported.
*
* @param d Data to decode.
* @return False.
*/
public boolean asyncDecode(final CachedData d) {
return false;
}
@Override
public CachedData encode(final Object obj) {
final ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
final Output output = new Output(byteStream);
kryo.writeClassAndObject(output, obj);
output.flush();
IOUtils.closeQuietly(output);
final byte[] bytes = byteStream.toByteArray();
return new CachedData(0, bytes, bytes.length);
}
@Override
public Object decode(final CachedData d) {
final byte[] bytes = d.getData();
final Input input = new Input(new ByteArrayInputStream(bytes));
final Object obj = kryo.readClassAndObject(input);
IOUtils.closeQuietly(input);
return obj;
}
/**
* Maximum size of encoded data supported by this transcoder.
*
* @return <code>net.spy.memcached.CachedData#MAX_SIZE</code>.
*/
public int getMaxSize() {
return CachedData.MAX_SIZE;
}
/**
* Gets the kryo object that provides encoding and decoding services for this instance.
*
* @return Underlying Kryo instance.
*/
public Kryo getKryo() {
return kryo;
}
}
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.event.listener;
import static com.facebook.buck.event.TestEventConfigurator.configureTestEventAtTime;
import static org.junit.Assert.assertEquals;
import com.facebook.buck.artifact_cache.CacheResult;
import com.facebook.buck.artifact_cache.HttpArtifactCacheEvent;
import com.facebook.buck.distributed.DistBuildCreatedEvent;
import com.facebook.buck.distributed.thrift.StampedeId;
import com.facebook.buck.event.ActionGraphEvent;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.BuckEventBusForTests;
import com.facebook.buck.event.ConsoleEvent;
import com.facebook.buck.event.InstallEvent;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.parser.ParseEvent;
import com.facebook.buck.rules.BuildEvent;
import com.facebook.buck.rules.BuildRuleDurationTracker;
import com.facebook.buck.rules.BuildRuleEvent;
import com.facebook.buck.rules.BuildRuleKeys;
import com.facebook.buck.rules.BuildRuleStatus;
import com.facebook.buck.rules.BuildRuleSuccessType;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.test.TestResultSummaryVerbosity;
import com.facebook.buck.testutil.TestConsole;
import com.facebook.buck.util.ExitCode;
import com.facebook.buck.util.environment.DefaultExecutionEnvironment;
import com.facebook.buck.util.timing.Clock;
import com.facebook.buck.util.timing.IncrementingFakeClock;
import com.facebook.buck.util.unit.SizeUnit;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
import java.nio.file.FileSystem;
import java.nio.file.Path;
import java.util.Locale;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for SimpleConsoleEventBusListener: synthetic Buck events are posted on a
 * fake event bus at fixed timestamps and the exact text the listener renders to
 * stderr is verified (the listener writes nothing to stdout).
 */
public class SimpleConsoleEventBusListenerTest {
private static final StampedeId STAMPEDE_ID_ONE = new StampedeId().setId("stampedeIdOne");
private static final String STAMPEDE_ID_ONE_MESSAGE = "StampedeId=[stampedeIdOne]\n";
private static final String TARGET_ONE = "TARGET_ONE";
private static final String TARGET_TWO = "TARGET_TWO";
private static final String SEVERE_MESSAGE = "This is a sample severe message.";
private static final String FINISHED_DOWNLOAD_STRING = "DOWNLOADED 0 ARTIFACTS, 0.00 BYTES";
// Per-test fixtures, rebuilt in setUp().
private BuildRuleDurationTracker durationTracker;
private Clock fakeClock;
private BuckEventBus eventBus;
private TestConsole console;
Path logPath;
@Before
public void setUp() {
// Use an in-memory file system so no real log file is touched.
FileSystem vfs = Jimfs.newFileSystem(Configuration.unix());
logPath = vfs.getPath("log.txt");
durationTracker = new BuildRuleDurationTracker();
fakeClock = new IncrementingFakeClock(TimeUnit.SECONDS.toNanos(1));
eventBus = BuckEventBusForTests.newInstance(fakeClock);
console = new TestConsole();
}
/**
 * Full happy-path build: parse, build one rule, console message, install, and
 * HTTP cache uploads — asserting the accumulated output after each phase.
 */
@Test
public void testSimpleBuild() {
setupSimpleConsole(false);
String expectedOutput = "";
assertOutput(expectedOutput, console);
BuildTarget fakeTarget = BuildTargetFactory.newInstance("//banana:stand");
ImmutableSet<BuildTarget> buildTargets = ImmutableSet.of(fakeTarget);
Iterable<String> buildArgs = Iterables.transform(buildTargets, Object::toString);
FakeBuildRule fakeRule = new FakeBuildRule(fakeTarget, ImmutableSortedSet.of());
final long threadId = 0;
BuildEvent.Started buildEventStarted = BuildEvent.started(buildArgs);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(buildEventStarted, 0L, TimeUnit.MILLISECONDS, threadId));
ParseEvent.Started parseStarted = ParseEvent.started(buildTargets);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(parseStarted, 0L, TimeUnit.MILLISECONDS, threadId));
assertOutput(expectedOutput, console);
// Parsing finishes 400ms after it started.
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
ParseEvent.finished(parseStarted, 10, Optional.empty()),
400L,
TimeUnit.MILLISECONDS,
threadId));
expectedOutput += "PARSING BUCK FILES: FINISHED IN 0.4s\n";
assertOutput(expectedOutput, console);
BuildRuleEvent.Started started = BuildRuleEvent.started(fakeRule, durationTracker);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(started, 600L, TimeUnit.MILLISECONDS, threadId));
// Schedule two cache uploads; only the first is started before build finish,
// which drives the "1 UPLOADING/1 PENDING" line asserted below.
HttpArtifactCacheEvent.Scheduled storeScheduledOne =
ArtifactCacheTestUtils.postStoreScheduled(eventBus, threadId, TARGET_ONE, 700L);
HttpArtifactCacheEvent.Scheduled storeScheduledTwo =
ArtifactCacheTestUtils.postStoreScheduled(eventBus, threadId, TARGET_TWO, 700L);
HttpArtifactCacheEvent.Started storeStartedOne =
ArtifactCacheTestUtils.postStoreStarted(eventBus, threadId, 710L, storeScheduledOne);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
BuildRuleEvent.finished(
started,
BuildRuleKeys.of(new RuleKey("aaaa")),
BuildRuleStatus.SUCCESS,
CacheResult.miss(),
Optional.empty(),
Optional.of(BuildRuleSuccessType.BUILT_LOCALLY),
false,
Optional.empty(),
Optional.empty(),
Optional.empty(),
Optional.empty(),
Optional.empty()),
1000L,
TimeUnit.MILLISECONDS,
threadId));
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
BuildEvent.finished(buildEventStarted, ExitCode.SUCCESS),
1234L,
TimeUnit.MILLISECONDS,
threadId));
expectedOutput +=
"BUILT 0.4s //banana:stand\n"
+ FINISHED_DOWNLOAD_STRING
+ "\n"
+ "BUILDING: FINISHED IN 1.2s\n"
+ "WAITING FOR HTTP CACHE UPLOADS 0.00 BYTES (0 COMPLETE/0 FAILED/1 UPLOADING/1 PENDING)\n"
+ "BUILD SUCCEEDED\n";
assertOutput(expectedOutput, console);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
ConsoleEvent.severe(SEVERE_MESSAGE), 1500L, TimeUnit.MILLISECONDS, threadId));
expectedOutput += SEVERE_MESSAGE + "\n";
assertOutput(expectedOutput, console);
InstallEvent.Started installEventStarted =
configureTestEventAtTime(
InstallEvent.started(fakeTarget), 2500L, TimeUnit.MILLISECONDS, threadId);
eventBus.postWithoutConfiguring(installEventStarted);
assertOutput(expectedOutput, console);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
InstallEvent.finished(installEventStarted, true, Optional.empty(), Optional.empty()),
4000L,
TimeUnit.MILLISECONDS,
threadId));
expectedOutput += "INSTALLING: FINISHED IN 1.5s\n";
assertOutput(expectedOutput, console);
// Finish both uploads (one succeeds, one fails) and shut the cache down,
// which flushes the upload summary line.
long artifactSizeOne = SizeUnit.MEGABYTES.toBytes(1.5);
ArtifactCacheTestUtils.postStoreFinished(
eventBus, threadId, artifactSizeOne, 5015L, true, storeStartedOne);
HttpArtifactCacheEvent.Started storeStartedTwo =
ArtifactCacheTestUtils.postStoreStarted(eventBus, threadId, 5020L, storeScheduledTwo);
long artifactSizeTwo = 600;
ArtifactCacheTestUtils.postStoreFinished(
eventBus, threadId, artifactSizeTwo, 5020L, false, storeStartedTwo);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
HttpArtifactCacheEvent.newShutdownEvent(), 6000L, TimeUnit.MILLISECONDS, threadId));
expectedOutput +=
"HTTP CACHE UPLOAD: FINISHED 1.50 MBYTES (1 COMPLETE/1 FAILED/0 UPLOADING/0 PENDING)\n";
assertOutput(expectedOutput, console);
}
/**
 * When a rule count has been calculated, the BUILDING line carries the
 * jobs/updated/cache-miss summary.
 */
@Test
public void testJobSummaryIsDisplayed() {
setupSimpleConsole(false);
String expectedOutput = "";
assertOutput(expectedOutput, console);
BuildEvent.RuleCountCalculated ruleCountCalculated =
BuildEvent.ruleCountCalculated(ImmutableSet.of(), 10);
eventBus.post(ruleCountCalculated);
BuildTarget fakeTarget = BuildTargetFactory.newInstance("//banana:stand");
ImmutableSet<BuildTarget> buildTargets = ImmutableSet.of(fakeTarget);
Iterable<String> buildArgs = Iterables.transform(buildTargets, Object::toString);
BuildEvent.Started buildEventStarted = BuildEvent.started(buildArgs);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
buildEventStarted, 200L, TimeUnit.MILLISECONDS, /* threadId */ 0L));
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
BuildEvent.finished(buildEventStarted, ExitCode.SUCCESS),
1234L,
TimeUnit.MILLISECONDS,
/* threadId */ 0L));
expectedOutput += "BUILDING: FINISHED IN 1.0s 0/10 JOBS, 0 UPDATED, 0.0% CACHE MISS\n";
expectedOutput += "BUILD SUCCEEDED\n";
assertOutput(FINISHED_DOWNLOAD_STRING + "\n" + expectedOutput, console);
}
/**
 * Parse/action-graph phases completing before the build starts (as happens
 * with `buck project`) must not yield a negative build duration.
 */
@Test
public void testBuildTimeDoesNotDisplayNegativeOffset() {
setupSimpleConsole(false);
String expectedOutput = "";
assertOutput(expectedOutput, console);
BuildTarget fakeTarget = BuildTargetFactory.newInstance("//banana:stand");
ImmutableSet<BuildTarget> buildTargets = ImmutableSet.of(fakeTarget);
Iterable<String> buildArgs = Iterables.transform(buildTargets, Object::toString);
// Do a full parse and action graph cycle before the build event starts
// This sequencing occurs when running `buck project`
ParseEvent.Started parseStarted = ParseEvent.started(buildTargets);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(parseStarted, 100L, TimeUnit.MILLISECONDS, /* threadId */ 0L));
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
ParseEvent.finished(parseStarted, 10, Optional.empty()),
300L,
TimeUnit.MILLISECONDS,
/* threadId */ 0L));
expectedOutput += "PARSING BUCK FILES: FINISHED IN 0.2s\n";
assertOutput(expectedOutput, console);
ActionGraphEvent.Started actionGraphStarted = ActionGraphEvent.started();
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
actionGraphStarted, 300L, TimeUnit.MILLISECONDS, /* threadId */ 0L));
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
ActionGraphEvent.finished(actionGraphStarted),
500L,
TimeUnit.MILLISECONDS,
/* threadId */ 0L));
BuildEvent.Started buildEventStarted = BuildEvent.started(buildArgs);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
buildEventStarted, 500L, TimeUnit.MILLISECONDS, /* threadId */ 0L));
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
BuildEvent.finished(buildEventStarted, ExitCode.SUCCESS),
600L,
TimeUnit.MILLISECONDS,
/* threadId */ 0L));
// Build duration is measured from the build-started event (500ms), not from
// the earlier parse, so it reports 0.1s rather than a negative value.
expectedOutput += "CREATING ACTION GRAPH: FINISHED IN 0.2s\n";
expectedOutput += FINISHED_DOWNLOAD_STRING + "\n";
expectedOutput += "BUILDING: FINISHED IN 0.1s\n";
expectedOutput += "BUILD SUCCEEDED\n";
assertOutput(expectedOutput, console);
}
/**
 * With hideSucceededRules enabled, the per-rule "BUILT ..." line and the
 * cache-upload wait line are suppressed from the output.
 */
@Test
public void testSimpleHideSucceededBuild() {
setupSimpleConsole(true);
String expectedOutput = "";
assertOutput(expectedOutput, console);
BuildTarget fakeTarget = BuildTargetFactory.newInstance("//banana:stand");
ImmutableSet<BuildTarget> buildTargets = ImmutableSet.of(fakeTarget);
Iterable<String> buildArgs = Iterables.transform(buildTargets, Object::toString);
FakeBuildRule fakeRule = new FakeBuildRule(fakeTarget, ImmutableSortedSet.of());
final long threadId = 0;
BuildEvent.Started buildEventStarted = BuildEvent.started(buildArgs);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(buildEventStarted, 0L, TimeUnit.MILLISECONDS, threadId));
ParseEvent.Started parseStarted = ParseEvent.started(buildTargets);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(parseStarted, 0L, TimeUnit.MILLISECONDS, threadId));
assertOutput(expectedOutput, console);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
ParseEvent.finished(parseStarted, 10, Optional.empty()),
400L,
TimeUnit.MILLISECONDS,
threadId));
expectedOutput += "PARSING BUCK FILES: FINISHED IN 0.4s\n";
assertOutput(expectedOutput, console);
BuildRuleEvent.Started started = BuildRuleEvent.started(fakeRule, durationTracker);
eventBus.postWithoutConfiguring(
configureTestEventAtTime(started, 600L, TimeUnit.MILLISECONDS, threadId));
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
BuildRuleEvent.finished(
started,
BuildRuleKeys.of(new RuleKey("aaaa")),
BuildRuleStatus.SUCCESS,
CacheResult.miss(),
Optional.empty(),
Optional.of(BuildRuleSuccessType.BUILT_LOCALLY),
false,
Optional.empty(),
Optional.empty(),
Optional.empty(),
Optional.empty(),
Optional.empty()),
1000L,
TimeUnit.MILLISECONDS,
threadId));
eventBus.postWithoutConfiguring(
configureTestEventAtTime(
BuildEvent.finished(buildEventStarted, ExitCode.SUCCESS),
1234L,
TimeUnit.MILLISECONDS,
threadId));
// No "BUILT ..." line here: succeeded rules are hidden.
expectedOutput +=
FINISHED_DOWNLOAD_STRING + "\n" + "BUILDING: FINISHED IN 1.2s\n" + "BUILD SUCCEEDED\n";
assertOutput(expectedOutput, console);
}
/**
 * A distributed-build creation event prints the stampede id.
 */
@Test
public void testPrintsStampedeIdForDistributedBuild() {
setupSimpleConsole(true);
String expectedOutput = "";
assertOutput(expectedOutput, console);
eventBus.post(new DistBuildCreatedEvent(STAMPEDE_ID_ONE));
expectedOutput += STAMPEDE_ID_ONE_MESSAGE;
assertOutput(expectedOutput, console);
}
// The listener renders everything to stderr; stdout must always stay empty.
private void assertOutput(String expectedOutput, TestConsole console) {
assertEquals("", console.getTextWrittenToStdOut());
assertEquals(expectedOutput, console.getTextWrittenToStdErr());
}
// Builds a listener with the given hide-succeeded-rules flag and registers it
// on the test event bus.
private void setupSimpleConsole(boolean hideSucceededRules) {
SimpleConsoleEventBusListener listener =
new SimpleConsoleEventBusListener(
console,
fakeClock,
TestResultSummaryVerbosity.of(false, false),
hideSucceededRules,
/* numberOfSlowRulesToShow */ 0,
false,
Locale.US,
logPath,
new DefaultExecutionEnvironment(
ImmutableMap.copyOf(System.getenv()), System.getProperties()),
Optional.empty());
eventBus.register(listener);
}
}
| |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package org.wso2.developerstudio.eclipse.ds.util;
import java.util.List;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.util.Switch;
import org.wso2.developerstudio.eclipse.ds.*;
/**
* <!-- begin-user-doc -->
* The <b>Switch</b> for the model's inheritance hierarchy.
* It supports the call {@link #doSwitch(EObject) doSwitch(object)} to invoke
* the <code>caseXXX</code> method for each class of the model,
* starting with the actual class of the object
* and proceeding up the inheritance hierarchy
* until a non-null result is returned,
* which is the result of the switch.
* <!-- end-user-doc -->
* @see org.wso2.developerstudio.eclipse.ds.DsPackage
* @generated
*/
public class DsSwitch<T> extends Switch<T> {
/**
* The cached model package
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected static DsPackage modelPackage;
/**
* Creates an instance of the switch.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public DsSwitch() {
// Lazily cache the model package singleton on first instantiation; it is
// shared (static) across all DsSwitch instances.
if (modelPackage == null) {
modelPackage = DsPackage.eINSTANCE;
}
}
/**
 * Checks whether this is a switch for the given package.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param ePackage the package in question.
 * @return whether this is a switch for the given package.
 * @generated
 */
@Override
protected boolean isSwitchFor(EPackage ePackage) {
return ePackage == modelPackage;
}
/**
 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the first non-null result returned by a <code>caseXXX</code> call.
 * @generated
 */
@Override
protected T doSwitch(int classifierID, EObject theEObject) {
// Generated dispatch table: each case casts to the model type, delegates to
// its caseXXX hook, and falls back to defaultCase(theEObject) when the hook
// declines by returning null.
switch (classifierID) {
case DsPackage.ATTRIBUTE_MAPPING: {
AttributeMapping attributeMapping = (AttributeMapping)theEObject;
T result = caseAttributeMapping(attributeMapping);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.CALL_QUERY: {
CallQuery callQuery = (CallQuery)theEObject;
T result = caseCallQuery(callQuery);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.CALL_QUERY_LIST: {
CallQueryList callQueryList = (CallQueryList)theEObject;
T result = caseCallQueryList(callQueryList);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.CONFIGURATION_PROPERTY: {
ConfigurationProperty configurationProperty = (ConfigurationProperty)theEObject;
T result = caseConfigurationProperty(configurationProperty);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.CUSTOM_VALIDATOR: {
CustomValidator customValidator = (CustomValidator)theEObject;
T result = caseCustomValidator(customValidator);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.DATA_SERVICE: {
DataService dataService = (DataService)theEObject;
T result = caseDataService(dataService);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.DATA_SOURCE_CONFIGURATION: {
DataSourceConfiguration dataSourceConfiguration = (DataSourceConfiguration)theEObject;
T result = caseDataSourceConfiguration(dataSourceConfiguration);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.DESCRIPTION: {
Description description = (Description)theEObject;
T result = caseDescription(description);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.DOCUMENT_ROOT: {
DocumentRoot documentRoot = (DocumentRoot)theEObject;
T result = caseDocumentRoot(documentRoot);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.DOUBLE_RANGE_VALIDATOR: {
DoubleRangeValidator doubleRangeValidator = (DoubleRangeValidator)theEObject;
T result = caseDoubleRangeValidator(doubleRangeValidator);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.ELEMENT_MAPPING: {
ElementMapping elementMapping = (ElementMapping)theEObject;
T result = caseElementMapping(elementMapping);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.EVENT_SUBSCRIPTION_LIST: {
EventSubscriptionList eventSubscriptionList = (EventSubscriptionList)theEObject;
T result = caseEventSubscriptionList(eventSubscriptionList);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.EVENT_TRIGGER: {
EventTrigger eventTrigger = (EventTrigger)theEObject;
T result = caseEventTrigger(eventTrigger);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.EXCEL_QUERY: {
ExcelQuery excelQuery = (ExcelQuery)theEObject;
T result = caseExcelQuery(excelQuery);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.EXPRESSION: {
Expression expression = (Expression)theEObject;
T result = caseExpression(expression);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.GSPREAD_QUERY: {
GSpreadQuery gSpreadQuery = (GSpreadQuery)theEObject;
T result = caseGSpreadQuery(gSpreadQuery);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.HAS_HEADER: {
HasHeader hasHeader = (HasHeader)theEObject;
T result = caseHasHeader(hasHeader);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.LENGTH_VALIDATOR: {
LengthValidator lengthValidator = (LengthValidator)theEObject;
T result = caseLengthValidator(lengthValidator);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.LONG_RANGE_VALIDATOR: {
LongRangeValidator longRangeValidator = (LongRangeValidator)theEObject;
T result = caseLongRangeValidator(longRangeValidator);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.MAX_ROW_COUNT: {
MaxRowCount maxRowCount = (MaxRowCount)theEObject;
T result = caseMaxRowCount(maxRowCount);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.OPERATION: {
Operation operation = (Operation)theEObject;
T result = caseOperation(operation);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.PARAMETER_MAPPING: {
ParameterMapping parameterMapping = (ParameterMapping)theEObject;
T result = caseParameterMapping(parameterMapping);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.PATTERN_VALIDATOR: {
PatternValidator patternValidator = (PatternValidator)theEObject;
T result = casePatternValidator(patternValidator);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.QUERY: {
Query query = (Query)theEObject;
T result = caseQuery(query);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.QUERY_PARAMETER: {
QueryParameter queryParameter = (QueryParameter)theEObject;
T result = caseQueryParameter(queryParameter);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.QUERY_PROPERTY: {
QueryProperty queryProperty = (QueryProperty)theEObject;
T result = caseQueryProperty(queryProperty);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.QUERY_PROPERTY_LIST: {
QueryPropertyList queryPropertyList = (QueryPropertyList)theEObject;
T result = caseQueryPropertyList(queryPropertyList);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.RESOURCE: {
Resource resource = (Resource)theEObject;
T result = caseResource(resource);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.RESULT_MAPPING: {
ResultMapping resultMapping = (ResultMapping)theEObject;
T result = caseResultMapping(resultMapping);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.SPARQL: {
Sparql sparql = (Sparql)theEObject;
T result = caseSparql(sparql);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.SQL: {
Sql sql = (Sql)theEObject;
T result = caseSql(sql);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.QUERY_EXPRESSION: {
QueryExpression queryExpression = (QueryExpression)theEObject;
T result = caseQueryExpression(queryExpression);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.STARTING_ROW: {
StartingRow startingRow = (StartingRow)theEObject;
T result = caseStartingRow(startingRow);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.SUBSCRIPTION: {
Subscription subscription = (Subscription)theEObject;
T result = caseSubscription(subscription);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.TARGET_TOPIC: {
TargetTopic targetTopic = (TargetTopic)theEObject;
T result = caseTargetTopic(targetTopic);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.WORK_BOOK_NAME: {
WorkBookName workBookName = (WorkBookName)theEObject;
T result = caseWorkBookName(workBookName);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.WORK_SHEET_NUMBER: {
WorkSheetNumber workSheetNumber = (WorkSheetNumber)theEObject;
T result = caseWorkSheetNumber(workSheetNumber);
if (result == null) result = defaultCase(theEObject);
return result;
}
case DsPackage.POLICY: {
Policy policy = (Policy)theEObject;
T result = casePolicy(policy);
if (result == null) result = defaultCase(theEObject);
return result;
}
default: return defaultCase(theEObject);
}
}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Attribute Mapping</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Attribute Mapping</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Attribute Mapping</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseAttributeMapping(AttributeMapping object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Call Query</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Call Query</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Call Query</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseCallQuery(CallQuery object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Call Query List</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Call Query List</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Call Query List</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseCallQueryList(CallQueryList object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Configuration Property</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Configuration Property</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Configuration Property</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseConfigurationProperty(ConfigurationProperty object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Custom Validator</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Custom Validator</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Custom Validator</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseCustomValidator(CustomValidator object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Data Service</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Data Service</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Data Service</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseDataService(DataService object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Data Source Configuration</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Data Source Configuration</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Data Source Configuration</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseDataSourceConfiguration(DataSourceConfiguration object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Description</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Description</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Description</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseDescription(Description object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Document Root</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Document Root</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Document Root</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseDocumentRoot(DocumentRoot object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Double Range Validator</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Double Range Validator</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Double Range Validator</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseDoubleRangeValidator(DoubleRangeValidator object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Element Mapping</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Element Mapping</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Element Mapping</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseElementMapping(ElementMapping object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Event Subscription List</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Event Subscription List</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Event Subscription List</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseEventSubscriptionList(EventSubscriptionList object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Event Trigger</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Event Trigger</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Event Trigger</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseEventTrigger(EventTrigger object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Excel Query</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Excel Query</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Excel Query</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseExcelQuery(ExcelQuery object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Expression</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Expression</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Expression</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseExpression(Expression object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>GSpread Query</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>GSpread Query</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>GSpread Query</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseGSpreadQuery(GSpreadQuery object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Has Header</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Has Header</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Has Header</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseHasHeader(HasHeader object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Length Validator</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Length Validator</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Length Validator</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseLengthValidator(LengthValidator object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Long Range Validator</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Long Range Validator</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Long Range Validator</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseLongRangeValidator(LongRangeValidator object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Max Row Count</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Max Row Count</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Max Row Count</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseMaxRowCount(MaxRowCount object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Operation</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Operation</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Operation</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseOperation(Operation object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Parameter Mapping</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Parameter Mapping</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Parameter Mapping</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseParameterMapping(ParameterMapping object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Pattern Validator</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Pattern Validator</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Pattern Validator</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T casePatternValidator(PatternValidator object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Query</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Query</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Query</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseQuery(Query object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Query Parameter</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Query Parameter</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Query Parameter</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseQueryParameter(QueryParameter object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Query Property</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Query Property</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Query Property</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseQueryProperty(QueryProperty object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Query Property List</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Query Property List</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Query Property List</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseQueryPropertyList(QueryPropertyList object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Resource</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Resource</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Resource</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseResource(Resource object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Result Mapping</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Result Mapping</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Result Mapping</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseResultMapping(ResultMapping object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Sparql</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Sparql</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Sparql</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseSparql(Sparql object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Sql</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Sql</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Sql</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseSql(Sql object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Query Expression</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Query Expression</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Query Expression</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseQueryExpression(QueryExpression object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Starting Row</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Starting Row</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Starting Row</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseStartingRow(StartingRow object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Subscription</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Subscription</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Subscription</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseSubscription(Subscription object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Target Topic</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Target Topic</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Target Topic</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseTargetTopic(TargetTopic object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Work Book Name</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Work Book Name</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Work Book Name</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseWorkBookName(WorkBookName object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Work Sheet Number</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Work Sheet Number</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Work Sheet Number</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseWorkSheetNumber(WorkSheetNumber object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Policy</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null, letting the switch fall through to
	 * more general cases; override and return a non-null result to handle
	 * '<em>Policy</em>' objects and terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Policy</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T casePolicy(Policy object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>EObject</em>'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null; it is invoked when no more specific
	 * case applied, so a non-null result would terminate the switch, but this is the
	 * last case anyway.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>EObject</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject)
	 * @generated
	 */
	@Override
	public T defaultCase(EObject object) {
		return null;
	}
} // DsSwitch
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.index.store.cache;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.blobstore.cache.BlobStoreCacheService;
import org.elasticsearch.blobstore.cache.CachedBlob;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.Channels;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo;
import org.elasticsearch.index.snapshots.blobstore.SlicedInputStream;
import org.elasticsearch.index.store.BaseSearchableSnapshotIndexInput;
import org.elasticsearch.index.store.IndexInputStats;
import org.elasticsearch.index.store.SearchableSnapshotDirectory;
import org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsConstants;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.Locale;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.IntStream;
import static org.elasticsearch.index.store.checksum.ChecksumBlobContainerIndexInput.checksumToBytesArray;
import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsConstants.toIntBytes;
public class CachedBlobContainerIndexInput extends BaseSearchableSnapshotIndexInput {
    /**
     * Specific IOContext used for prewarming the cache. This context allows to write
     * a complete part of the {@link #fileInfo} at once in the cache and should not be
     * used for anything else than what the {@link #prefetchPart(int)} method does.
     */
    public static final IOContext CACHE_WARMING_CONTEXT = new IOContext();
    private static final Logger logger = LogManager.getLogger(CachedBlobContainerIndexInput.class);
    // 8 KiB scratch-buffer size; also the assumed upper bound for index-cache-miss reads (see readInternal)
    private static final int COPY_BUFFER_SIZE = ByteSizeUnit.KB.toIntBytes(8);
    // owning directory: provides the cache file, the blob-store cache index and the fetch executor
    private final SearchableSnapshotDirectory directory;
    // handle to the on-disk cache file; released on close of the original (non-clone) input
    private final CacheFileReference cacheFileReference;
    // range size (bytes) used when fetching data into the cache outside of prewarming
    private final int defaultRangeSize;
    // last read position is kept around in order to detect (non)contiguous reads for stats
    private long lastReadPosition;
    // last seek position is kept around in order to detect forward/backward seeks for stats
    private long lastSeekPosition;
    /**
     * Creates an index input that reads a snapshotted file through the local disk
     * cache, spanning the whole file, and records the open in {@code stats}.
     *
     * @param directory the searchable snapshot directory that owns the cache
     * @param fileInfo  snapshot metadata of the file to read
     * @param context   Lucene IO context for this input
     * @param stats     per-file statistics collector
     * @param rangeSize default size (in bytes) of the ranges fetched into the cache
     */
    public CachedBlobContainerIndexInput(
        SearchableSnapshotDirectory directory,
        FileInfo fileInfo,
        IOContext context,
        IndexInputStats stats,
        int rangeSize
    ) {
        this(
            "CachedBlobContainerIndexInput(" + fileInfo.physicalName() + ")",
            directory,
            fileInfo,
            context,
            stats,
            0L,
            fileInfo.length(),
            new CacheFileReference(directory, fileInfo.physicalName(), fileInfo.length()),
            rangeSize
        );
        assert getBufferSize() <= BlobStoreCacheService.DEFAULT_CACHED_BLOB_SIZE; // must be able to cache at least one buffer's worth
        stats.incrementOpenCount();
    }
    /**
     * Internal constructor. {@code offset} and {@code length} delimit the window of
     * the underlying file visible through this input; the {@link CacheFileReference}
     * may be shared between an input and its slices/clones.
     */
    private CachedBlobContainerIndexInput(
        String resourceDesc,
        SearchableSnapshotDirectory directory,
        FileInfo fileInfo,
        IOContext context,
        IndexInputStats stats,
        long offset,
        long length,
        CacheFileReference cacheFileReference,
        int rangeSize
    ) {
        super(resourceDesc, directory.blobContainer(), fileInfo, context, stats, offset, length);
        this.directory = directory;
        this.cacheFileReference = cacheFileReference;
        // start read/seek tracking at the window start so the first access counts as contiguous/forward
        this.lastReadPosition = this.offset;
        this.lastSeekPosition = this.offset;
        this.defaultRangeSize = rangeSize;
    }
@Override
public void innerClose() {
if (isClone == false) {
cacheFileReference.releaseOnClose();
}
}
private void ensureContext(Predicate<IOContext> predicate) throws IOException {
if (predicate.test(context) == false) {
assert false : "this method should not be used with this context " + context;
throw new IOException("Cannot read the index input using context [context=" + context + ", input=" + this + ']');
}
}
private long getDefaultRangeSize() {
return (context != CACHE_WARMING_CONTEXT) ? defaultRangeSize : fileInfo.partSize().getBytes();
}
private Tuple<Long, Long> computeRange(long position) {
final long rangeSize = getDefaultRangeSize();
long start = (position / rangeSize) * rangeSize;
long end = Math.min(start + rangeSize, fileInfo.length());
return Tuple.tuple(start, end);
}
private CacheFile getCacheFileSafe() throws Exception {
final CacheFile cacheFile = cacheFileReference.get();
if (cacheFile == null) {
throw new AlreadyClosedException("Failed to acquire a non-evicted cache file");
}
return cacheFile;
}
    /**
     * Reads {@code b.remaining()} bytes at the current file pointer, trying in
     * order: the snapshot-metadata checksum (footer reads only), the on-disk cache
     * file, the blob store cache index, and finally the blob store itself (which
     * also populates the caches). Not usable with {@link #CACHE_WARMING_CONTEXT}.
     */
    @Override
    protected void readInternal(ByteBuffer b) throws IOException {
        ensureContext(ctx -> ctx != CACHE_WARMING_CONTEXT);
        assert assertCurrentThreadIsNotCacheFetchAsync();
        // position is absolute within the underlying file (this.offset accounts for slices)
        final long position = getFilePointer() + this.offset;
        final int length = b.remaining();
        // We can detect that we're going to read the last 16 bytes (that contains the footer checksum) of the file. Such reads are often
        // executed when opening a Directory and since we have the checksum in the snapshot metadata we can use it to fill the ByteBuffer.
        if (length == CodecUtil.footerLength() && isClone == false && position == fileInfo.length() - length) {
            if (readChecksumFromFileInfo(b)) {
                logger.trace("read footer of file [{}] at position [{}], bypassing all caches", fileInfo.physicalName(), position);
                return;
            }
            assert b.remaining() == length;
        }
        logger.trace("readInternal: read [{}-{}] ([{}] bytes) from [{}]", position, position + length, length, this);
        try {
            final CacheFile cacheFile = getCacheFileSafe();
            try (Releasable ignored = cacheFile.fileLock()) {
                // Can we serve the read directly from disk? If so, do so and don't worry about anything else.
                final CompletableFuture<Integer> waitingForRead = cacheFile.readIfAvailableOrPending(
                    Tuple.tuple(position, position + length),
                    channel -> {
                        final int read = readCacheFile(channel, position, b);
                        assert read == length : read + " vs " + length;
                        return read;
                    }
                );
                if (waitingForRead != null) {
                    final Integer read = waitingForRead.get();
                    assert read == length;
                    readComplete(position, length);
                    return;
                }
                // Requested data is not on disk, so try the cache index next.
                final Tuple<Long, Long> indexCacheMiss; // null if not a miss
                // We try to use the cache index if:
                // - the file is small enough to be fully cached
                final boolean canBeFullyCached = fileInfo.length() <= BlobStoreCacheService.DEFAULT_CACHED_BLOB_SIZE * 2;
                // - we're reading the first N bytes of the file
                final boolean isStartOfFile = (position + length <= BlobStoreCacheService.DEFAULT_CACHED_BLOB_SIZE);
                if (canBeFullyCached || isStartOfFile) {
                    final CachedBlob cachedBlob = directory.getCachedBlob(fileInfo.physicalName(), 0L, length);
                    if (cachedBlob == CachedBlob.CACHE_MISS || cachedBlob == CachedBlob.CACHE_NOT_READY) {
                        // We would have liked to find a cached entry but we did not find anything: the cache on the disk will be requested
                        // so we compute the region of the file we would like to have the next time. The region is expressed as a tuple of
                        // {start, end} where positions are relative to the whole file.
                        if (canBeFullyCached) {
                            // if the index input is smaller than twice the size of the blob cache, it will be fully indexed
                            indexCacheMiss = Tuple.tuple(0L, fileInfo.length());
                        } else {
                            // the index input is too large to fully cache, so just cache the initial range
                            indexCacheMiss = Tuple.tuple(0L, (long) BlobStoreCacheService.DEFAULT_CACHED_BLOB_SIZE);
                        }
                        // We must fill in a cache miss even if CACHE_NOT_READY since the cache index is only created on the first put.
                        // TODO TBD use a different trigger for creating the cache index and avoid a put in the CACHE_NOT_READY case.
                    } else {
                        // Cache-index hit: copy the requested slice into the caller's buffer...
                        logger.trace(
                            "reading [{}] bytes of file [{}] at position [{}] using cache index",
                            length,
                            fileInfo.physicalName(),
                            position
                        );
                        stats.addIndexCacheBytesRead(cachedBlob.length());
                        final BytesRefIterator cachedBytesIterator = cachedBlob.bytes().slice(toIntBytes(position), length).iterator();
                        BytesRef bytesRef;
                        while ((bytesRef = cachedBytesIterator.next()) != null) {
                            b.put(bytesRef.bytes, bytesRef.offset, bytesRef.length);
                        }
                        assert b.position() == length : "copied " + b.position() + " but expected " + length;
                        // ...and, best-effort, also write the cached blob to the on-disk cache so later reads hit disk directly.
                        try {
                            final Tuple<Long, Long> cachedRange = Tuple.tuple(cachedBlob.from(), cachedBlob.to());
                            cacheFile.populateAndRead(
                                cachedRange,
                                cachedRange,
                                channel -> cachedBlob.length(),
                                (channel, from, to, progressUpdater) -> {
                                    final long startTimeNanos = stats.currentTimeNanos();
                                    final BytesRefIterator iterator = cachedBlob.bytes()
                                        .slice(toIntBytes(from - cachedBlob.from()), toIntBytes(to - from))
                                        .iterator();
                                    long writePosition = from;
                                    BytesRef current;
                                    while ((current = iterator.next()) != null) {
                                        final ByteBuffer byteBuffer = ByteBuffer.wrap(current.bytes, current.offset, current.length);
                                        while (byteBuffer.remaining() > 0) {
                                            writePosition += positionalWrite(channel, writePosition, byteBuffer);
                                            progressUpdater.accept(writePosition);
                                        }
                                    }
                                    assert writePosition == to : writePosition + " vs " + to;
                                    final long endTimeNanos = stats.currentTimeNanos();
                                    stats.addCachedBytesWritten(to - from, endTimeNanos - startTimeNanos);
                                    logger.trace("copied bytes [{}-{}] of file [{}] from cache index to disk", from, to, fileInfo);
                                },
                                directory.cacheFetchAsyncExecutor()
                            );
                        } catch (Exception e) {
                            logger.debug(
                                new ParameterizedMessage(
                                    "failed to store bytes [{}-{}] of file [{}] obtained from index cache",
                                    cachedBlob.from(),
                                    cachedBlob.to(),
                                    fileInfo
                                ),
                                e
                            );
                            // oh well, no big deal, at least we can return them to the caller.
                        }
                        readComplete(position, length);
                        return;
                    }
                } else {
                    // requested range is not eligible for caching
                    indexCacheMiss = null;
                }
                // Requested data is also not in the cache index, so we must visit the blob store to satisfy both the target range and any
                // miss in the cache index.
                final Tuple<Long, Long> startRangeToWrite = computeRange(position);
                final Tuple<Long, Long> endRangeToWrite = computeRange(position + length - 1);
                assert startRangeToWrite.v2() <= endRangeToWrite.v2() : startRangeToWrite + " vs " + endRangeToWrite;
                // widen the write range to also cover the cache-index miss (if any) in a single fetch
                final Tuple<Long, Long> rangeToWrite = Tuple.tuple(
                    Math.min(startRangeToWrite.v1(), indexCacheMiss == null ? Long.MAX_VALUE : indexCacheMiss.v1()),
                    Math.max(endRangeToWrite.v2(), indexCacheMiss == null ? Long.MIN_VALUE : indexCacheMiss.v2())
                );
                assert rangeToWrite.v1() <= position && position + length <= rangeToWrite.v2() : "["
                    + position
                    + "-"
                    + (position + length)
                    + "] vs "
                    + rangeToWrite;
                final Tuple<Long, Long> rangeToRead = Tuple.tuple(position, position + length);
                final CompletableFuture<Integer> populateCacheFuture = cacheFile.populateAndRead(rangeToWrite, rangeToRead, channel -> {
                    final int read;
                    if ((rangeToRead.v2() - rangeToRead.v1()) < b.remaining()) {
                        // the range to read is smaller than the remaining buffer space; restrict via a duplicate with a tighter limit
                        final ByteBuffer duplicate = b.duplicate();
                        duplicate.limit(duplicate.position() + toIntBytes(rangeToRead.v2() - rangeToRead.v1()));
                        read = readCacheFile(channel, position, duplicate);
                        assert duplicate.position() <= b.limit();
                        b.position(duplicate.position());
                    } else {
                        read = readCacheFile(channel, position, b);
                    }
                    return read;
                }, this::writeCacheFile, directory.cacheFetchAsyncExecutor());
                if (indexCacheMiss != null) {
                    // once the missed region is on disk, read it back and push it into the blob store cache index
                    final Releasable onCacheFillComplete = stats.addIndexCacheFill();
                    final CompletableFuture<Integer> readFuture = cacheFile.readIfAvailableOrPending(indexCacheMiss, channel -> {
                        final int indexCacheMissLength = toIntBytes(indexCacheMiss.v2() - indexCacheMiss.v1());
                        // We assume that we only cache small portions of blobs so that we do not need to:
                        // - use a BigArrays for allocation
                        // - use an intermediate copy buffer to read the file in sensibly-sized chunks
                        // - release the buffer once the indexing operation is complete
                        assert indexCacheMissLength <= COPY_BUFFER_SIZE : indexCacheMiss;
                        final ByteBuffer byteBuffer = ByteBuffer.allocate(indexCacheMissLength);
                        Channels.readFromFileChannelWithEofException(channel, indexCacheMiss.v1(), byteBuffer);
                        // NB use Channels.readFromFileChannelWithEofException not readCacheFile() to avoid counting this in the stats
                        byteBuffer.flip();
                        final BytesReference content = BytesReference.fromByteBuffer(byteBuffer);
                        directory.putCachedBlob(fileInfo.physicalName(), indexCacheMiss.v1(), content, new ActionListener<>() {
                            @Override
                            public void onResponse(Void response) {
                                onCacheFillComplete.close();
                            }
                            @Override
                            public void onFailure(Exception e1) {
                                onCacheFillComplete.close();
                            }
                        });
                        return indexCacheMissLength;
                    });
                    if (readFuture == null) {
                        // Normally doesn't happen, we're already obtaining a range covering all cache misses above, but theoretically
                        // possible in the case that the real populateAndRead call already failed to obtain this range of the file. In that
                        // case, simply move on.
                        onCacheFillComplete.close();
                    }
                }
                final int bytesRead = populateCacheFuture.get();
                assert bytesRead == length : bytesRead + " vs " + length;
            }
        } catch (final Exception e) {
            // may have partially filled the buffer before the exception was thrown, so try and get the remainder directly.
            final int alreadyRead = length - b.remaining();
            final int bytesRead = readDirectlyIfAlreadyClosed(position + alreadyRead, b, e);
            assert alreadyRead + bytesRead == length : alreadyRead + " + " + bytesRead + " vs " + length;
            // In principle we could handle an index cache miss here too, ensuring that the direct read was large enough, but this is
            // already a rare case caused by an overfull/undersized cache.
        }
        readComplete(position, length);
    }
/**
 * Records a completed read of {@code length} bytes at {@code position} in the stats and
 * remembers where the read finished so a following sequential read can be detected.
 */
private void readComplete(long position, int length) {
    stats.incrementBytesRead(lastReadPosition, position, length);
    final long endOfRead = position + length;
    lastReadPosition = endOfRead;
    lastSeekPosition = endOfRead;
}
/**
 * Fallback used when an exception interrupted a cache-served read: if the failure was caused by
 * the cache file being closed/evicted ({@link AlreadyClosedException}, possibly as the cause),
 * the remaining {@code b.remaining()} bytes are fetched directly from the blob store instead.
 * Otherwise, or if the direct read also fails, the original exception is rethrown wrapped in an
 * {@link IOException} (with the secondary failure attached as suppressed).
 *
 * @param position absolute file position at which to resume reading
 * @param b        destination buffer; filled completely on success
 * @param e        the exception that interrupted the cached read
 * @return the number of bytes copied into {@code b}
 * @throws IOException if {@code e} is not eviction-related or the direct read fails as well
 */
private int readDirectlyIfAlreadyClosed(long position, ByteBuffer b, Exception e) throws IOException {
    if (e instanceof AlreadyClosedException || (e.getCause() != null && e.getCause() instanceof AlreadyClosedException)) {
        try {
            // cache file was evicted during the range fetching, read bytes directly from blob container
            final long length = b.remaining();
            final byte[] copyBuffer = new byte[toIntBytes(Math.min(COPY_BUFFER_SIZE, length))];
            logger.trace(
                () -> new ParameterizedMessage(
                    "direct reading of range [{}-{}] for cache file [{}]",
                    position,
                    position + length,
                    cacheFileReference
                )
            );
            int bytesCopied = 0;
            final long startTimeNanos = stats.currentTimeNanos();
            try (InputStream input = openInputStreamFromBlobStore(position, length)) {
                long remaining = length;
                while (remaining > 0) {
                    // read at most one copy-buffer's worth per iteration
                    final int len = (remaining < copyBuffer.length) ? (int) remaining : copyBuffer.length;
                    int bytesRead = input.read(copyBuffer, 0, len);
                    if (bytesRead == -1) {
                        throw new EOFException(
                            String.format(
                                Locale.ROOT,
                                "unexpected EOF reading [%d-%d] ([%d] bytes remaining) from %s",
                                position,
                                position + length,
                                remaining,
                                cacheFileReference
                            )
                        );
                    }
                    b.put(copyBuffer, 0, bytesRead);
                    bytesCopied += bytesRead;
                    remaining -= bytesRead;
                    assert remaining == b.remaining() : remaining + " vs " + b.remaining();
                }
                final long endTimeNanos = stats.currentTimeNanos();
                stats.addDirectBytesRead(bytesCopied, endTimeNanos - startTimeNanos);
            }
            return bytesCopied;
        } catch (Exception inner) {
            // keep the direct-read failure attached to the original exception for diagnostics
            e.addSuppressed(inner);
        }
    }
    throw new IOException("failed to read data from cache", e);
}
/**
 * Optimisation: answers a read of the trailing checksum bytes straight from the snapshot
 * metadata, without touching cache or blob store. Returns {@code false} when the checksum
 * cannot be decoded, in which case the caller must fall back to a regular read.
 */
private boolean readChecksumFromFileInfo(ByteBuffer b) throws IOException {
    assert isClone == false;
    final byte[] checksumBytes;
    try {
        checksumBytes = checksumToBytesArray(fileInfo.checksum());
    } catch (NumberFormatException e) {
        // tests disable this optimisation by passing an invalid checksum
        return false;
    }
    if (checksumBytes == null) {
        return false;
    }
    b.put(checksumBytes);
    assert b.remaining() == 0L;
    return true;
    // TODO we should add this to DirectBlobContainerIndexInput too.
}
/**
 * Prefetches a complete part and writes it in cache. This method is used to prewarm the cache.
 *
 * Only the range of the part that is still absent from the cache is fetched; if the part is
 * already fully cached the method returns immediately.
 *
 * @param part zero-based part number to prewarm
 * @throws IOException          if fetching or caching fails (original failure attached as cause)
 * @throws IllegalArgumentException if {@code part} is out of range
 */
public void prefetchPart(final int part) throws IOException {
    ensureContext(ctx -> ctx == CACHE_WARMING_CONTEXT);
    if (part >= fileInfo.numberOfParts()) {
        throw new IllegalArgumentException("Unexpected part number [" + part + "]");
    }
    // start offset of the part = sum of the sizes of all preceding parts
    final Tuple<Long, Long> partRange = computeRange(IntStream.range(0, part).mapToLong(fileInfo::partBytes).sum());
    assert assertRangeIsAlignedWithPart(partRange);
    try {
        final CacheFile cacheFile = getCacheFileSafe();
        // narrow the work down to the sub-range that is not cached yet
        final Tuple<Long, Long> range = cacheFile.getAbsentRangeWithin(partRange.v1(), partRange.v2());
        if (range == null) {
            logger.trace(
                "prefetchPart: part [{}] bytes [{}-{}] is already fully available for cache file [{}]",
                part,
                partRange.v1(),
                partRange.v2(),
                cacheFileReference
            );
            return;
        }
        final long rangeStart = range.v1();
        final long rangeEnd = range.v2();
        final long rangeLength = rangeEnd - rangeStart;
        logger.trace(
            "prefetchPart: prewarming part [{}] bytes [{}-{}] by fetching bytes [{}-{}] for cache file [{}]",
            part,
            partRange.v1(),
            partRange.v2(),
            rangeStart,
            rangeEnd,
            cacheFileReference
        );
        final byte[] copyBuffer = new byte[toIntBytes(Math.min(COPY_BUFFER_SIZE, rangeLength))];
        long totalBytesRead = 0L;
        // written count is updated from the async writer lambda, hence the AtomicLong
        final AtomicLong totalBytesWritten = new AtomicLong();
        long remainingBytes = rangeEnd - rangeStart;
        final long startTimeNanos = stats.currentTimeNanos();
        try (InputStream input = openInputStreamFromBlobStore(rangeStart, rangeLength)) {
            while (remainingBytes > 0L) {
                assert totalBytesRead + remainingBytes == rangeLength;
                final int bytesRead = readSafe(input, copyBuffer, rangeStart, rangeEnd, remainingBytes, cacheFileReference);
                // The range to prewarm in cache
                final long readStart = rangeStart + totalBytesRead;
                final Tuple<Long, Long> rangeToWrite = Tuple.tuple(readStart, readStart + bytesRead);
                // We do not actually read anything, but we want to wait for the write to complete before proceeding.
                // noinspection UnnecessaryLocalVariable
                final Tuple<Long, Long> rangeToRead = rangeToWrite;
                try (Releasable ignored = cacheFile.fileLock()) {
                    assert assertFileChannelOpen(cacheFile.getChannel());
                    cacheFile.populateAndRead(
                        rangeToWrite,
                        rangeToRead,
                        (channel) -> bytesRead,
                        (channel, start, end, progressUpdater) -> {
                            // copyBuffer holds bytes starting at readStart; translate to buffer offsets
                            final ByteBuffer byteBuffer = ByteBuffer.wrap(
                                copyBuffer,
                                toIntBytes(start - readStart),
                                toIntBytes(end - start)
                            );
                            final int writtenBytes = positionalWrite(channel, start, byteBuffer);
                            logger.trace(
                                "prefetchPart: writing range [{}-{}] of file [{}], [{}] bytes written",
                                start,
                                end,
                                fileInfo.physicalName(),
                                writtenBytes
                            );
                            totalBytesWritten.addAndGet(writtenBytes);
                            progressUpdater.accept(start + writtenBytes);
                        },
                        directory.cacheFetchAsyncExecutor()
                    ).get(); // block until this chunk is fully written to the cache file
                }
                totalBytesRead += bytesRead;
                remainingBytes -= bytesRead;
            }
            final long endTimeNanos = stats.currentTimeNanos();
            stats.addCachedBytesWritten(totalBytesWritten.get(), endTimeNanos - startTimeNanos);
        }
        assert totalBytesRead == rangeLength;
    } catch (final Exception e) {
        throw new IOException("Failed to prefetch file part in cache", e);
    }
}
/**
 * Writes {@code byteBuffer} to {@code fc} at absolute offset {@code start} using a positional
 * write (does not move the channel's position).
 *
 * @return the number of bytes written.
 */
@SuppressForbidden(reason = "Use positional writes on purpose")
private static int positionalWrite(FileChannel fc, long start, ByteBuffer byteBuffer) throws IOException {
    // only cache-fetch-async threads may write cache files
    assert assertCurrentThreadMayWriteCacheFile();
    return fc.write(byteBuffer, start);
}
/**
 * Perform a single {@code read()} from {@code inputStream} into {@code copyBuffer}, handling an
 * EOF by throwing an {@link EOFException} rather than returning {@code -1}. Returns the number of
 * bytes read, which is always positive.
 *
 * Most of its arguments are there simply to make the message of the {@link EOFException} more
 * informative.
 */
private static int readSafe(
    InputStream inputStream,
    byte[] copyBuffer,
    long rangeStart,
    long rangeEnd,
    long remaining,
    CacheFileReference cacheFileReference
) throws IOException {
    // never ask for more than the buffer can hold
    final int maxLen = remaining < copyBuffer.length ? toIntBytes(remaining) : copyBuffer.length;
    final int bytesRead = inputStream.read(copyBuffer, 0, maxLen);
    if (bytesRead < 0) {
        final String message = String.format(
            Locale.ROOT,
            "unexpected EOF reading [%d-%d] ([%d] bytes remaining) from %s",
            rangeStart,
            rangeEnd,
            remaining,
            cacheFileReference
        );
        throw new EOFException(message);
    }
    assert bytesRead > 0 : bytesRead;
    return bytesRead;
}
/**
 * Asserts that the range of bytes to warm in cache is aligned with {@link #fileInfo}'s part size.
 * Always returns {@code true} so it can be used directly in an {@code assert} statement.
 */
private boolean assertRangeIsAlignedWithPart(Tuple<Long, Long> range) {
    if (fileInfo.numberOfParts() == 1L) {
        // single-part blob: the range must cover the whole file
        final long length = fileInfo.length();
        assert range.v1() == 0L : "start of range [" + range.v1() + "] is not aligned with zero";
        assert range.v2() == length : "end of range [" + range.v2() + "] is not aligned with file length [" + length + ']';
    } else {
        // multi-part blob: range must start on a part boundary and end on a part boundary
        // or at the end of the file
        final long length = fileInfo.partSize().getBytes();
        assert range.v1() % length == 0L : "start of range [" + range.v1() + "] is not aligned with part start";
        assert range.v2() % length == 0L || (range.v2() == fileInfo.length()) : "end of range ["
            + range.v2()
            + "] is not aligned with part end or with file length";
    }
    return true;
}
/**
 * Reads up to {@code buffer.remaining()} bytes from the cache file channel at the given absolute
 * position and records the amount in the "cached bytes read" stats.
 *
 * @return the number of bytes read (may be less than requested).
 * @throws EOFException if the channel reports end-of-file at {@code position}.
 */
private int readCacheFile(final FileChannel fc, final long position, final ByteBuffer buffer) throws IOException {
    assert assertFileChannelOpen(fc);
    final int bytesRead = Channels.readFromFileChannel(fc, position, buffer);
    if (bytesRead == -1) {
        throw new EOFException(
            String.format(
                Locale.ROOT,
                "unexpected EOF reading [%d-%d] from %s",
                position,
                position + buffer.remaining(),
                cacheFileReference
            )
        );
    }
    stats.addCachedBytesRead(bytesRead);
    return bytesRead;
}
/**
 * Fetches the byte range {@code [start, end)} from the blob store and writes it to the cache
 * file channel, reporting the absolute position written so far after each chunk via
 * {@code progressUpdater} and recording the total in the "cached bytes written" stats.
 *
 * Must run on a cache-fetch-async thread (asserted below).
 */
private void writeCacheFile(final FileChannel fc, final long start, final long end, final Consumer<Long> progressUpdater)
    throws IOException {
    assert assertFileChannelOpen(fc);
    assert assertCurrentThreadMayWriteCacheFile();
    final long length = end - start;
    final byte[] copyBuffer = new byte[toIntBytes(Math.min(COPY_BUFFER_SIZE, length))];
    logger.trace(() -> new ParameterizedMessage("writing range [{}-{}] to cache file [{}]", start, end, cacheFileReference));
    long bytesCopied = 0L;
    long remaining = end - start;
    final long startTimeNanos = stats.currentTimeNanos();
    try (InputStream input = openInputStreamFromBlobStore(start, length)) {
        while (remaining > 0L) {
            final int bytesRead = readSafe(input, copyBuffer, start, end, remaining, cacheFileReference);
            positionalWrite(fc, start + bytesCopied, ByteBuffer.wrap(copyBuffer, 0, bytesRead));
            bytesCopied += bytesRead;
            remaining -= bytesRead;
            // progress is reported only after the bytes have been handed to the channel
            progressUpdater.accept(start + bytesCopied);
        }
        final long endTimeNanos = stats.currentTimeNanos();
        stats.addCachedBytesWritten(bytesCopied, endTimeNanos - startTimeNanos);
    }
}
/**
 * Opens an {@link InputStream} for the given range of bytes which reads the data directly from the blob store. If the requested range
 * spans multiple blobs then this stream will request them in turn.
 *
 * @param position The start of the range of bytes to read, relative to the start of the corresponding Lucene file.
 * @param length The number of bytes to read
 */
private InputStream openInputStreamFromBlobStore(final long position, final long length) throws IOException {
    assert assertCurrentThreadMayAccessBlobStore();
    if (fileInfo.numberOfParts() == 1L) {
        // fast path: a single blob holds the whole file, so one readBlob call suffices
        assert position + length <= fileInfo.partBytes(0) : "cannot read ["
            + position
            + "-"
            + (position + length)
            + "] from ["
            + fileInfo
            + "]";
        stats.addBlobStoreBytesRequested(length);
        return blobContainer.readBlob(fileInfo.partName(0), position, length);
    } else {
        final int startPart = getPartNumberForPosition(position);
        final int endPart = getPartNumberForPosition(position + length - 1);

        // account for all the bytes we are going to request, up front, even though the slices
        // below are opened lazily
        for (int currentPart = startPart; currentPart <= endPart; currentPart++) {
            final long startInPart = (currentPart == startPart) ? getRelativePositionInPart(position) : 0L;
            final long endInPart = (currentPart == endPart)
                ? getRelativePositionInPart(position + length - 1) + 1
                : getLengthOfPart(currentPart);
            stats.addBlobStoreBytesRequested(endInPart - startInPart);
        }

        // lazily chain one stream per part; each slice covers the intersection of the
        // requested range with its part
        return new SlicedInputStream(endPart - startPart + 1) {
            @Override
            protected InputStream openSlice(int slice) throws IOException {
                final int currentPart = startPart + slice;
                final long startInPart = (currentPart == startPart) ? getRelativePositionInPart(position) : 0L;
                final long endInPart = (currentPart == endPart)
                    ? getRelativePositionInPart(position + length - 1) + 1
                    : getLengthOfPart(currentPart);
                return blobContainer.readBlob(fileInfo.partName(currentPart), startInPart, endInPart - startInPart);
            }
        };
    }
}
/**
 * Compute the part number that contains the byte at the given position in the corresponding Lucene file.
 *
 * @param position absolute position within the Lucene file; must satisfy {@code 0 <= position < fileInfo.length()}
 * @return the zero-based number of the part containing the byte
 */
private int getPartNumberForPosition(long position) {
    ensureValidPosition(position);
    final int part = Math.toIntExact(position / fileInfo.partSize().getBytes());
    // Parts are zero-based, so any valid position maps to a part strictly below the part count.
    // The previous assertion used "<=", which could never catch an off-by-one here.
    assert part < fileInfo.numberOfParts() : "part number [" + part + "] exceeds number of parts: " + fileInfo.numberOfParts();
    assert part >= 0 : "part number [" + part + "] is negative";
    return part;
}
/**
 * Compute the position of the given byte relative to the start of its part.
 *
 * @param position the position of the required byte (within the corresponding Lucene file)
 * @return the offset of that byte from the start of the part that contains it
 */
private long getRelativePositionInPart(long position) {
    ensureValidPosition(position);
    final long pos = position % fileInfo.partSize().getBytes();
    // Bug fix: the assertion previously resolved the part number from the *relative* position
    // (getPartNumberForPosition(pos)) instead of the absolute one, so it compared against the
    // length of the wrong part. Use the absolute position to find the containing part.
    assert pos < fileInfo.partBytes(getPartNumberForPosition(position)) : "position in part [" + pos + "] exceeds part's length";
    assert pos >= 0L : "position in part [" + pos + "] is negative";
    return pos;
}
/** Length in bytes of the given part, as recorded in the snapshot file metadata. */
private long getLengthOfPart(int part) {
    return fileInfo.partBytes(part);
}
/**
 * Validates that {@code position} lies within the file ({@code 0 <= position < length}),
 * throwing {@link IllegalArgumentException} otherwise. The check is expressed both as an
 * assertion and as a runtime check so it still applies when assertions are disabled.
 */
private void ensureValidPosition(long position) {
    assert position >= 0L && position < fileInfo.length() : position + " vs " + fileInfo.length();
    // noinspection ConstantConditions in case assertions are disabled
    if (position < 0L || position >= fileInfo.length()) {
        throw new IllegalArgumentException("Position [" + position + "] is invalid for a file of length [" + fileInfo.length() + "]");
    }
}
/**
 * Validates the target position and records the seek in the stats.
 * NOTE(review): this override does not reposition any channel itself — presumably the buffered
 * superclass handles the actual repositioning; confirm against the base class.
 *
 * @param pos target position relative to this (possibly sliced) input.
 */
@Override
protected void seekInternal(long pos) throws IOException {
    if (pos > length()) {
        throw new EOFException("Reading past end of file [position=" + pos + ", length=" + length() + "] for " + toString());
    } else if (pos < 0L) {
        throw new IOException("Seeking to negative position [" + pos + "] for " + toString());
    }
    // translate to an absolute position within the underlying file before recording stats
    final long position = pos + this.offset;
    stats.incrementSeeks(lastSeekPosition, position);
    lastSeekPosition = position;
}
/** Shallow copy via the superclass' {@code clone()}; the clone shares this input's field values. */
@Override
public CachedBlobContainerIndexInput clone() {
    return (CachedBlobContainerIndexInput) super.clone();
}
/**
 * Creates a sliced view over {@code [offset, offset + length)} of this input. The slice shares
 * the same {@link CacheFileReference} and is marked as a clone so it does not release the cache
 * file on close.
 *
 * @throws IllegalArgumentException if the requested slice is out of bounds.
 */
@Override
public IndexInput slice(String sliceDescription, long offset, long length) {
    if (offset < 0 || length < 0 || offset + length > length()) {
        throw new IllegalArgumentException(
            "slice() "
                + sliceDescription
                + " out of bounds: offset="
                + offset
                + ",length="
                + length
                + ",fileLength="
                + length()
                + ": "
                + this
        );
    }
    final CachedBlobContainerIndexInput slice = new CachedBlobContainerIndexInput(
        getFullSliceDescription(sliceDescription),
        directory,
        fileInfo,
        context,
        stats,
        this.offset + offset, // slice offset is relative to this input's own offset
        length,
        cacheFileReference,
        defaultRangeSize
    );
    slice.isClone = true;
    return slice;
}
/** Debug representation including the cache file reference and current read position. */
@Override
public String toString() {
    final StringBuilder description = new StringBuilder("CachedBlobContainerIndexInput{");
    description.append("cacheFileReference=").append(cacheFileReference);
    description.append(", offset=").append(offset);
    description.append(", length=").append(length());
    description.append(", position=").append(getFilePointer());
    description.append(", rangeSize=").append(getDefaultRangeSize());
    description.append(", directory=").append(directory);
    return description.append('}').toString();
}
/**
 * Holds the (lazily acquired) {@link CacheFile} backing an index input and reacts to its
 * eviction. The reference is shared between an input and its clones/slices; only the original
 * (non-clone) input calls {@link #releaseOnClose()}.
 */
private static class CacheFileReference implements CacheFile.EvictionListener {

    private final long fileLength;
    private final CacheKey cacheKey;
    private final SearchableSnapshotDirectory directory;
    // null if evicted or not yet acquired
    private final AtomicReference<CacheFile> cacheFile = new AtomicReference<>();

    private CacheFileReference(SearchableSnapshotDirectory directory, String fileName, long fileLength) {
        this.cacheKey = directory.createCacheKey(fileName);
        this.fileLength = fileLength;
        this.directory = directory;
    }

    /**
     * Returns the acquired cache file, acquiring it from the directory on first use.
     * Returns {@code null} if the file could not be acquired (e.g. it was evicted
     * concurrently); callers are expected to retry or fall back.
     */
    @Nullable
    CacheFile get() throws Exception {
        CacheFile currentCacheFile = cacheFile.get();
        if (currentCacheFile != null) {
            return currentCacheFile;
        }

        final CacheFile newCacheFile = directory.getCacheFile(cacheKey, fileLength);
        synchronized (this) {
            // re-check under the lock: another thread may have acquired in the meantime
            currentCacheFile = cacheFile.get();
            if (currentCacheFile != null) {
                return currentCacheFile;
            }
            if (newCacheFile.acquire(this)) {
                final CacheFile previousCacheFile = cacheFile.getAndSet(newCacheFile);
                assert previousCacheFile == null;
                return newCacheFile;
            }
        }
        return null;
    }

    @Override
    public void onEviction(final CacheFile evictedCacheFile) {
        synchronized (this) {
            // only release if the evicted file is still the one we hold (CAS guards against
            // racing with releaseOnClose or a concurrent re-acquire)
            if (cacheFile.compareAndSet(evictedCacheFile, null)) {
                evictedCacheFile.release(this);
            }
        }
    }

    void releaseOnClose() {
        synchronized (this) {
            final CacheFile currentCacheFile = cacheFile.getAndSet(null);
            if (currentCacheFile != null) {
                currentCacheFile.release(this);
            }
        }
    }

    @Override
    public String toString() {
        return "CacheFileReference{"
            + "cacheKey='"
            + cacheKey
            + '\''
            + ", fileLength="
            + fileLength
            + ", acquired="
            + (cacheFile.get() != null)
            + '}';
    }
}
/** Assertion helper: the given channel must be non-null and open. Always returns {@code true}. */
private static boolean assertFileChannelOpen(FileChannel channel) {
    assert channel != null;
    assert channel.isOpen();
    return true;
}
/** Whether the given thread name indicates a member of the cache-fetch-async thread pool. */
private static boolean isCacheFetchAsyncThread(final String threadName) {
    final String poolMarker = '[' + SearchableSnapshotsConstants.CACHE_FETCH_ASYNC_THREAD_POOL_NAME + ']';
    return threadName.contains(poolMarker);
}
/**
 * Assertion helper: cache files may only be written from the cache-fetch-async thread pool.
 * Always returns {@code true} so it can be used directly in an {@code assert} statement.
 */
private static boolean assertCurrentThreadMayWriteCacheFile() {
    final String threadName = Thread.currentThread().getName();
    assert isCacheFetchAsyncThread(threadName) : "expected the current thread ["
        + threadName
        + "] to belong to the cache fetch async thread pool";
    return true;
}
/**
 * Assertion helper: the current thread must NOT belong to the cache-fetch-async thread pool.
 * Always returns {@code true} so it can be used directly in an {@code assert} statement.
 */
private static boolean assertCurrentThreadIsNotCacheFetchAsync() {
    final String threadName = Thread.currentThread().getName();
    // Fixed assertion message: it previously claimed the thread was expected *to* belong to the
    // pool, which is the exact opposite of what this assertion verifies.
    assert false == isCacheFetchAsyncThread(threadName) : "expected the current thread ["
        + threadName
        + "] to not belong to the cache fetch async thread pool";
    return true;
}
}
| |
package com.dexafree.materialList.card;
import android.content.Context;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.support.annotation.ColorInt;
import android.support.annotation.ColorRes;
import android.support.annotation.DrawableRes;
import android.support.annotation.IdRes;
import android.support.annotation.LayoutRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.StringRes;
import android.support.v7.widget.CardView;
import android.util.DisplayMetrics;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import com.dexafree.materialList.R;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.RequestCreator;
import java.util.HashMap;
import java.util.Map;
import java.util.Observable;
/**
* The basic CardProvider.
*/
public class CardProvider<T extends CardProvider> extends Observable {
/** Horizontal margin (in dp) applied to the divider when it is not full-width. */
private final static int DIVIDER_MARGIN_DP = 16;
// Context used to resolve resources; injected by Card.Builder via setContext().
private Context mContext;
// Builder this provider belongs to; handed back by endConfig().
private Card.Builder mBuilder;
private String mTitle;
private String mSubtitle;
private String mDescription;
private boolean mDividerVisible;
private boolean mFullWidthDivider;
private int mTitleGravity;
private int mSubtitleGravity;
private int mDescriptionGravity;
@ColorInt
private int mBackgroundColor = Color.WHITE;
@ColorInt
private int mTitleColor;
@ColorInt
private int mSubtitleColor;
@ColorInt
private int mDescriptionColor;
@ColorInt
private int mDividerColor;
// Image drawn directly when set; takes precedence over mUrlImage at render time.
@Nullable
private Drawable mDrawable;
// Image URL loaded via Picasso when no Drawable is set.
@Nullable
private String mUrlImage;
// Maps a view id inside the card layout to the Action rendered on that view.
private final Map<Integer, Action> mActionMapping = new HashMap<>();
// Optional hook to customize the Picasso request before the image is loaded.
private OnImageConfigListener mOnImageConfigListenerListener;
// Layout resource inflated for this card.
private int mLayoutId;
/////////////////////////////////////////////////////////////////
//
// Functions related to the builder pattern.
//
/////////////////////////////////////////////////////////////////
/**
 * Do NOT use this method! Only for the {@code Card.Builder}!
 *
 * @param context to access the resources.
 */
void setContext(Context context) {
    mContext = context;
    onCreated();
}

/**
 * Do NOT use this method! Only for the {@code Card.Builder}!
 *
 * @param builder to return the {@code Card.Builder} by {@code endConfig}.
 */
void setBuilder(Card.Builder builder) {
    mBuilder = builder;
}

/**
 * Called once the context has been attached; applies the default title and description colors.
 * Subclasses may override to install their own defaults.
 */
protected void onCreated() {
    setTitleResourceColor(R.color.grey_title);
    setDescriptionResourceColor(R.color.description_color);
}

/**
 * Get the context.
 *
 * @return the context.
 */
protected Context getContext() {
    return mContext;
}

/**
 * End the configuration of this provider.
 *
 * @return the {@code Card.Builder}.
 */
public Card.Builder endConfig() {
    return mBuilder;
}

/**
 * Notifies the Card that the content changed.
 */
protected void notifyDataSetChanged() {
    notifyDataSetChanged(null);
}

/**
 * Notifies the Card that the content changed, forwarding an optional payload to the observers.
 *
 * @param object optional payload passed to {@link java.util.Observer#update}.
 */
protected void notifyDataSetChanged(@Nullable final Object object) {
    setChanged();
    notifyObservers(object);
}
/////////////////////////////////////////////////////////////////
//
// Functions related to setting and getting the properties.
//
/////////////////////////////////////////////////////////////////
/**
 * Set the layout resource to inflate for this card.
 *
 * @param layoutId the layout resource.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setLayout(@LayoutRes final int layoutId) {
    mLayoutId = layoutId;
    return (T) this;
}

/**
 * Get the card layout as resource.
 *
 * @return the card layout.
 */
@LayoutRes
public int getLayout() {
    return mLayoutId;
}

/**
 * Get the background color.
 *
 * @return the background color.
 */
@ColorInt
public int getBackgroundColor() {
    return mBackgroundColor;
}

/**
 * Set the background color with a real color (e.g. {@code Color.WHITE}).
 *
 * @param color as real.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setBackgroundColor(@ColorInt final int color) {
    mBackgroundColor = color;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Set the background color with a resource color (e.g. {@code
 * android.R.color.white}).
 *
 * @param color as resource.
 * @return the renderer.
 */
@NonNull
public T setBackgroundResourceColor(@ColorRes final int color) {
    return setBackgroundColor(getContext().getResources().getColor(color));
}

/**
 * Get the title.
 *
 * @return the title.
 */
public String getTitle() {
    return mTitle;
}

/**
 * Set the title with a string resource.
 *
 * @param title to set.
 * @return the renderer.
 */
@NonNull
public T setTitle(@StringRes final int title) {
    return setTitle(getContext().getString(title));
}
/**
 * Set the title.
 *
 * @param title to set.
 * @return the renderer.
 */
@NonNull // added for consistency: every sibling setter declares a @NonNull return
@SuppressWarnings("unchecked")
public T setTitle(@NonNull final String title) {
    mTitle = title;
    notifyDataSetChanged();
    return (T) this;
}
/**
 * Set the gravity of the title text (e.g. {@code Gravity.CENTER}).
 *
 * @param titleGravity the gravity to apply to the title.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setTitleGravity(final int titleGravity) {
    mTitleGravity = titleGravity;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Get the subtitle.
 *
 * @return the subtitle.
 */
public String getSubtitle() {
    return mSubtitle;
}

/**
 * Set the subtitle as resource.
 *
 * @param subtitle to set.
 * @return the renderer.
 */
@NonNull
public T setSubtitle(@StringRes final int subtitle) {
    return setSubtitle(getContext().getString(subtitle));
}

/**
 * Set the subtitle.
 *
 * @param subtitle to set.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setSubtitle(final String subtitle) {
    mSubtitle = subtitle;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Set the gravity of the subtitle text.
 *
 * @param subtitleGravity the gravity to apply to the subtitle.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setSubtitleGravity(final int subtitleGravity) {
    mSubtitleGravity = subtitleGravity;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Get the description.
 *
 * @return the description.
 */
public String getDescription() {
    return mDescription;
}

/**
 * Set the description with a string resource.
 *
 * @param description to set.
 * @return the renderer.
 */
@NonNull
public T setDescription(@StringRes final int description) {
    return setDescription(getContext().getString(description));
}

/**
 * Set the description.
 *
 * @param description to set.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setDescription(@NonNull final String description) {
    mDescription = description;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Set the gravity of the description text.
 *
 * @param descriptionGravity to set.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setDescriptionGravity(final int descriptionGravity) {
    mDescriptionGravity = descriptionGravity;
    notifyDataSetChanged();
    return (T) this;
}
/**
 * Get the divider color as int.
 *
 * @return the divider color.
 */
@ColorInt
public int getDividerColor() {
    return mDividerColor;
}

/**
 * Set the divider color as resource.
 *
 * @param color to set.
 * @return the renderer.
 */
@NonNull
public T setDividerResourceColor(@ColorRes final int color) {
    return setDividerColor(getContext().getResources().getColor(color));
}

/**
 * Set the divider color as int.
 *
 * @param color to set.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setDividerColor(@ColorInt final int color) {
    mDividerColor = color;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Get the drawable.
 *
 * @return the drawable.
 */
public Drawable getDrawable() {
    return mDrawable;
}

/**
 * Set the drawable with a drawable resource. The resource is converted to an
 * {@code android.resource://} URI and loaded like a URL image, so it can still be
 * configured via {@link #setDrawableConfiguration(OnImageConfigListener)}.
 *
 * @param drawable to set.
 * @return the renderer.
 */
@NonNull
public T setDrawable(@DrawableRes final int drawable) {
    return setDrawable(Uri.parse("android.resource://" + getContext().getPackageName()
            + "/" + drawable).toString());
}

/**
 * Set the drawable. This drawable can not be configured inside of the ImageView. It will
 * directly be drawn. If the configuration of the image is necessary, use {@link
 * #setDrawable(int)} or {@link #setDrawable(String)} and {@link
 * #setDrawableConfiguration(OnImageConfigListener)} to configure the render process.
 *
 * @param drawable to set.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setDrawable(@Nullable final Drawable drawable) {
    mDrawable = drawable;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Set the drawable with a web url.
 *
 * @param urlImage to set.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setDrawable(@Nullable final String urlImage) {
    mUrlImage = urlImage;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Get the web url.
 *
 * @return the url.
 */
public String getImageUrl() {
    return mUrlImage;
}
/**
 * Get the title color as int.
 *
 * @return the color.
 */
@ColorInt
public int getTitleColor() {
    return mTitleColor;
}

/**
 * Get the title gravity as int.
 *
 * @return the gravity.
 */
public int getTitleGravity(){
    return mTitleGravity;
}

/**
 * Set the title color as int.
 *
 * @param color to set as int.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setTitleColor(@ColorInt final int color) {
    mTitleColor = color;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Set the title color as resource.
 *
 * @param color to set as resource.
 * @return the renderer.
 */
@NonNull
public T setTitleResourceColor(@ColorRes final int color) {
    return setTitleColor(getContext().getResources().getColor(color));
}

/**
 * Get the subtitle color as int.
 *
 * @return the subtitle color.
 */
@ColorInt
public int getSubtitleColor() {
    return mSubtitleColor;
}

/**
 * Get the subtitle gravity as int.
 *
 * @return the subtitle gravity.
 */
public int getSubtitleGravity(){
    return mSubtitleGravity;
}

/**
 * Set the subtitle color as resource.
 *
 * @param color to set.
 * @return the renderer.
 */
@NonNull
public T setSubtitleResourceColor(@ColorRes final int color) {
    return setSubtitleColor(getContext().getResources().getColor(color));
}

/**
 * Set the subtitle color as int.
 *
 * @param color to set.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setSubtitleColor(@ColorInt final int color) {
    mSubtitleColor = color;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Get the description color as int.
 *
 * @return the color.
 */
@ColorInt
public int getDescriptionColor() {
    return mDescriptionColor;
}

/**
 * Get the description gravity as int.
 *
 * @return the gravity.
 */
public int getDescriptionGravity() {
    return mDescriptionGravity;
}

/**
 * Set the description color as int.
 *
 * @param color to set.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setDescriptionColor(@ColorInt final int color) {
    mDescriptionColor = color;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Set the description color as resource.
 *
 * @param color to set.
 * @return the renderer.
 */
@NonNull
public T setDescriptionResourceColor(@ColorRes final int color) {
    return setDescriptionColor(getContext().getResources().getColor(color));
}
/**
 * Set the listener for image customizations.
 *
 * @param listener to set.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setDrawableConfiguration(@NonNull final OnImageConfigListener listener) {
    mOnImageConfigListenerListener = listener;
    return (T) this;
}

/**
 * Get the image configuration listener.
 *
 * @return the listener, or {@code null} if none was set.
 */
public OnImageConfigListener getOnImageConfigListenerListener() {
    return mOnImageConfigListenerListener;
}

/**
 * Get the visibility state of the divider.
 *
 * @return the visibility state of the divider.
 */
public boolean isDividerVisible() {
    return mDividerVisible;
}

/**
 * Set the divider visible or invisible.
 *
 * @param visible to set.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setDividerVisible(final boolean visible) {
    mDividerVisible = visible;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Get the state of wideness of the divider.
 *
 * @return {@code true} if the divider spans the full card width.
 */
public boolean isFullWidthDivider() {
    return mFullWidthDivider;
}

/**
 * Set the wideness of the divider to full.
 *
 * @param fullWidthDivider to set.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T setFullWidthDivider(final boolean fullWidthDivider) {
    mFullWidthDivider = fullWidthDivider;
    notifyDataSetChanged();
    return (T) this;
}

/**
 * Register an {@link Action} for the view with the given id. The action is wired to the view
 * every time the card is rendered.
 *
 * @param actionViewId id of the view inside the card layout the action is bound to.
 * @param action       the action to attach to that view.
 * @return the renderer.
 */
@NonNull
@SuppressWarnings("unchecked")
public T addAction(@IdRes final int actionViewId, @NonNull final Action action) {
    mActionMapping.put(actionViewId, action);
    return (T) this;
}

/**
 * Look up the {@link Action} registered for the given view id.
 *
 * @param actionViewId id of the view the action was registered for.
 * @return the action, or {@code null} if none was registered.
 */
@Nullable
public Action getAction(@IdRes final int actionViewId) {
    return mActionMapping.get(actionViewId);
}
/////////////////////////////////////////////////////////////////
//
// Functions for rendering.
//
/////////////////////////////////////////////////////////////////
/**
 * Renders the content and style of the card to the view.
 *
 * Each section of the card (background, texts, image, divider, actions) is rendered
 * independently; sections whose views are absent from the inflated layout are skipped.
 *
 * @param view to display the content and style on.
 * @param card to render.
 */
@SuppressWarnings("unchecked")
public void render(@NonNull final View view, @NonNull final Card card) {
    renderBackground(view);
    renderTitle(view);
    renderSubtitle(view);
    renderDescription(view);
    renderImage(view);
    renderDivider(view);
    renderActions(view, card);
}

// Applies the configured background color to the CardView, if present in the layout.
private void renderBackground(@NonNull final View view) {
    final CardView cardView = findViewById(view, R.id.cardView, CardView.class);
    if (cardView != null) {
        cardView.setCardBackgroundColor(getBackgroundColor());
    }
}

// Renders the title text, color and gravity, if the layout contains a title view.
private void renderTitle(@NonNull final View view) {
    final TextView title = findViewById(view, R.id.title, TextView.class);
    if (title != null) {
        title.setText(getTitle());
        title.setTextColor(getTitleColor());
        title.setGravity(getTitleGravity());
    }
}

// Renders the subtitle, hiding the view entirely when no subtitle text is set.
private void renderSubtitle(@NonNull final View view) {
    final TextView subtitle = findViewById(view, R.id.subtitle, TextView.class);
    if (subtitle != null) {
        subtitle.setText(getSubtitle());
        subtitle.setTextColor(getSubtitleColor());
        subtitle.setGravity(getSubtitleGravity());
        if (getSubtitle() == null || getSubtitle().isEmpty()) {
            subtitle.setVisibility(View.GONE);
        } else {
            subtitle.setVisibility(View.VISIBLE);
        }
    }
}

// Renders the description (supporting text), if present in the layout.
private void renderDescription(@NonNull final View view) {
    final TextView supportingText = findViewById(view, R.id.supportingText, TextView.class);
    if (supportingText != null) {
        supportingText.setText(getDescription());
        supportingText.setTextColor(getDescriptionColor());
        supportingText.setGravity(getDescriptionGravity());
    }
}

// Renders the image: a directly-set Drawable wins; otherwise the URL is loaded via Picasso.
private void renderImage(@NonNull final View view) {
    final ImageView imageView = findViewById(view, R.id.image, ImageView.class);
    if (imageView == null) {
        return;
    }
    if (getDrawable() != null) {
        imageView.setImageDrawable(getDrawable());
    } else if (getImageUrl() != null && !getImageUrl().isEmpty()) {
        // Guard against missing/empty URLs: Picasso throws IllegalArgumentException on an
        // empty path, which previously crashed cards configured without any image.
        final RequestCreator requestCreator = Picasso.with(getContext()).load(getImageUrl());
        if (getOnImageConfigListenerListener() != null) {
            getOnImageConfigListenerListener().onImageConfigure(requestCreator);
        }
        requestCreator.into(imageView);
    }
}

// Shows/hides the divider and applies full-width or default side margins.
private void renderDivider(@NonNull final View view) {
    final View divider = findViewById(view, R.id.divider, View.class);
    if (divider == null) {
        return;
    }
    divider.setVisibility(isDividerVisible() ? View.VISIBLE : View.INVISIBLE);
    // After setting the visibility, prepare the divider params according to the preferences
    if (isDividerVisible()) {
        final ViewGroup.MarginLayoutParams params = (ViewGroup.MarginLayoutParams)
                divider.getLayoutParams();
        if (isFullWidthDivider()) {
            // If the divider has to be from side to side, the margin will be 0
            params.setMargins(0, 0, 0, 0);
        } else {
            final int dividerMarginPx = dpToPx(DIVIDER_MARGIN_DP);
            params.setMargins(dividerMarginPx, 0, dividerMarginPx, 0);
        }
    }
}

// Wires each registered Action to its view, if that view exists in this layout.
private void renderActions(@NonNull final View view, @NonNull final Card card) {
    for (final Map.Entry<Integer, Action> entry : mActionMapping.entrySet()) {
        final View actionViewRaw = findViewById(view, entry.getKey(), View.class);
        if (actionViewRaw != null) {
            final Action action = entry.getValue();
            action.setProvider(this);
            action.onRender(actionViewRaw, card);
        }
    }
}
/**
 * Looks up a child of {@code view} by id and casts it to the requested view class.
 *
 * @param view the root view to search in.
 * @param id   the id of the child view to find.
 * @param type the expected class of the child view.
 * @return the child cast to {@code type}, or {@code null} when no child has that id.
 */
@Nullable
protected <V extends View> V findViewById(@NonNull final View view, @IdRes final int id,
        @NonNull final Class<V> type) {
    final View found = view.findViewById(id);
    return found == null ? null : type.cast(found);
}
/////////////////////////////////////////////////////////////////
//
// Interfaces.
//
/////////////////////////////////////////////////////////////////
/**
 * The OnImageConfigListener will be called, if an image is loaded from an url to an ImageView,
 * giving the implementer a chance to customize the Picasso request before it is executed.
 */
public interface OnImageConfigListener {
    /**
     * An image is loaded from an url and can be customized now.
     *
     * @param requestCreator the pending Picasso request to customize.
     */
    void onImageConfigure(@NonNull final RequestCreator requestCreator);
}
/////////////////////////////////////////////////////////////////
//
// Helper methods.
//
/////////////////////////////////////////////////////////////////
/**
 * Converts a value in density-independent pixels (dp) to raw pixels, based on the display's
 * exact horizontal dpi.
 *
 * @param dp the value in density-independent pixels.
 * @return the rounded equivalent in raw pixels.
 */
protected int dpToPx(final int dp) {
    // NOTE(review): this uses xdpi rather than DisplayMetrics.density, so the result can
    // differ from TypedValue.applyDimension(COMPLEX_UNIT_DIP, ...) on devices whose
    // physical dpi does not match the logical density bucket — confirm this is intended.
    DisplayMetrics displayMetrics = getContext().getResources().getDisplayMetrics();
    return (int) Math.round(dp * (displayMetrics.xdpi / DisplayMetrics.DENSITY_DEFAULT));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hive.hcatalog.mapreduce;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hive.hcatalog.common.ErrorType;
import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.HCatRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Record writer container for tables using dynamic partitioning. See
 * {@link FileOutputFormatContainer} for more information.
 *
 * For every distinct combination of dynamic-partition values encountered, this container
 * lazily creates (and caches) a dedicated base RecordWriter, SerDe, OutputCommitter and task
 * context, all keyed by the string form of the partition-value list.
 */
class DynamicPartitionFileRecordWriterContainer extends FileRecordWriterContainer {
  private static final Logger LOG = LoggerFactory.getLogger(DynamicPartitionFileRecordWriterContainer.class);
  // Positions of the dynamic-partition columns within each outgoing HCatRecord.
  private final List<Integer> dynamicPartCols;
  // Maximum number of dynamic partitions this task may create; -1 means unlimited.
  private int maxDynamicPartitions;
  // The following maps are all keyed by the string form of the dynamic-partition value
  // list (see getLocalFileWriter); one entry per partition written by this task.
  private final Map<String, RecordWriter<? super WritableComparable<?>, ? super Writable>> baseDynamicWriters;
  private final Map<String, SerDe> baseDynamicSerDe;
  private final Map<String, org.apache.hadoop.mapred.OutputCommitter> baseDynamicCommitters;
  private final Map<String, org.apache.hadoop.mapred.TaskAttemptContext> dynamicContexts;
  private final Map<String, ObjectInspector> dynamicObjectInspectors;
  private Map<String, OutputJobInfo> dynamicOutputJobInfo;
  /**
   * Creates the container and initializes the (initially empty) per-partition caches.
   *
   * @param baseWriter RecordWriter to contain
   * @param context current TaskAttemptContext
   * @throws IOException if the job info lacks dynamic partition columns (setSchema() not called)
   * @throws InterruptedException
   */
  public DynamicPartitionFileRecordWriterContainer(
      RecordWriter<? super WritableComparable<?>, ? super Writable> baseWriter,
      TaskAttemptContext context) throws IOException, InterruptedException {
    super(baseWriter, context);
    maxDynamicPartitions = jobInfo.getMaxDynamicPartitions();
    dynamicPartCols = jobInfo.getPosOfDynPartCols();
    if (dynamicPartCols == null) {
      throw new HCatException("It seems that setSchema() is not called on "
          + "HCatOutputFormat. Please make sure that method is called.");
    }
    this.baseDynamicSerDe = new HashMap<String, SerDe>();
    this.baseDynamicWriters =
        new HashMap<String, RecordWriter<? super WritableComparable<?>, ? super Writable>>();
    this.baseDynamicCommitters = new HashMap<String, org.apache.hadoop.mapred.OutputCommitter>();
    this.dynamicContexts = new HashMap<String, org.apache.hadoop.mapred.TaskAttemptContext>();
    this.dynamicObjectInspectors = new HashMap<String, ObjectInspector>();
    this.dynamicOutputJobInfo = new HashMap<String, OutputJobInfo>();
  }
  /**
   * Closes every cached per-partition writer, then registers a committer proxy so that
   * commit/abort of this task attempt is fanned out to each partition's OutputCommitter.
   */
  @Override
  public void close(TaskAttemptContext context) throws IOException, InterruptedException {
    Reporter reporter = InternalUtil.createReporter(context);
    for (RecordWriter<? super WritableComparable<?>, ? super Writable> bwriter : baseDynamicWriters
        .values()) {
      // We are in RecordWriter.close() make sense that the context would be
      // TaskInputOutput.
      bwriter.close(reporter);
    }
    TaskCommitContextRegistry.getInstance().register(context, new TaskCommitContextRegistry.TaskCommitterProxy() {
      @Override
      public void abortTask(TaskAttemptContext context) throws IOException {
        // Abort each partition with the task context it was created under.
        for (Map.Entry<String, OutputJobInfo> outputJobInfoEntry : dynamicOutputJobInfo.entrySet()) {
          String dynKey = outputJobInfoEntry.getKey();
          OutputJobInfo outputJobInfo = outputJobInfoEntry.getValue();
          LOG.info("Aborting task-attempt for " + outputJobInfo.getLocation());
          baseDynamicCommitters.get(dynKey)
            .abortTask(dynamicContexts.get(dynKey));
        }
      }
      @Override
      public void commitTask(TaskAttemptContext context) throws IOException {
        // Commit each partition, honoring the committer's needsTaskCommit() check.
        for (Map.Entry<String, OutputJobInfo> outputJobInfoEntry : dynamicOutputJobInfo.entrySet()) {
          String dynKey = outputJobInfoEntry.getKey();
          OutputJobInfo outputJobInfo = outputJobInfoEntry.getValue();
          LOG.info("Committing task-attempt for " + outputJobInfo.getLocation());
          TaskAttemptContext dynContext = dynamicContexts.get(dynKey);
          OutputCommitter dynCommitter = baseDynamicCommitters.get(dynKey);
          if (dynCommitter.needsTaskCommit(dynContext)) {
            dynCommitter.commitTask(dynContext);
          }
          else {
            LOG.info("Skipping commitTask() for " + outputJobInfo.getLocation());
          }
        }
      }
    });
  }
  /**
   * Returns the writer bundle for the partition the given record belongs to, creating and
   * caching writer/SerDe/committer/context on first sight of a new partition-value combo.
   */
  @Override
  protected LocalFileWriter getLocalFileWriter(HCatRecord value) throws IOException, HCatException {
    OutputJobInfo localJobInfo = null;
    // Calculate which writer to use from the remaining values - this needs to
    // be done before we delete cols.
    List<String> dynamicPartValues = new ArrayList<String>();
    for (Integer colToAppend : dynamicPartCols) {
      dynamicPartValues.add(value.get(colToAppend).toString());
    }
    // Cache key: the string form of the partition-value list, e.g. "[2020, us]".
    String dynKey = dynamicPartValues.toString();
    if (!baseDynamicWriters.containsKey(dynKey)) {
      // NOTE(review): the '>' comparison runs before the new writer is inserted, so up to
      // maxDynamicPartitions + 1 writers can be created — confirm the intended bound.
      if ((maxDynamicPartitions != -1) && (baseDynamicWriters.size() > maxDynamicPartitions)) {
        throw new HCatException(ErrorType.ERROR_TOO_MANY_DYNAMIC_PTNS,
            "Number of dynamic partitions being created "
                + "exceeds configured max allowable partitions[" + maxDynamicPartitions
                + "], increase parameter [" + HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS.varname
                + "] if needed.");
      }
      org.apache.hadoop.mapred.TaskAttemptContext currTaskContext =
          HCatMapRedUtil.createTaskAttemptContext(context);
      configureDynamicStorageHandler(currTaskContext, dynamicPartValues);
      localJobInfo = HCatBaseOutputFormat.getJobInfo(currTaskContext.getConfiguration());
      // Setup serDe.
      SerDe currSerDe =
          ReflectionUtils.newInstance(storageHandler.getSerDeClass(), currTaskContext.getJobConf());
      try {
        InternalUtil.initializeOutputSerDe(currSerDe, currTaskContext.getConfiguration(),
            localJobInfo);
      } catch (SerDeException e) {
        throw new IOException("Failed to initialize SerDe", e);
      }
      // create base OutputFormat
      org.apache.hadoop.mapred.OutputFormat baseOF =
          ReflectionUtils.newInstance(storageHandler.getOutputFormatClass(),
              currTaskContext.getJobConf());
      // We are skipping calling checkOutputSpecs() for each partition
      // As it can throw a FileAlreadyExistsException when more than one
      // mapper is writing to a partition.
      // See HCATALOG-490, also to avoid contacting the namenode for each new
      // FileOutputFormat instance.
      // In general this should be ok for most FileOutputFormat implementations
      // but may become an issue for cases when the method is used to perform
      // other setup tasks.
      // Get Output Committer
      org.apache.hadoop.mapred.OutputCommitter baseOutputCommitter =
          currTaskContext.getJobConf().getOutputCommitter();
      // Create currJobContext the latest so it gets all the config changes
      org.apache.hadoop.mapred.JobContext currJobContext =
          HCatMapRedUtil.createJobContext(currTaskContext);
      // Set up job.
      baseOutputCommitter.setupJob(currJobContext);
      // Recreate to refresh jobConf of currTask context.
      currTaskContext =
          HCatMapRedUtil.createTaskAttemptContext(currJobContext.getJobConf(),
              currTaskContext.getTaskAttemptID(), currTaskContext.getProgressible());
      // Set temp location.
      currTaskContext.getConfiguration().set(
          "mapred.work.output.dir",
          new FileOutputCommitter(new Path(localJobInfo.getLocation()), currTaskContext)
              .getWorkPath().toString());
      // Set up task.
      baseOutputCommitter.setupTask(currTaskContext);
      Path parentDir = new Path(currTaskContext.getConfiguration().get("mapred.work.output.dir"));
      Path childPath =
          new Path(parentDir, FileOutputFormat.getUniqueFile(currTaskContext, "part", ""));
      RecordWriter baseRecordWriter =
          baseOF.getRecordWriter(parentDir.getFileSystem(currTaskContext.getConfiguration()),
              currTaskContext.getJobConf(), childPath.toString(),
              InternalUtil.createReporter(currTaskContext));
      // Cache everything needed to write to, commit, and abort this partition later.
      baseDynamicWriters.put(dynKey, baseRecordWriter);
      baseDynamicSerDe.put(dynKey, currSerDe);
      baseDynamicCommitters.put(dynKey, baseOutputCommitter);
      dynamicContexts.put(dynKey, currTaskContext);
      dynamicObjectInspectors.put(dynKey,
          InternalUtil.createStructObjectInspector(jobInfo.getOutputSchema()));
      dynamicOutputJobInfo.put(dynKey,
          HCatOutputFormat.getJobInfo(dynamicContexts.get(dynKey).getConfiguration()));
    }
    return new LocalFileWriter(baseDynamicWriters.get(dynKey), dynamicObjectInspectors.get(dynKey),
        baseDynamicSerDe.get(dynKey), dynamicOutputJobInfo.get(dynKey));
  }
  // Configures the output storage handler for the given dynamic-partition values;
  // overridable for testing.
  protected void configureDynamicStorageHandler(JobContext context, List<String> dynamicPartVals)
      throws IOException {
    HCatOutputFormat.configureOutputStorageHandler(context, dynamicPartVals);
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticfilesystem.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * The latest known metered size (in bytes) of data stored in the file system, in its <code>Value</code> field, and the
 * time at which that size was determined in its <code>Timestamp</code> field. The value doesn't represent the size of a
 * consistent snapshot of the file system, but it is eventually consistent when there are no writes to the file system.
 * That is, the value represents the actual size only if the file system is not modified for a period longer than a
 * couple of hours. Otherwise, the value is not necessarily the exact size the file system was at any instant in time.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticfilesystem-2015-02-01/FileSystemSize" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class FileSystemSize implements Serializable, Cloneable, StructuredPojo {

    /** The latest known metered size (in bytes) of data stored in the file system. */
    private Long value;

    /**
     * The time at which the size reported in {@code value} was determined, expressed as the integer number of seconds
     * since 1970-01-01T00:00:00Z.
     */
    private java.util.Date timestamp;

    /** The latest known metered size (in bytes) of data stored in the Infrequent Access storage class. */
    private Long valueInIA;

    /** The latest known metered size (in bytes) of data stored in the Standard storage class. */
    private Long valueInStandard;

    /**
     * Sets the latest known metered size (in bytes) of data stored in the file system.
     *
     * @param value
     *        The latest known metered size (in bytes) of data stored in the file system.
     */
    public void setValue(Long value) {
        this.value = value;
    }

    /**
     * Returns the latest known metered size (in bytes) of data stored in the file system.
     *
     * @return The latest known metered size (in bytes) of data stored in the file system.
     */
    public Long getValue() {
        return this.value;
    }

    /**
     * Fluent variant of {@link #setValue(Long)}.
     *
     * @param value
     *        The latest known metered size (in bytes) of data stored in the file system.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public FileSystemSize withValue(Long value) {
        setValue(value);
        return this;
    }

    /**
     * Sets the time at which the size of data, returned in the <code>Value</code> field, was determined.
     *
     * @param timestamp
     *        The time at which the size of data, returned in the <code>Value</code> field, was determined. The value is
     *        the integer number of seconds since 1970-01-01T00:00:00Z.
     */
    public void setTimestamp(java.util.Date timestamp) {
        this.timestamp = timestamp;
    }

    /**
     * Returns the time at which the size of data, returned in the <code>Value</code> field, was determined.
     *
     * @return The time at which the size of data, returned in the <code>Value</code> field, was determined. The value
     *         is the integer number of seconds since 1970-01-01T00:00:00Z.
     */
    public java.util.Date getTimestamp() {
        return this.timestamp;
    }

    /**
     * Fluent variant of {@link #setTimestamp(java.util.Date)}.
     *
     * @param timestamp
     *        The time at which the size of data, returned in the <code>Value</code> field, was determined. The value is
     *        the integer number of seconds since 1970-01-01T00:00:00Z.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public FileSystemSize withTimestamp(java.util.Date timestamp) {
        setTimestamp(timestamp);
        return this;
    }

    /**
     * Sets the latest known metered size (in bytes) of data stored in the Infrequent Access storage class.
     *
     * @param valueInIA
     *        The latest known metered size (in bytes) of data stored in the Infrequent Access storage class.
     */
    public void setValueInIA(Long valueInIA) {
        this.valueInIA = valueInIA;
    }

    /**
     * Returns the latest known metered size (in bytes) of data stored in the Infrequent Access storage class.
     *
     * @return The latest known metered size (in bytes) of data stored in the Infrequent Access storage class.
     */
    public Long getValueInIA() {
        return this.valueInIA;
    }

    /**
     * Fluent variant of {@link #setValueInIA(Long)}.
     *
     * @param valueInIA
     *        The latest known metered size (in bytes) of data stored in the Infrequent Access storage class.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public FileSystemSize withValueInIA(Long valueInIA) {
        setValueInIA(valueInIA);
        return this;
    }

    /**
     * Sets the latest known metered size (in bytes) of data stored in the Standard storage class.
     *
     * @param valueInStandard
     *        The latest known metered size (in bytes) of data stored in the Standard storage class.
     */
    public void setValueInStandard(Long valueInStandard) {
        this.valueInStandard = valueInStandard;
    }

    /**
     * Returns the latest known metered size (in bytes) of data stored in the Standard storage class.
     *
     * @return The latest known metered size (in bytes) of data stored in the Standard storage class.
     */
    public Long getValueInStandard() {
        return this.valueInStandard;
    }

    /**
     * Fluent variant of {@link #setValueInStandard(Long)}.
     *
     * @param valueInStandard
     *        The latest known metered size (in bytes) of data stored in the Standard storage class.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public FileSystemSize withValueInStandard(Long valueInStandard) {
        setValueInStandard(valueInStandard);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Only non-null fields are rendered; the exact format matches the generated SDK output.
        final StringBuilder sb = new StringBuilder("{");
        if (getValue() != null)
            sb.append("Value: ").append(getValue()).append(",");
        if (getTimestamp() != null)
            sb.append("Timestamp: ").append(getTimestamp()).append(",");
        if (getValueInIA() != null)
            sb.append("ValueInIA: ").append(getValueInIA()).append(",");
        if (getValueInStandard() != null)
            sb.append("ValueInStandard: ").append(getValueInStandard());
        return sb.append("}").toString();
    }

    /** Two instances are equal when all four properties are (null-safely) equal. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof FileSystemSize))
            return false;
        FileSystemSize other = (FileSystemSize) obj;
        return java.util.Objects.equals(other.getValue(), this.getValue())
                && java.util.Objects.equals(other.getTimestamp(), this.getTimestamp())
                && java.util.Objects.equals(other.getValueInIA(), this.getValueInIA())
                && java.util.Objects.equals(other.getValueInStandard(), this.getValueInStandard());
    }

    /** Hash over all four properties; identical to the 31-based accumulation the SDK generates. */
    @Override
    public int hashCode() {
        return java.util.Objects.hash(getValue(), getTimestamp(), getValueInIA(), getValueInStandard());
    }

    /** Shallow copy via {@link Object#clone()}; all fields are immutable or treated as such. */
    @Override
    public FileSystemSize clone() {
        try {
            return (FileSystemSize) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    /** Delegates marshalling to the generated protocol marshaller. */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.elasticfilesystem.model.transform.FileSystemSizeMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.structure;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.*;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
import org.elasticsearch.cluster.routing.OperationRouting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESAllocationTestCase;
import org.junit.Test;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.*;
public class RoutingIteratorTests extends ESAllocationTestCase {
@Test
public void testEmptyIterator() {
    // An iterator over an empty shard list must report zero remaining and keep
    // returning null, no matter how often it is polled or recreated. The original
    // test repeated the same stanza four times; a loop expresses that directly.
    final ShardShuffler shuffler = new RotationShardShuffler(0);
    for (int round = 0; round < 4; round++) {
        final ShardIterator shardIterator =
                new PlainShardIterator(new ShardId("test1", 0), shuffler.shuffle(Collections.<ShardRouting>emptyList()));
        assertThat(shardIterator.remaining(), equalTo(0));
        assertThat(shardIterator.nextOrNull(), nullValue());
        assertThat(shardIterator.remaining(), equalTo(0));
        assertThat(shardIterator.nextOrNull(), nullValue());
        assertThat(shardIterator.remaining(), equalTo(0));
    }
}
@Test
public void testIterator1() {
    // One index, one shard, two replicas: the iterator must yield exactly three
    // distinct shard routings and then drain to null.
    final MetaData metaData = MetaData.builder()
            .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(2))
            .build();
    final RoutingTable routingTable = RoutingTable.builder()
            .addAsNew(metaData.index("test1"))
            .build();
    final ShardIterator iterator = routingTable.index("test1").shard(0).shardsIt(0);
    assertThat(iterator.size(), equalTo(3));

    final ShardRouting first = iterator.nextOrNull();
    assertThat(first, notNullValue());
    assertThat(iterator.remaining(), equalTo(2));

    final ShardRouting second = iterator.nextOrNull();
    assertThat(second, notNullValue());
    assertThat(iterator.remaining(), equalTo(1));
    assertThat(second, not(sameInstance(first)));

    final ShardRouting third = iterator.nextOrNull();
    assertThat(third, notNullValue());
    assertThat(third, not(sameInstance(first)));
    assertThat(third, not(sameInstance(second)));

    // Once drained, the iterator keeps returning null with zero remaining.
    assertThat(iterator.nextOrNull(), nullValue());
    assertThat(iterator.remaining(), equalTo(0));
    assertThat(iterator.nextOrNull(), nullValue());
    assertThat(iterator.remaining(), equalTo(0));
}
@Test
public void testIterator2() {
    MetaData metaData = MetaData.builder()
            .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
            .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
            .build();
    RoutingTable routingTable = RoutingTable.builder()
            .addAsNew(metaData.index("test1"))
            .addAsNew(metaData.index("test2"))
            .build();

    // Seed 0: two distinct shard routings, then the iterator drains.
    ShardIterator shardIterator = routingTable.index("test1").shard(0).shardsIt(0);
    assertThat(shardIterator.size(), equalTo(2));
    ShardRouting shardRouting1 = shardIterator.nextOrNull();
    assertThat(shardRouting1, notNullValue());
    assertThat(shardIterator.remaining(), equalTo(1));
    ShardRouting shardRouting2 = shardIterator.nextOrNull();
    assertThat(shardRouting2, notNullValue());
    assertThat(shardIterator.remaining(), equalTo(0));
    assertThat(shardRouting2, not(sameInstance(shardRouting1)));
    assertThat(shardIterator.nextOrNull(), nullValue());
    assertThat(shardIterator.remaining(), equalTo(0));
    assertThat(shardIterator.nextOrNull(), nullValue());
    assertThat(shardIterator.remaining(), equalTo(0));

    // Seed 1: rotation starts one position later, so the same two routings
    // come back in the opposite order.
    shardIterator = routingTable.index("test1").shard(0).shardsIt(1);
    assertThat(shardIterator.size(), equalTo(2));
    ShardRouting shardRouting3 = shardIterator.nextOrNull();
    // BUG FIX: the following three assertions previously re-checked
    // shardRouting1/shardRouting2 (a copy-paste slip) instead of the routings
    // just fetched from this iterator.
    assertThat(shardRouting3, notNullValue());
    ShardRouting shardRouting4 = shardIterator.nextOrNull();
    assertThat(shardRouting4, notNullValue());
    assertThat(shardRouting4, not(sameInstance(shardRouting3)));
    assertThat(shardIterator.nextOrNull(), nullValue());
    assertThat(shardIterator.nextOrNull(), nullValue());
    assertThat(shardRouting1, not(sameInstance(shardRouting3)));
    assertThat(shardRouting2, not(sameInstance(shardRouting4)));
    assertThat(shardRouting1, sameInstance(shardRouting4));
    assertThat(shardRouting2, sameInstance(shardRouting3));

    // Seed 2 wraps around to the same order as seed 0.
    shardIterator = routingTable.index("test1").shard(0).shardsIt(2);
    assertThat(shardIterator.size(), equalTo(2));
    ShardRouting shardRouting5 = shardIterator.nextOrNull();
    assertThat(shardRouting5, notNullValue());
    ShardRouting shardRouting6 = shardIterator.nextOrNull();
    assertThat(shardRouting6, notNullValue());
    assertThat(shardRouting6, not(sameInstance(shardRouting5)));
    assertThat(shardIterator.nextOrNull(), nullValue());
    assertThat(shardIterator.nextOrNull(), nullValue());
    assertThat(shardRouting5, sameInstance(shardRouting1));
    assertThat(shardRouting6, sameInstance(shardRouting2));

    // Seed 3 matches seed 1.
    shardIterator = routingTable.index("test1").shard(0).shardsIt(3);
    assertThat(shardIterator.size(), equalTo(2));
    ShardRouting shardRouting7 = shardIterator.nextOrNull();
    assertThat(shardRouting7, notNullValue());
    ShardRouting shardRouting8 = shardIterator.nextOrNull();
    assertThat(shardRouting8, notNullValue());
    assertThat(shardRouting8, not(sameInstance(shardRouting7)));
    assertThat(shardIterator.nextOrNull(), nullValue());
    assertThat(shardIterator.nextOrNull(), nullValue());
    assertThat(shardRouting7, sameInstance(shardRouting3));
    assertThat(shardRouting8, sameInstance(shardRouting4));

    // Seed 4 matches seed 2 (and therefore seed 0).
    shardIterator = routingTable.index("test1").shard(0).shardsIt(4);
    assertThat(shardIterator.size(), equalTo(2));
    ShardRouting shardRouting9 = shardIterator.nextOrNull();
    assertThat(shardRouting9, notNullValue());
    ShardRouting shardRouting10 = shardIterator.nextOrNull();
    assertThat(shardRouting10, notNullValue());
    assertThat(shardRouting10, not(sameInstance(shardRouting9)));
    assertThat(shardIterator.nextOrNull(), nullValue());
    assertThat(shardIterator.nextOrNull(), nullValue());
    assertThat(shardRouting9, sameInstance(shardRouting5));
    assertThat(shardRouting10, sameInstance(shardRouting6));
}
@Test
public void testRandomRouting() {
    // Two single-shard indices with one replica each; only test1 is exercised below.
    final MetaData metaData = MetaData.builder()
            .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
            .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
            .build();
    final RoutingTable routingTable = RoutingTable.builder()
            .addAsNew(metaData.index("test1"))
            .addAsNew(metaData.index("test2"))
            .build();

    // First random iteration: yields two shards, then is exhausted.
    ShardIterator iterator = routingTable.index("test1").shard(0).shardsRandomIt();
    final ShardRouting firstOfRoundOne = iterator.nextOrNull();
    assertThat(firstOfRoundOne, notNullValue());
    assertThat(iterator.nextOrNull(), notNullValue());
    assertThat(iterator.nextOrNull(), nullValue());

    // Second random iteration: the test expects rotation, i.e. the shard that came
    // first before now comes second.
    iterator = routingTable.index("test1").shard(0).shardsRandomIt();
    final ShardRouting firstOfRoundTwo = iterator.nextOrNull();
    assertThat(firstOfRoundTwo, notNullValue());
    final ShardRouting secondOfRoundTwo = iterator.nextOrNull();
    assertThat(secondOfRoundTwo, notNullValue());
    assertThat(iterator.nextOrNull(), nullValue());
    assertThat(firstOfRoundOne, not(sameInstance(firstOfRoundTwo)));
    assertThat(firstOfRoundOne, sameInstance(secondOfRoundTwo));
}
/**
 * Verifies that preferAttributesActiveInitializingShardsIt orders shards so that those on
 * nodes sharing the local node's "rack_id" attribute value come before shards on other nodes.
 */
@Test
public void testAttributePreferenceRouting() {
    // Awareness allocation across rack_id and zone attributes.
    AllocationService strategy = createAllocationService(settingsBuilder()
            .put("cluster.routing.allocation.concurrent_recoveries", 10)
            .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always")
            .put("cluster.routing.allocation.awareness.attributes", "rack_id,zone")
            .build());
    MetaData metaData = MetaData.builder()
            .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
            .build();
    RoutingTable routingTable = RoutingTable.builder()
            .addAsNew(metaData.index("test"))
            .build();
    ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build();
    // Two nodes in different racks/zones; node1 is the local node.
    clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
            .put(newNode("node1", ImmutableMap.of("rack_id", "rack_1", "zone", "zone1")))
            .put(newNode("node2", ImmutableMap.of("rack_id", "rack_2", "zone", "zone2")))
            .localNodeId("node1")
    ).build();
    // Allocate, then apply started shards twice: once for primaries, once for replicas.
    routingTable = strategy.reroute(clusterState, "reroute").routingTable();
    clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
    routingTable = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable();
    clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
    routingTable = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable();
    clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
    // after all are started, check routing iteration
    ShardIterator shardIterator = clusterState.routingTable().index("test").shard(0).preferAttributesActiveInitializingShardsIt(new String[]{"rack_id"}, clusterState.nodes());
    ShardRouting shardRouting = shardIterator.nextOrNull();
    assertThat(shardRouting, notNullValue());
    // Local node's rack (node1) is preferred, then the other rack.
    assertThat(shardRouting.currentNodeId(), equalTo("node1"));
    shardRouting = shardIterator.nextOrNull();
    assertThat(shardRouting, notNullValue());
    assertThat(shardRouting.currentNodeId(), equalTo("node2"));
    // The preference ordering is stable across freshly created iterators.
    shardIterator = clusterState.routingTable().index("test").shard(0).preferAttributesActiveInitializingShardsIt(new String[]{"rack_id"}, clusterState.nodes());
    shardRouting = shardIterator.nextOrNull();
    assertThat(shardRouting, notNullValue());
    assertThat(shardRouting.currentNodeId(), equalTo("node1"));
    shardRouting = shardIterator.nextOrNull();
    assertThat(shardRouting, notNullValue());
    assertThat(shardRouting.currentNodeId(), equalTo("node2"));
}
@Test
public void testNodeSelectorRouting(){
    // Node-selector preference: an attribute value, attribute wildcards, a
    // node name, and a node-name wildcard should each resolve to the right
    // shard copy; an unknown selector must be rejected.
    AllocationService allocation = createAllocationService(settingsBuilder()
            .put("cluster.routing.allocation.concurrent_recoveries", 10)
            .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always")
            .build());

    MetaData meta = MetaData.builder()
            .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
            .build();
    RoutingTable table = RoutingTable.builder()
            .addAsNew(meta.index("test"))
            .build();
    ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metaData(meta).routingTable(table).build();

    // Named nodes carrying a "disk" attribute; node1 ("fred") is local.
    state = ClusterState.builder(state).nodes(DiscoveryNodes.builder()
            .put(newNode("fred","node1", ImmutableMap.of("disk", "ebs")))
            .put(newNode("barney","node2", ImmutableMap.of("disk", "ephemeral")))
            .localNodeId("node1")
    ).build();

    table = allocation.reroute(state, "reroute").routingTable();
    state = ClusterState.builder(state).routingTable(table).build();
    // One start round: primary active on node1, replica still INITIALIZING on node2.
    table = allocation.applyStartedShards(state, state.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable();
    state = ClusterState.builder(state).routingTable(table).build();

    assertSelectorMatchesSingleNode(state, "disk:ebs", "node1");   // attribute exact match
    assertSelectorMatchesSingleNode(state, "dis*:eph*", "node2");  // wildcard key and value
    assertSelectorMatchesSingleNode(state, "fred", "node1");       // node name
    assertSelectorMatchesSingleNode(state, "bar*", "node2");       // node-name wildcard

    final List<ShardRouting> activeShards = state.routingTable().index("test").shard(0).activeShards();
    final List<ShardRouting> initializingShards = state.routingTable().index("test").shardsWithState(INITIALIZING);

    // Multiple selectors union to both copies; active copies come before
    // initializing ones.
    String[][] multiSelectors = {
            {"disk:eph*","disk:ebs"},
            {"disk:*", "invalid_name"},
            {"disk:*", "disk:*"}
    };
    for (String[] selectors : multiSelectors) {
        ShardsIterator it = state.routingTable().index("test").shard(0)
                .onlyNodeSelectorActiveInitializingShardsIt(selectors, state.nodes());
        assertThat(it.size(), equalTo(2));
        assertThat(it.nextOrNull().currentNodeId(), equalTo(activeShards.get(0).currentNodeId()));
        assertThat(it.nextOrNull().currentNodeId(), equalTo(initializingShards.get(0).currentNodeId()));
    }

    // A selector matching neither a node name nor an attribute is invalid.
    try {
        state.routingTable().index("test").shard(0).onlyNodeSelectorActiveInitializingShardsIt("welma", state.nodes());
        fail("shouldve raised illegalArgumentException");
    } catch (IllegalArgumentException expected) {
        // expected: "welma" matches nothing
    }

    // Valid selectors still work after the failed lookup.
    assertSelectorMatchesSingleNode(state, "fred", "node1");
}

/** Asserts that {@code selector} resolves to exactly one shard copy, located on {@code expectedNodeId}. */
private void assertSelectorMatchesSingleNode(ClusterState state, String selector, String expectedNodeId) {
    ShardsIterator it = state.routingTable().index("test").shard(0)
            .onlyNodeSelectorActiveInitializingShardsIt(selector, state.nodes());
    assertThat(it.size(), equalTo(1));
    assertThat(it.nextOrNull().currentNodeId(), equalTo(expectedNodeId));
}
@Test
public void testShardsAndPreferNodeRouting() {
    // "_shards:N" restricts a search to one shard id; adding
    // "_prefer_node:<id>" additionally pins iteration to that node.
    AllocationService allocation = createAllocationService(settingsBuilder()
            .put("cluster.routing.allocation.concurrent_recoveries", 10)
            .build());

    MetaData meta = MetaData.builder()
            .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(1))
            .build();
    RoutingTable table = RoutingTable.builder()
            .addAsNew(meta.index("test"))
            .build();
    ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metaData(meta).routingTable(table).build();
    state = ClusterState.builder(state).nodes(DiscoveryNodes.builder()
            .put(newNode("node1"))
            .put(newNode("node2"))
            .localNodeId("node1")
    ).build();

    table = allocation.reroute(state, "reroute").routingTable();
    state = ClusterState.builder(state).routingTable(table).build();
    // Two start rounds: primaries, then replicas.
    for (int round = 0; round < 2; round++) {
        table = allocation.applyStartedShards(state, state.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable();
        state = ClusterState.builder(state).routingTable(table).build();
    }

    OperationRouting operationRouting = new OperationRouting(Settings.Builder.EMPTY_SETTINGS, new AwarenessAllocationDecider());

    // Shard-id restriction alone: exactly one group, with the requested id.
    GroupShardsIterator groups = operationRouting.searchShards(state, new String[]{"test"}, null, "_shards:0");
    assertThat(groups.size(), equalTo(1));
    assertThat(groups.iterator().next().shardId().id(), equalTo(0));

    groups = operationRouting.searchShards(state, new String[]{"test"}, null, "_shards:1");
    assertThat(groups.size(), equalTo(1));
    assertThat(groups.iterator().next().shardId().id(), equalTo(1));

    // Without a node preference, consecutive searches rotate between copies.
    groups = operationRouting.searchShards(state, new String[]{"test"}, null, "_shards:0;");
    assertThat(groups.size(), equalTo(1));
    assertThat(groups.iterator().next().shardId().id(), equalTo(0));
    String firstRoundNodeId = groups.iterator().next().nextOrNull().currentNodeId();

    groups = operationRouting.searchShards(state, new String[]{"test"}, null, "_shards:0");
    assertThat(groups.size(), equalTo(1));
    assertThat(groups.iterator().next().shardId().id(), equalTo(0));
    assertThat(groups.iterator().next().nextOrNull().currentNodeId(), not(equalTo(firstRoundNodeId)));

    // With "_prefer_node:node1", every search starts on node1.
    for (int attempt = 0; attempt < 2; attempt++) {
        groups = operationRouting.searchShards(state, new String[]{"test"}, null, "_shards:0;_prefer_node:node1");
        assertThat(groups.size(), equalTo(1));
        assertThat(groups.iterator().next().shardId().id(), equalTo(0));
        assertThat(groups.iterator().next().nextOrNull().currentNodeId(), equalTo("node1"));
    }
}
@Test
public void testReplicaShardPreferenceIters() throws Exception {
    // "_replica" and "_replica_first" preferences: before replicas are
    // started, "_replica_first" falls back to the started primary first;
    // once replicas are active they are returned ahead of the primary.
    AllocationService allocation = createAllocationService(settingsBuilder()
            .put("cluster.routing.allocation.concurrent_recoveries", 10)
            .build());
    OperationRouting operationRouting = new OperationRouting(Settings.Builder.EMPTY_SETTINGS, new AwarenessAllocationDecider());

    MetaData meta = MetaData.builder()
            .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(2))
            .build();
    RoutingTable table = RoutingTable.builder()
            .addAsNew(meta.index("test"))
            .build();
    ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metaData(meta).routingTable(table).build();
    state = ClusterState.builder(state).nodes(DiscoveryNodes.builder()
            .put(newNode("node1"))
            .put(newNode("node2"))
            .put(newNode("node3"))
            .localNodeId("node1")
    ).build();

    table = allocation.reroute(state, "reroute").routingTable();
    state = ClusterState.builder(state).routingTable(table).build();
    // One start round: primaries active, replicas still INITIALIZING.
    table = allocation.applyStartedShards(state, state.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable();
    state = ClusterState.builder(state).routingTable(table).build();

    // Replicas not yet started: primary first, then initializing replicas.
    GroupShardsIterator groups = operationRouting.searchShards(state, new String[]{"test"}, null, "_replica_first");
    assertThat(groups.size(), equalTo(2)); // two potential shards
    ShardIterator iter = groups.iterator().next();
    assertThat(iter.size(), equalTo(3)); // three potential candidates for the shard

    ShardRouting routing = iter.nextOrNull();
    assertNotNull(routing);
    assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
    assertTrue(routing.primary()); // replicas haven't initialized yet, so primary is first
    assertTrue(routing.started());
    for (int i = 0; i < 2; i++) {
        routing = iter.nextOrNull();
        assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
        assertFalse(routing.primary());
        assertTrue(routing.initializing());
    }

    // Two more start rounds bring all replicas to STARTED.
    for (int round = 0; round < 2; round++) {
        table = allocation.applyStartedShards(state, state.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable();
        state = ClusterState.builder(state).routingTable(table).build();
    }

    // "_replica" returns only the replica copies.
    groups = operationRouting.searchShards(state, new String[]{"test"}, null, "_replica");
    assertThat(groups.size(), equalTo(2)); // two potential shards
    iter = groups.iterator().next();
    assertThat(iter.size(), equalTo(2)); // two potential replicas for the shard
    for (int i = 0; i < 2; i++) {
        routing = iter.nextOrNull();
        assertNotNull(routing);
        assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
        assertFalse(routing.primary());
    }

    // "_replica_first" now yields both replicas before the primary.
    groups = operationRouting.searchShards(state, new String[]{"test"}, null, "_replica_first");
    assertThat(groups.size(), equalTo(2)); // two potential shards
    iter = groups.iterator().next();
    assertThat(iter.size(), equalTo(3)); // three potential candidates for the shard
    routing = iter.nextOrNull();
    assertNotNull(routing);
    for (int i = 0; i < 2; i++) {
        assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
        assertFalse(routing.primary());
        routing = iter.nextOrNull();
    }
    // finally the primary
    assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
    assertTrue(routing.primary());
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.