gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright (c) 2014 Alexander Gulko <kirhog at gmail dot com>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fastmongo.odm.bson.mapping.core;
import com.mongodb.BasicDBObject;
import org.bson.BSON;
import org.bson.BasicBSONEncoder;
import org.bson.types.Code;
import org.bson.types.ObjectId;
import org.fastmongo.odm.model.user.Group;
import org.fastmongo.odm.model.user.User;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
/**
 * Tests for {@code BsonReader}: verifies reading and skipping of every
 * supported BSON type, field-name caching, and whole-field skipping.
 *
 * @author Alexander Gulko
 */
public class BsonReaderTest {

    /** Field name used by the single-field test documents below. */
    private static final String FIELD = "field";

    @Test
    public void testFieldNamesCache() {
        BsonToDomainConverter converter = new BsonToDomainConverter();
        List<Class<?>> classes = Arrays.<Class<?>>asList(User.class, Group.class);
        for (Class<?> clazz : classes) {
            // Build a document with one string value per mapped field.
            BasicDBObject db = new BasicDBObject();
            Map<String, Field> fieldMap = converter.getFields(clazz);
            for (Field field : fieldMap.values()) {
                db.append(field.getName(), "dummy");
            }
            byte[] data = new BasicBSONEncoder().encode(db);
            BsonReader reader = new BsonReader(data);
            // A BSON document starts with an int32 holding its total size.
            int size = reader.readInt();
            Assert.assertEquals(size, data.length);
            // Cached field-name reads must yield names in encoding order.
            for (Field field : fieldMap.values()) {
                byte type = reader.readType();
                String fieldName = reader.readFieldNameCached();
                String value = reader.readString();
                Assert.assertEquals(type, BSON.STRING);
                Assert.assertEquals(fieldName, field.getName());
                Assert.assertEquals(value, "dummy");
            }
        }
    }

    @Test
    public void test_numberField() {
        BasicDBObject db = new BasicDBObject(FIELD, 123.4);

        BsonReader reader = readSingleFieldHeader(db, BSON.NUMBER);
        Assert.assertEquals(reader.readDouble(), 123.4);
        assertEndOfObject(reader);

        reader = readSingleFieldHeader(db, BSON.NUMBER);
        reader.skipDouble();
        assertEndOfObject(reader);
    }

    @Test
    public void test_stringField() {
        BasicDBObject db = new BasicDBObject(FIELD, "string");

        BsonReader reader = readSingleFieldHeader(db, BSON.STRING);
        Assert.assertEquals(reader.readString(), "string");
        assertEndOfObject(reader);

        reader = readSingleFieldHeader(db, BSON.STRING);
        reader.skipString();
        assertEndOfObject(reader);
    }

    @Test
    public void test_objectField() {
        BasicDBObject db = new BasicDBObject(FIELD, new BasicDBObject());
        BsonReader reader = readSingleFieldHeader(db, BSON.OBJECT);
        reader.skipObject();
        assertEndOfObject(reader);
    }

    @Test
    public void test_arrayField() {
        // Primitive arrays are encoded as BSON arrays.
        int[] value = {1, 2, 3};
        BasicDBObject db = new BasicDBObject(FIELD, value);
        BsonReader reader = readSingleFieldHeader(db, BSON.ARRAY);
        reader.skipObject();
        assertEndOfObject(reader);
    }

    @Test
    public void test_listField() {
        // Lists are encoded the same way as arrays.
        List<Integer> value = Arrays.asList(1, 2, 3);
        BasicDBObject db = new BasicDBObject(FIELD, value);
        BsonReader reader = readSingleFieldHeader(db, BSON.ARRAY);
        reader.skipObject();
        assertEndOfObject(reader);
    }

    @Test
    public void test_objectIdField() {
        ObjectId value = new ObjectId();
        BasicDBObject db = new BasicDBObject(FIELD, value);

        BsonReader reader = readSingleFieldHeader(db, BSON.OID);
        Assert.assertEquals(reader.readObjectId(), value);
        assertEndOfObject(reader);

        reader = readSingleFieldHeader(db, BSON.OID);
        reader.skipObjectId();
        assertEndOfObject(reader);
    }

    @Test
    public void test_booleanField() {
        BasicDBObject db = new BasicDBObject(FIELD, true);

        BsonReader reader = readSingleFieldHeader(db, BSON.BOOLEAN);
        Assert.assertEquals(reader.readBoolean(), true);
        assertEndOfObject(reader);

        reader = readSingleFieldHeader(db, BSON.BOOLEAN);
        reader.skipBoolean();
        assertEndOfObject(reader);
    }

    @Test
    public void test_dateField() {
        Date value = new Date();
        BasicDBObject db = new BasicDBObject(FIELD, value);

        BsonReader reader = readSingleFieldHeader(db, BSON.DATE);
        Assert.assertEquals(reader.readDate(), value);
        assertEndOfObject(reader);

        reader = readSingleFieldHeader(db, BSON.DATE);
        reader.skipDate();
        assertEndOfObject(reader);
    }

    @Test
    public void test_nullField() {
        // NULL has no value bytes: the header is immediately followed by EOO.
        BasicDBObject db = new BasicDBObject(FIELD, null);
        BsonReader reader = readSingleFieldHeader(db, BSON.NULL);
        assertEndOfObject(reader);
    }

    @Test
    public void test_symbolField() {
        // A Character value is encoded as a one-character BSON string.
        BasicDBObject db = new BasicDBObject(FIELD, 'x');

        BsonReader reader = readSingleFieldHeader(db, BSON.STRING);
        Assert.assertEquals(reader.readString(), "x");
        assertEndOfObject(reader);

        reader = readSingleFieldHeader(db, BSON.STRING);
        reader.skipString();
        assertEndOfObject(reader);
    }

    @Test
    public void test_numberIntField() {
        BasicDBObject db = new BasicDBObject(FIELD, 123);

        BsonReader reader = readSingleFieldHeader(db, BSON.NUMBER_INT);
        Assert.assertEquals(reader.readInt(), 123);
        assertEndOfObject(reader);

        reader = readSingleFieldHeader(db, BSON.NUMBER_INT);
        reader.skipInt();
        assertEndOfObject(reader);
    }

    @Test
    public void test_numberLongField() {
        BasicDBObject db = new BasicDBObject(FIELD, 123L);

        BsonReader reader = readSingleFieldHeader(db, BSON.NUMBER_LONG);
        Assert.assertEquals(reader.readLong(), 123L);
        assertEndOfObject(reader);

        reader = readSingleFieldHeader(db, BSON.NUMBER_LONG);
        reader.skipLong();
        assertEndOfObject(reader);
    }

    @Test
    public void test_skipField() {
        // skipField() must consume both header and value of each field.
        BasicDBObject db = new BasicDBObject("field1", 1)
                .append("field2", 2);
        BsonReader reader = getReader(db);
        reader.skipInt();
        reader.skipField();
        reader.skipField();
        assertEndOfObject(reader);
    }

    @Test(dataProvider = "bsonTypesWithValues")
    public void test_skipFieldType(byte type, Object value) {
        BasicDBObject db = new BasicDBObject(FIELD, value);
        BsonReader reader = getReader(db);
        reader.skipInt();
        byte fieldType = reader.readType();
        Assert.assertEquals(fieldType, type);
        Assert.assertEquals(reader.readFieldName(), FIELD);
        // Type-directed skip must land exactly on the end-of-object marker.
        reader.skipField(fieldType);
        assertEndOfObject(reader);
    }

    /**
     * Sample values covering every BSON type the reader knows how to skip.
     */
    @DataProvider(name = "bsonTypesWithValues")
    public Object[][] getSamplesForBsonTypes() {
        return new Object[][]{
                new Object[]{BSON.NULL, null},
                new Object[]{BSON.BOOLEAN, true},
                new Object[]{BSON.NUMBER, 123.4},
                new Object[]{BSON.NUMBER_INT, 123},
                new Object[]{BSON.NUMBER_LONG, 123L},
                new Object[]{BSON.STRING, 'x'},
                new Object[]{BSON.STRING, "xxx"},
                new Object[]{BSON.OID, new ObjectId()},
                new Object[]{BSON.DATE, new Date()},
                new Object[]{BSON.ARRAY, new int[]{1, 2, 3}},
                new Object[]{BSON.ARRAY, Arrays.asList(1, 2, 3)},
                new Object[]{BSON.OBJECT, new BasicDBObject()}
        };
    }

    @Test(expectedExceptions = UnsupportedOperationException.class)
    public void test_skipFieldOfUnknownType_Regex() {
        // REGEX is not supported by skipField(type) and must fail loudly.
        BasicDBObject db = new BasicDBObject(FIELD, Pattern.compile("."));
        BsonReader reader = getReader(db);
        reader.skipInt();
        byte fieldType = reader.readType();
        Assert.assertEquals(fieldType, BSON.REGEX);
        Assert.assertEquals(reader.readFieldName(), FIELD);
        reader.skipField(fieldType);
    }

    @Test(expectedExceptions = UnsupportedOperationException.class)
    public void test_skipFieldOfUnknownType_Code() {
        // CODE is not supported by skipField(type) and must fail loudly.
        BasicDBObject db = new BasicDBObject(FIELD, new Code("some code"));
        BsonReader reader = getReader(db);
        reader.skipInt();
        byte fieldType = reader.readType();
        Assert.assertEquals(fieldType, BSON.CODE);
        Assert.assertEquals(reader.readFieldName(), FIELD);
        reader.skipField(fieldType);
    }

    /**
     * Encodes {@code db}, skips the document-size prefix, and asserts that the
     * first field has the expected BSON type and is named {@link #FIELD}.
     *
     * @param db           single-field document to encode
     * @param expectedType expected BSON type byte of the first field
     * @return a reader positioned at the first field's value
     */
    private BsonReader readSingleFieldHeader(BasicDBObject db, byte expectedType) {
        BsonReader reader = getReader(db);
        reader.skipInt();
        Assert.assertEquals(reader.readType(), expectedType);
        Assert.assertEquals(reader.readFieldName(), FIELD);
        return reader;
    }

    /** Asserts that the reader is positioned at the end-of-object marker. */
    private void assertEndOfObject(BsonReader reader) {
        Assert.assertEquals(reader.readType(), BSON.EOO);
    }

    /** Encodes the document and wraps the bytes in a fresh reader. */
    private BsonReader getReader(BasicDBObject db) {
        return new BsonReader(encode(db));
    }

    private byte[] encode(BasicDBObject db) {
        return new BasicBSONEncoder().encode(db);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.eagle.jpm.mr.running;
import com.typesafe.config.ConfigFactory;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.curator.test.TestingServer;
import org.apache.curator.utils.CloseableUtils;
import org.apache.eagle.jpm.mr.running.recover.MRRunningJobManager;
import org.apache.eagle.jpm.util.jobrecover.RunningJobManager;
import org.apache.zookeeper.CreateMode;
import org.junit.*;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.*;
import static org.powermock.api.mockito.PowerMockito.mockStatic;
@RunWith(PowerMockRunner.class)
@PrepareForTest({MRRunningJobManager.class, RunningJobManager.class, LoggerFactory.class})
@PowerMockIgnore({"javax.*"})
public class MRRunningJobManagerTest {
    private static TestingServer zk;
    private static com.typesafe.config.Config config = ConfigFactory.load();
    private static CuratorFramework curator;
    /** Shared zookeeper node that the concurrent tasks delete/recover under lock. */
    private static final String SHARE_RESOURCES = "/apps/mr/running/sandbox/yarnAppId/jobId";
    private static final int QTY = 5;
    private static final int REPETITIONS = QTY * 10;
    private static MRRunningJobConfig.EndpointConfig endpointConfig;
    private static MRRunningJobConfig.ZKStateConfig zkStateConfig;
    /** Mock logger returned by the mocked LoggerFactory so error calls can be verified. */
    private static org.slf4j.Logger log = mock(org.slf4j.Logger.class);
    private static final int BUFFER_SIZE = 4096;
    private static final String LOCKS_BASE_PATH = "/locks";

    @BeforeClass
    public static void setupZookeeper() throws Exception {
        // Embedded zookeeper shared by every MRRunningJobManager the tests create.
        zk = new TestingServer();
        curator = CuratorFrameworkFactory.newClient(zk.getConnectString(), new ExponentialBackoffRetry(1000, 3));
        curator.start();
        MRRunningJobConfig mrRunningJobConfig = MRRunningJobConfig.newInstance(config);
        zkStateConfig = mrRunningJobConfig.getZkStateConfig();
        zkStateConfig.zkQuorum = zk.getConnectString();
        endpointConfig = mrRunningJobConfig.getEndpointConfig();
        // Route all LoggerFactory.getLogger() calls to the shared mock logger.
        mockStatic(LoggerFactory.class);
        when(LoggerFactory.getLogger(any(Class.class))).thenReturn(log);
    }

    @AfterClass
    public static void teardownZookeeper() throws Exception {
        CloseableUtils.closeQuietly(curator);
        CloseableUtils.closeQuietly(zk);
    }

    @Before
    public void createPath() throws Exception {
        // (Re-)create the shared node each test operates on.
        if (curator.checkExists().forPath(SHARE_RESOURCES) == null) {
            curator.create()
                    .creatingParentsIfNeeded()
                    .withMode(CreateMode.PERSISTENT)
                    .forPath(SHARE_RESOURCES);
        }
    }

    @After
    public void cleanPath() throws Exception {
        // Remove both the shared data node and any leftover lock nodes.
        if (curator.checkExists().forPath(SHARE_RESOURCES) != null) {
            curator.delete().deletingChildrenIfNeeded().forPath(SHARE_RESOURCES);
        }
        if (curator.checkExists().forPath(LOCKS_BASE_PATH) != null) {
            curator.delete().guaranteed().deletingChildrenIfNeeded().forPath(LOCKS_BASE_PATH);
        }
    }

    /**
     * Concurrent deletes of the same node must not race: once deleted,
     * the node stays gone and no error is logged.
     */
    @Test
    @Ignore
    public void testMRRunningJobManagerDelWithLock() throws Exception {
        Assert.assertTrue(curator.checkExists().forPath(SHARE_RESOURCES) != null);
        ExecutorService service = Executors.newFixedThreadPool(QTY);
        for (int i = 0; i < QTY; ++i) {
            Callable<Void> task = () -> {
                try {
                    MRRunningJobManager mrRunningJobManager = new MRRunningJobManager(zkStateConfig);
                    for (int j = 0; j < REPETITIONS; ++j) {
                        mrRunningJobManager.delete("yarnAppId", "jobId");
                    }
                } catch (Exception e) {
                    // errors surface through the mock logger verified below
                }
                return null;
            };
            service.submit(task);
        }
        service.shutdown();
        service.awaitTermination(10, TimeUnit.MINUTES);
        Assert.assertTrue(curator.checkExists().forPath(SHARE_RESOURCES) == null);
        verify(log, never()).error(anyString(), anyString(), anyString(), anyString(), any(Throwable.class));
        verify(log, never()).error(anyString(), anyString(), anyString());
        verify(log, never()).error(anyString(), any(Throwable.class));
    }

    /**
     * Interleaved delete/recoverYarnApp calls against the same app must not
     * log errors when properly locked.
     */
    @Test
    @Ignore
    public void testMRRunningJobManagerRecoverYarnAppWithLock() throws Exception {
        Assert.assertTrue(curator.checkExists().forPath(SHARE_RESOURCES) != null);
        curator.setData().forPath(SHARE_RESOURCES, generateZkSetData());
        ExecutorService service = Executors.newFixedThreadPool(QTY);
        for (int i = 0; i < QTY; ++i) {
            Callable<Void> task = () -> {
                try {
                    MRRunningJobManager mrRunningJobManager = new MRRunningJobManager(zkStateConfig);
                    for (int j = 0; j < REPETITIONS; ++j) {
                        if (j % 3 == 0) {
                            mrRunningJobManager.delete("yarnAppId", "jobId");
                        } else {
                            mrRunningJobManager.recoverYarnApp("yarnAppId");
                        }
                    }
                } catch (Exception e) {
                    // errors surface through the mock logger verified below
                }
                return null;
            };
            service.submit(task);
        }
        service.shutdown();
        service.awaitTermination(10, TimeUnit.MINUTES);
        verify(log, never()).error(anyString(), any(Throwable.class));
    }

    /**
     * Interleaved delete/recover calls must not log errors when properly locked.
     */
    @Test
    public void testMRRunningJobManagerRecoverWithLock() throws Exception {
        Assert.assertTrue(curator.checkExists().forPath(SHARE_RESOURCES) != null);
        curator.setData().forPath(SHARE_RESOURCES, generateZkSetData());
        ExecutorService service = Executors.newFixedThreadPool(QTY);
        for (int i = 0; i < QTY; ++i) {
            Callable<Void> task = () -> {
                try {
                    MRRunningJobManager mrRunningJobManager = new MRRunningJobManager(zkStateConfig);
                    for (int j = 0; j < REPETITIONS; ++j) {
                        if (j % 3 == 0) {
                            mrRunningJobManager.delete("yarnAppId", "jobId");
                        } else {
                            mrRunningJobManager.recover();
                        }
                    }
                } catch (Exception e) {
                    // errors surface through the mock logger verified below
                }
                return null;
            };
            service.submit(task);
        }
        service.shutdown();
        service.awaitTermination(10, TimeUnit.MINUTES);
        verify(log, never()).error(anyString(), any(Throwable.class));
    }

    /**
     * Reads the jobInfo_805.json test resource into a byte array.
     *
     * @return the raw bytes of the resource
     * @throws IOException if the resource is missing or cannot be read
     */
    private byte[] generateZkSetData() throws IOException {
        // try-with-resources: the previous version leaked this stream and
        // would have thrown an unexplained NPE if the resource was missing.
        try (InputStream jsonStream = this.getClass().getResourceAsStream("/jobInfo_805.json")) {
            if (jsonStream == null) {
                throw new IOException("test resource /jobInfo_805.json not found on classpath");
            }
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            byte[] buffer = new byte[BUFFER_SIZE];
            int count;
            while ((count = jsonStream.read(buffer, 0, BUFFER_SIZE)) != -1) {
                outputStream.write(buffer, 0, count);
            }
            return outputStream.toByteArray();
        }
    }
}
| |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.ddmlib.testrunner;
import com.android.ddmlib.AdbCommandRejectedException;
import com.android.ddmlib.IDevice;
import com.android.ddmlib.Log;
import com.android.ddmlib.ShellCommandUnresponsiveException;
import com.android.ddmlib.TimeoutException;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Hashtable;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Runs an Android instrumentation test command remotely and reports results.
 */
public class RemoteAndroidTestRunner implements IRemoteAndroidTestRunner {
    private final String mPackageName;
    private final String mRunnerName;
    private final IDevice mRemoteDevice;
    /** Max time (ms) the shell command may go without output; 0 means no timeout. */
    private int mMaxTimeToOutputResponse = 0;
    private String mRunName = null;
    /**
     * Map of name-value instrumentation argument pairs. A LinkedHashMap keeps
     * arguments in insertion order so the generated command line is
     * deterministic (the legacy Hashtable iterated in arbitrary order).
     */
    private Map<String, String> mArgMap;
    private InstrumentationResultParser mParser;
    private static final String LOG_TAG = "RemoteAndroidTest";
    private static final String DEFAULT_RUNNER_NAME = "android.test.InstrumentationTestRunner";
    private static final char CLASS_SEPARATOR = ',';
    private static final char METHOD_SEPARATOR = '#';
    private static final char RUNNER_SEPARATOR = '/';
    // defined instrumentation argument names
    private static final String CLASS_ARG_NAME = "class";
    private static final String LOG_ARG_NAME = "log";
    private static final String DEBUG_ARG_NAME = "debug";
    private static final String COVERAGE_ARG_NAME = "coverage";
    private static final String PACKAGE_ARG_NAME = "package";
    private static final String SIZE_ARG_NAME = "size";

    /**
     * Creates a remote Android test runner.
     *
     * @param packageName the Android application package that contains the tests to run
     * @param runnerName the instrumentation test runner to execute. If null, will use default
     *   runner
     * @param remoteDevice the Android device to execute tests on
     */
    public RemoteAndroidTestRunner(String packageName,
                                   String runnerName,
                                   IDevice remoteDevice) {
        mPackageName = packageName;
        mRunnerName = runnerName;
        mRemoteDevice = remoteDevice;
        mArgMap = new LinkedHashMap<String, String>();
    }

    /**
     * Alternate constructor. Uses default instrumentation runner.
     *
     * @param packageName the Android application package that contains the tests to run
     * @param remoteDevice the Android device to execute tests on
     */
    public RemoteAndroidTestRunner(String packageName,
                                   IDevice remoteDevice) {
        this(packageName, null, remoteDevice);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getPackageName() {
        return mPackageName;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getRunnerName() {
        if (mRunnerName == null) {
            return DEFAULT_RUNNER_NAME;
        }
        return mRunnerName;
    }

    /**
     * Returns the complete instrumentation component path, e.g.
     * {@code com.example.tests/android.test.InstrumentationTestRunner}.
     */
    private String getRunnerPath() {
        return getPackageName() + RUNNER_SEPARATOR + getRunnerName();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setClassName(String className) {
        addInstrumentationArg(CLASS_ARG_NAME, className);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setClassNames(String[] classNames) {
        // Multiple classes are passed as one comma-separated "class" argument.
        StringBuilder classArgBuilder = new StringBuilder();
        for (int i = 0; i < classNames.length; i++) {
            if (i != 0) {
                classArgBuilder.append(CLASS_SEPARATOR);
            }
            classArgBuilder.append(classNames[i]);
        }
        setClassName(classArgBuilder.toString());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setMethodName(String className, String testName) {
        setClassName(className + METHOD_SEPARATOR + testName);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setTestPackageName(String packageName) {
        addInstrumentationArg(PACKAGE_ARG_NAME, packageName);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void addInstrumentationArg(String name, String value) {
        if (name == null || value == null) {
            throw new IllegalArgumentException("name or value arguments cannot be null");
        }
        mArgMap.put(name, value);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void removeInstrumentationArg(String name) {
        if (name == null) {
            throw new IllegalArgumentException("name argument cannot be null");
        }
        mArgMap.remove(name);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void addBooleanArg(String name, boolean value) {
        addInstrumentationArg(name, Boolean.toString(value));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setLogOnly(boolean logOnly) {
        addBooleanArg(LOG_ARG_NAME, logOnly);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setDebug(boolean debug) {
        addBooleanArg(DEBUG_ARG_NAME, debug);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setCoverage(boolean coverage) {
        addBooleanArg(COVERAGE_ARG_NAME, coverage);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setTestSize(TestSize size) {
        addInstrumentationArg(SIZE_ARG_NAME, size.getRunnerValue());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setMaxtimeToOutputResponse(int maxTimeToOutputResponse) {
        mMaxTimeToOutputResponse = maxTimeToOutputResponse;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setRunName(String runName) {
        mRunName = runName;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void run(ITestRunListener... listeners)
            throws TimeoutException, AdbCommandRejectedException, ShellCommandUnresponsiveException,
            IOException {
        run(Arrays.asList(listeners));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void run(Collection<ITestRunListener> listeners)
            throws TimeoutException, AdbCommandRejectedException, ShellCommandUnresponsiveException,
            IOException {
        final String runCaseCommandStr = String.format("am instrument -w -r %1$s %2$s",
                getArgsCommand(), getRunnerPath());
        Log.i(LOG_TAG, String.format("Running %1$s on %2$s", runCaseCommandStr,
                mRemoteDevice.getSerialNumber()));
        String runName = mRunName == null ? mPackageName : mRunName;
        mParser = new InstrumentationResultParser(runName, listeners);
        try {
            mRemoteDevice.executeShellCommand(runCaseCommandStr, mParser, mMaxTimeToOutputResponse);
        } catch (IOException e) {
            Log.w(LOG_TAG, String.format("IOException %1$s when running tests %2$s on %3$s",
                    e.toString(), getPackageName(), mRemoteDevice.getSerialNumber()));
            // rely on parser to communicate results to listeners
            mParser.handleTestRunFailed(e.toString());
            throw e;
        } catch (ShellCommandUnresponsiveException e) {
            Log.w(LOG_TAG, String.format(
                    "ShellCommandUnresponsiveException %1$s when running tests %2$s on %3$s",
                    e.toString(), getPackageName(), mRemoteDevice.getSerialNumber()));
            mParser.handleTestRunFailed(String.format(
                    "Failed to receive adb shell test output within %1$d ms. " +
                    "Test may have timed out, or adb connection to device became unresponsive",
                    mMaxTimeToOutputResponse));
            throw e;
        } catch (TimeoutException e) {
            Log.w(LOG_TAG, String.format(
                    "TimeoutException when running tests %1$s on %2$s", getPackageName(),
                    mRemoteDevice.getSerialNumber()));
            mParser.handleTestRunFailed(e.toString());
            throw e;
        } catch (AdbCommandRejectedException e) {
            Log.w(LOG_TAG, String.format(
                    "AdbCommandRejectedException %1$s when running tests %2$s on %3$s",
                    e.toString(), getPackageName(), mRemoteDevice.getSerialNumber()));
            mParser.handleTestRunFailed(e.toString());
            throw e;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void cancel() {
        if (mParser != null) {
            mParser.cancel();
        }
    }

    /**
     * Returns the full instrumentation command line syntax for the provided instrumentation
     * arguments.
     * Returns an empty string if no arguments were specified.
     * <p>
     * NOTE(review): values are not shell-quoted, so arguments containing
     * whitespace will break the generated command — confirm before relying
     * on such values.
     */
    private String getArgsCommand() {
        StringBuilder commandBuilder = new StringBuilder();
        for (Entry<String, String> argPair : mArgMap.entrySet()) {
            final String argCmd = String.format(" -e %1$s %2$s", argPair.getKey(),
                    argPair.getValue());
            commandBuilder.append(argCmd);
        }
        return commandBuilder.toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.version;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.jackrabbit.JcrConstants.JCR_BASEVERSION;
import static org.apache.jackrabbit.JcrConstants.JCR_ISCHECKEDOUT;
import static org.apache.jackrabbit.JcrConstants.JCR_UUID;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.MISSING_NODE;
import static org.apache.jackrabbit.oak.plugins.version.VersionConstants.RESTORE_PREFIX;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.lock.LockConstants;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.Editor;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
/**
* TODO document
*/
class VersionEditor implements Editor {
private final VersionEditor parent;
private final ReadWriteVersionManager vMgr;
private final NodeBuilder node;
private final String name;
private Boolean isVersionable = null;
private NodeState before;
private NodeState after;
private boolean isReadOnly;
private CommitInfo commitInfo;
public VersionEditor(@Nonnull NodeBuilder versionStore,
@Nonnull NodeBuilder workspaceRoot,
@Nonnull CommitInfo commitInfo) {
this(null, new ReadWriteVersionManager(checkNotNull(versionStore),
checkNotNull(workspaceRoot)), workspaceRoot, "", commitInfo);
}
VersionEditor(@Nullable VersionEditor parent,
@Nonnull ReadWriteVersionManager vMgr,
@Nonnull NodeBuilder node,
@Nonnull String name,
@Nonnull CommitInfo commitInfo) {
this.parent = parent;
this.vMgr = checkNotNull(vMgr);
this.node = checkNotNull(node);
this.name = checkNotNull(name);
this.commitInfo = commitInfo;
}
@Override
public void enter(NodeState before, NodeState after)
throws CommitFailedException {
this.before = before;
this.after = after;
if (isVersionable()) {
vMgr.getOrCreateVersionHistory(node, commitInfo.getInfo());
}
// calculate isReadOnly state
if (after.exists() || isVersionable()) {
// deleted or versionable -> check if it was checked in
// a node cannot be modified if it was checked in
// unless it has a new identifier
isReadOnly = wasCheckedIn() && !hasNewIdentifier();
} else {
// otherwise inherit from parent
isReadOnly = parent != null && parent.isReadOnly;
}
}
@Override
public void leave(NodeState before, NodeState after)
throws CommitFailedException {
}
@Override
public void propertyAdded(PropertyState after)
throws CommitFailedException {
if (after.getName().equals(JCR_BASEVERSION)
&& this.after.hasProperty(JcrConstants.JCR_VERSIONHISTORY)
&& !this.after.hasProperty(JCR_ISCHECKEDOUT)
&& !this.before.exists()) {
// sentinel node for restore
vMgr.restore(node, after.getValue(Type.REFERENCE), null);
return;
}
if (!isReadOnly) {
return;
}
// JCR allows to put a lock on a checked in node.
if (after.getName().equals(JcrConstants.JCR_LOCKOWNER)
|| after.getName().equals(JcrConstants.JCR_LOCKISDEEP)) {
return;
}
throwCheckedIn("Cannot add property " + after.getName()
+ " on checked in node");
}
@Override
public void propertyChanged(PropertyState before, PropertyState after)
throws CommitFailedException {
if (!isVersionable()) {
if (!isVersionProperty(after) && isReadOnly) {
throwCheckedIn("Cannot change property " + after.getName()
+ " on checked in node");
}
return;
}
String propName = after.getName();
if (propName.equals(JCR_ISCHECKEDOUT)) {
if (wasCheckedIn()) {
vMgr.checkout(node);
} else {
vMgr.checkin(node);
}
} else if (propName.equals(JCR_BASEVERSION)) {
String baseVersion = after.getValue(Type.REFERENCE);
if (baseVersion.startsWith(RESTORE_PREFIX)) {
baseVersion = baseVersion.substring(RESTORE_PREFIX.length());
node.setProperty(JCR_BASEVERSION, baseVersion, Type.REFERENCE);
}
vMgr.restore(node, baseVersion, null);
} else if (isVersionProperty(after)) {
throwProtected(after.getName());
} else if (isReadOnly) {
throwCheckedIn("Cannot change property " + after.getName()
+ " on checked in node");
}
}
@Override
public void propertyDeleted(PropertyState before)
throws CommitFailedException {
if (isReadOnly) {
if (!isVersionProperty(before) && !isLockProperty(before)) {
throwCheckedIn("Cannot delete property on checked in node");
}
}
}
@Override
public Editor childNodeAdded(String name, NodeState after) {
return childNodeChanged(name, MISSING_NODE, after);
}
@Override
public Editor childNodeChanged(String name, NodeState before,
NodeState after) {
return new VersionEditor(this, vMgr, node.child(name), name, commitInfo);
}
@Override
public Editor childNodeDeleted(String name, NodeState before) {
return new VersionEditor(this, vMgr, MISSING_NODE.builder(), name, commitInfo);
}
/**
* Returns {@code true} if the node of this VersionDiff is versionable;
* {@code false} otherwise.
*
* @return whether the node is versionable.
*/
private boolean isVersionable() {
if (isVersionable == null) {
isVersionable = vMgr.isVersionable(after);
}
return isVersionable;
}
private boolean isVersionProperty(PropertyState state) {
return VersionConstants.VERSION_PROPERTY_NAMES
.contains(state.getName());
}
private boolean isLockProperty(PropertyState state) {
return LockConstants.LOCK_PROPERTY_NAMES.contains(state.getName());
}
/**
* @return {@code true} if this node <b>was</b> checked in. That is,
* this method checks the before state for the jcr:isCheckedOut
* property.
*/
private boolean wasCheckedIn() {
PropertyState prop = before.getProperty(JCR_ISCHECKEDOUT);
if (prop != null) {
return !prop.getValue(Type.BOOLEAN);
}
// new node or not versionable, check parent
return parent != null && parent.wasCheckedIn();
}
private boolean hasNewIdentifier() {
String beforeId = buildBeforeIdentifier(new StringBuilder()).toString();
String afterId = buildAfterIdentifier(new StringBuilder()).toString();
return !beforeId.equals(afterId);
}
private StringBuilder buildBeforeIdentifier(StringBuilder identifier) {
String uuid = before.getString(JCR_UUID);
if (uuid != null) {
identifier.append(uuid);
} else if (parent != null) {
parent.buildBeforeIdentifier(identifier);
identifier.append("/").append(name);
}
return identifier;
}
private StringBuilder buildAfterIdentifier(StringBuilder identifier) {
String uuid = after.getString(JCR_UUID);
if (uuid != null) {
identifier.append(uuid);
} else if (parent != null) {
parent.buildAfterIdentifier(identifier);
identifier.append("/").append(name);
}
return identifier;
}
    /**
     * Throws a {@link CommitFailedException} of type VERSION with the
     * NODE_CHECKED_IN error code and the given message.
     */
    private static void throwCheckedIn(String msg)
            throws CommitFailedException {
        throw new CommitFailedException(CommitFailedException.VERSION,
                VersionExceptionCode.NODE_CHECKED_IN.ordinal(), msg);
    }
    /**
     * Throws a {@link CommitFailedException} of type CONSTRAINT signalling
     * that the named property is protected and may not be modified.
     */
    private static void throwProtected(String name)
            throws CommitFailedException {
        throw new CommitFailedException(CommitFailedException.CONSTRAINT, 100,
                "Property is protected: " + name);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.FileUtil.HardLink;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.datanode.FSDataset.FSVolume;
import org.apache.hadoop.io.IOUtils;
/**
 * This class is used by datanodes to maintain meta data of its replicas.
 * It provides a general interface for meta information of a replica.
 */
public abstract class ReplicaInfo extends Block implements Replica {
  private FSVolume volume;      // volume where the replica belongs
  private File dir;             // directory where block & meta files belong

  /**
   * Constructor for a zero length replica.
   * @param blockId block id
   * @param genStamp replica generation stamp
   * @param vol volume where replica is located
   * @param dir directory path where block and meta files are located
   */
  ReplicaInfo(long blockId, long genStamp, FSVolume vol, File dir) {
    this(blockId, 0L, genStamp, vol, dir);
  }

  /**
   * Constructor.
   * @param block a block
   * @param vol volume where replica is located
   * @param dir directory path where block and meta files are located
   */
  ReplicaInfo(Block block, FSVolume vol, File dir) {
    this(block.getBlockId(), block.getNumBytes(),
        block.getGenerationStamp(), vol, dir);
  }

  /**
   * Constructor.
   * @param blockId block id
   * @param len replica length
   * @param genStamp replica generation stamp
   * @param vol volume where replica is located
   * @param dir directory path where block and meta files are located
   */
  ReplicaInfo(long blockId, long len, long genStamp,
      FSVolume vol, File dir) {
    super(blockId, len, genStamp);
    this.volume = vol;
    this.dir = dir;
  }

  /**
   * Get this replica's meta file name.
   * @return this replica's meta file name
   */
  private String getMetaFileName() {
    return getBlockName() + "_" + getGenerationStamp() + METADATA_EXTENSION;
  }

  /**
   * Get the full path of this replica's data file.
   * @return the full path of this replica's data file
   */
  File getBlockFile() {
    return new File(getDir(), getBlockName());
  }

  /**
   * Get the full path of this replica's meta file.
   * @return the full path of this replica's meta file
   */
  File getMetaFile() {
    return new File(getDir(), getMetaFileName());
  }

  /**
   * Get the volume where this replica is located on disk.
   * @return the volume where this replica is located on disk
   */
  FSVolume getVolume() {
    return volume;
  }

  /**
   * Set the volume where this replica is located on disk.
   */
  void setVolume(FSVolume vol) {
    this.volume = vol;
  }

  /**
   * Return the parent directory path where this replica is located.
   * @return the parent directory path where this replica is located
   */
  File getDir() {
    return dir;
  }

  /**
   * Set the parent directory where this replica is located.
   * @param dir the parent directory where the replica is located
   */
  void setDir(File dir) {
    this.dir = dir;
  }

  /**
   * Check if this replica has already been unlinked.
   * Subclasses that support hard links override this default.
   * @return true if the replica has already been unlinked
   *         or no need to be detached; false otherwise
   */
  boolean isUnlinked() {
    return true;                // no need to be unlinked
  }

  /**
   * Set that this replica is unlinked. No-op by default; subclasses that
   * track link state override this.
   */
  void setUnlinked() {
    // no need to be unlinked
  }

  /**
   * Copy specified file into a temporary file. Then rename the
   * temporary file to the original name. This will cause any
   * hardlinks to the original file to be removed. The temporary
   * files are created in the same directory. The temporary files will
   * be recovered (especially on Windows) on datanode restart.
   */
  private void unlinkFile(File file, Block b) throws IOException {
    File tmpFile = FSDataset.createTmpFile(b, FSDataset.getUnlinkTmpFile(file));
    try {
      FileInputStream in = new FileInputStream(file);
      try {
        FileOutputStream out = new FileOutputStream(tmpFile);
        try {
          IOUtils.copyBytes(in, out, 16*1024);
        } finally {
          out.close();
        }
      } finally {
        in.close();
      }
      // sanity-check that the copy is complete before replacing the original
      if (file.length() != tmpFile.length()) {
        throw new IOException("Copy of file " + file + " size " + file.length()+
                              " into file " + tmpFile +
                              " resulted in a size of " + tmpFile.length());
      }
      FileUtil.replaceFile(tmpFile, file);
    } catch (IOException e) {
      // best-effort cleanup of the partially-written temporary file
      boolean done = tmpFile.delete();
      if (!done) {
        DataNode.LOG.info("detachFile failed to delete temporary file " +
                          tmpFile);
      }
      throw e;
    }
  }

  /**
   * Remove a hard link by copying the block to a temporary place and
   * then moving it back.
   * @param numLinks number of hard links
   * @return true if copy is successful;
   *         false if it is already detached or no need to be detached
   * @throws IOException if there is any copy error
   */
  boolean unlinkBlock(int numLinks) throws IOException {
    if (isUnlinked()) {
      return false;
    }
    File file = getBlockFile();
    if (file == null || getVolume() == null) {
      throw new IOException("detachBlock:Block not found. " + this);
    }
    File meta = getMetaFile();
    if (meta == null) {
      throw new IOException("Meta file not found for block " + this);
    }

    // only copy when the file actually has more links than expected
    if (HardLink.getLinkCount(file) > numLinks) {
      DataNode.LOG.info("CopyOnWrite for block " + this);
      unlinkFile(file, this);
    }
    if (HardLink.getLinkCount(meta) > numLinks) {
      unlinkFile(meta, this);
    }
    setUnlinked();
    return true;
  }

  /**
   * Set this replica's generation stamp to be a newer one.
   * @param newGS new generation stamp
   * @throws IOException if the new generation stamp is not greater than the current one
   */
  void setNewerGenerationStamp(long newGS) throws IOException {
    long curGS = getGenerationStamp();
    if (newGS <= curGS) {
      throw new IOException("New generation stamp (" + newGS
          + ") must be greater than current one (" + curGS + ")");
    }
    setGenerationStamp(newGS);
  }

  @Override  //Object
  public String toString() {
    return getClass().getSimpleName()
        + ", " + super.toString()
        + ", " + getState()
        + "\n  getNumBytes()     = " + getNumBytes()
        + "\n  getBytesOnDisk()  = " + getBytesOnDisk()
        + "\n  getVisibleLength()= " + getVisibleLength()
        + "\n  getVolume()       = " + getVolume()
        + "\n  getBlockFile()    = " + getBlockFile();
  }
}
| |
/*******************************************************************************
* Copyright 2015 Dorian Perkins, Younghwan Go, Nitin Agrawal, Akshat Aranya
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.necla.simba.client;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
import net.jarlehansen.protobuf.javame.ByteString;
import org.iq80.leveldb.ReadOptions;
import android.app.Service;
import android.content.ContentValues;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.RemoteException;
import android.util.Log;
import com.necla.simba.protocol.ActivePull;
import com.necla.simba.protocol.Column;
import com.necla.simba.protocol.CreateTable;
import com.necla.simba.protocol.SimbaMessage;
import com.necla.simba.protocol.NotificationPull;
import com.necla.simba.protocol.ObjectFragment;
import com.necla.simba.protocol.ObjectHeader;
import com.necla.simba.protocol.SimbaConsistency;
import com.necla.simba.protocol.SubscribeTable;
import com.necla.simba.protocol.SyncHeader;
import com.necla.simba.protocol.SyncRequest;
import com.necla.simba.protocol.UnsubscribeTable;
/***
* This class is the core of the Simba content service.
*/
public class SimbaContentService extends Service {
private static final String TAG = "SimbaContentService";
private final ConcurrentHashMap<String, SQLiteDatabase> client_dbhandle = new ConcurrentHashMap<String, SQLiteDatabase>();
private SimbaNetworkManager networkManager;
private SyncScheduler syncScheduler;
private WriteTimerManager writeTimerManager;
private SharedPreferences settings;
private ConnectionState cs;
private SimbaLevelDB mldb;
private static List<String> tblBitmap = new ArrayList<String>();
private int bitmapId = 0;
private Map<String, ArrayList<RowObject>> appToObj = new HashMap<String, ArrayList<RowObject>>();
private Map<String, byte[]> writeChunks = new HashMap<String, byte[]>();
private Map<String, byte[]> readChunks = new HashMap<String, byte[]>();
private Handler handler;
private static boolean isConnected = false;
private static boolean isRunning = false;
private static boolean createDirIfNotExists(String dir) {
boolean ret = true;
File file = new File(dir);
if (!file.exists()) {
if (!file.mkdirs()) {
Log.e(TAG, "Could not create database directory: " + dir);
ret = false;
}
}
return ret;
}
    @Override
    public void onCreate() {
        // Service startup: prepare storage, read connection preferences,
        // wire up the internal event handler, then bring up networking,
        // sync scheduling and the LevelDB object store (order matters).
        Log.v(TAG, "SCS onCreate");
        isRunning = true;
        // make sure the database directory exists before any DB is opened
        createDirIfNotExists(Preferences.DBPATH);
        // configure logging
        ConfigLog4j.configure();
        settings = getSharedPreferences(Preferences.PREFS_NAME, 0);
        String hostname = settings.getString("hostname",
                Preferences.DEFAULT_HOST);
        int port = settings.getInt("port", Preferences.DEFAULT_PORT);
        // react to host/port preference changes while the service runs
        settings.registerOnSharedPreferenceChangeListener(new OnSharedPreferenceChangeListener() {
            @Override
            public void onSharedPreferenceChanged(SharedPreferences prefs,
                    String key) {
                if (key.equals("hostname") || key.equals("port"))
                    networkManager.updateNetworkSettings(prefs.getString(
                            "hostname", Preferences.DEFAULT_HOST), prefs
                            .getInt("port", Preferences.DEFAULT_PORT));
            }
        });
        metadata = new Metadata(Preferences.DBPATH);
        // handler for internal connectivity / authentication events posted
        // by the network layer; also broadcasts connection-state changes
        handler = new Handler() {
            @Override
            public void handleMessage(Message msg) {
                switch (msg.what) {
                case InternalMessages.NETWORK_CONNECTED: {
                    networkManager.tokenManager.networkConnected();
                    isConnected = true;
                    Intent intent = new Intent(
                            SimbaBroadcastReceiver.CONNECTION_STATE_CHANGED);
                    intent.putExtra(
                            SimbaBroadcastReceiver.EXTRA_CONNECTION_STATE,
                            true);
                    sendBroadcast(intent);
                }
                    break;
                case InternalMessages.NETWORK_DISCONNECTED: {
                    isConnected = false;
                    Intent intent = new Intent(
                            SimbaBroadcastReceiver.CONNECTION_STATE_CHANGED);
                    intent.putExtra(
                            SimbaBroadcastReceiver.EXTRA_CONNECTION_STATE,
                            false);
                    sendBroadcast(intent);
                }
                    break;
                case InternalMessages.CLIENT_LOST: {
                    // a bound client went away; drop its registration
                    String app = (String) msg.obj;
                    Log.v(TAG, "Client " + app
                            + " is no longer connected, dropping");
                    boolean ret = unregisterAppInternal(app);
                    if (ret)
                        AppAuthenticationManager.unregisterByUID(app);
                }
                    break;
                case InternalMessages.AUTHENTICATION_DONE: {
                    // token available: flush messages queued before auth
                    networkManager.processPendingMessages();
                }
                    break;
                }
            }
        };
        // get the connection state in SCS and pass it around
        cs = new ConnectionState(this.getApplicationContext());
        networkManager = new SimbaNetworkManager(this,
                this.getApplicationContext(), hostname, port, handler, cs);
        syncScheduler = new SyncScheduler(networkManager, cs);
        writeTimerManager = new WriteTimerManager(syncScheduler, cs);
        // create SimbaLevelDB
        mldb = new SimbaLevelDB();
    }
private Metadata metadata;
private SimbaChunkList chunklist;
private final IBinder mBinder = new SimbaContentServiceAPI.Stub() {
        /**
         * Register the client, create its database if necessary, store its
         * callback, recover previously-created tables, and re-subscribe any
         * table that had a read-sync setting persisted in metadata.
         *
         * @param uid
         *            user id
         * @param callback
         *            callback handle for client
         * @return the ticket id to be used for subsequent communication
         */
        public String registerApp(String uid, ISCSClient callback)
                throws RemoteException {
            String tid = null;
            SQLiteDatabase db;
            // a re-register from the same app replaces the old registration
            if (ClientCallbackManager.clientExists(uid)) {
                Log.w(TAG,
                        "Multiple registers from same app are not supported, removing previous registration");
                boolean ret = unregisterAppInternal(uid);
                if (ret)
                    AppAuthenticationManager.unregisterByUID(uid);
            }
            tid = AppAuthenticationManager.register(uid);
            ClientCallbackManager.addClient(uid, callback, handler);
            // per-app database: one SQLite file named <uid>.db, optionally
            // opened with write-ahead logging depending on preferences
            if (Preferences.WAL) {
                Log.v(TAG, "Callback added: " + callback
                        + "; creating app database with WAL for: " + uid);
                db = SQLiteDatabase.openDatabase(Preferences.DBPATH + uid
                        + ".db", null, SQLiteDatabase.CREATE_IF_NECESSARY
                        | SQLiteDatabase.ENABLE_WRITE_AHEAD_LOGGING, null);
                // Log.d(TAG, "Checkpointing " + uid + "DB after create");
                // db.rawQuery("PRAGMA wal_checkpoint", null);
            } else {
                Log.v(TAG, "Callback added: " + callback
                        + "; creating app database in default mode for: " + uid);
                db = SQLiteDatabase.openOrCreateDatabase(Preferences.DBPATH
                        + uid + ".db", null);
            }
            client_dbhandle.put(uid, db);
            List<SimbaTable> mtbls = recoverTables(uid, db);
            SimbaTableManager.addRecoveredTables(uid, mtbls);
            // subscribe tables if they have read sync setting
            // TODO: this is very inefficient!
            for (SimbaTable t : mtbls) {
                int period = 0;
                if ((period = metadata.getInteger(uid, t.getTblId(),
                        "readperiod", -1)) != -1) {
                    sub_tbl(uid, t.getTblId(), period, t.getSyncDT(false));
                }
            }
            Log.v(TAG, db.getPath() + " opened");
            return tid;
        }
/**
* unregister the client
*/
public boolean unregisterApp(String tid) throws RemoteException {
String uid = AppAuthenticationManager.authenticate(tid);
if (uid == null)
return false;
// TODO: close any objects that were opened by this app
boolean ret = unregisterAppInternal(uid);
if (ret)
AppAuthenticationManager.unregister(tid);
return ret;
}
        /**
         * @return {@code true} while the service has a live connection to the
         *         Simba server (flag is toggled by the internal handler on
         *         NETWORK_CONNECTED / NETWORK_DISCONNECTED messages).
         */
        public boolean isNetworkConnected() {
            return isConnected;
        }
        /**
         * Apply sync settings to a table: enable sync, set delay tolerance
         * and network preference. {@code rw == true} configures write sync,
         * {@code false} configures read sync.
         *
         * @return the same table, for chaining
         */
        private SimbaTable mtblSyncSetting(SimbaTable mtbl, int period,
                int delay, ConnState syncpref, boolean rw) {
            mtbl.enableSync(period, rw);
            mtbl.setSyncDT(delay, rw);
            mtbl.setSyncNWpref(syncpref, rw);
            return mtbl;
        }
private List<SimbaTable> recoverTables(String uid, SQLiteDatabase db) {
ArrayList<SimbaTable> mtbls = new ArrayList<SimbaTable>();
Cursor cursor = db.query("sqlite_master", new String[] { "name",
"sql" }, null, null, null, null, null, null);
cursor.moveToFirst();
do {
String tbl_name = cursor.getString(0);
if (tbl_name.endsWith("_deleted")
|| tbl_name.endsWith("_server")
|| tbl_name.endsWith("_chunk")
|| tbl_name.equals("android_metadata")
|| tbl_name.equals("sqlite_sequence"))
continue;
String schema = cursor.getString(1);
if (schema == null)
continue;
schema = SimbaTable.schemaToColumnSchema(schema);
Log.v(TAG, "Recovered table " + tbl_name + " schema=" + schema);
int lvl = restoreConsistencyLevel(uid, tbl_name);
TableProperties props = restoreTableProperties(uid, tbl_name);
SimbaTable mtbl = new SimbaTable(uid, db, tbl_name, schema,
lvl, props, metadata, true);
int period = 0, dt = 0;
ConnState syncpref = ConnState.TG;
// create table message
CreateTable.Builder t = CreateTable
.newBuilder()
.setApp(uid)
.setConsistencyLevel(
SimbaConsistency.newBuilder()
.setType(SimbaConsistency.Type.CAUSAL)
.build()).setTbl(tbl_name);
String[] columns = mtbl.getSchemaSQL().split("\\s+");
for (int i = 0; i < columns.length; i += 2) {
int type = Column.Type.VARCHAR;
if (columns[i + 1].startsWith("VARCHAR")) {
type = Column.Type.VARCHAR;
} else if (columns[i + 1].startsWith("INT")) {
type = Column.Type.INT;
} else if (columns[i + 1].startsWith("BIGINT")) {
type = Column.Type.OBJECT;
}
t.addElementColumns(Column.newBuilder().setName(columns[i])
.setType(type).build());
t.setConsistencyLevel(SimbaConsistency.newBuilder()
.setType(lvl).build());
}
SimbaMessage.Builder b = SimbaMessage.newBuilder()
.setType(SimbaMessage.Type.CREATE_TABLE)
.setCreateTable(t.build());
networkManager.sendTokenedMessage(b);
// get prefs from metadata table and assign to above variables
period = metadata.getInteger(uid, tbl_name, "writeperiod", -1);
if (period != -1) {
dt = metadata.getInteger(uid, tbl_name, "writedt", -1);
syncpref.setValue(metadata.getInteger(uid, tbl_name,
"writesyncpref", -1));
mtblSyncSetting(mtbl, period, dt, syncpref, true);
Log.v(TAG,
"Recovery: Table " + uid + "/" + tbl_name
+ " set to write sync with a period of "
+ mtbl.getSyncPeriod(true));
writeTimerManager.addTask(mtbl);
}
period = metadata.getInteger(uid, tbl_name, "readperiod", -1);
if (period != -1) {
dt = metadata.getInteger(uid, tbl_name, "readdt", -1);
syncpref.setValue(metadata.getInteger(uid, tbl_name,
"readsyncpref", -1));
mtblSyncSetting(mtbl, period, dt, syncpref, false);
Log.v(TAG,
"Recovery: Table " + uid + "/" + tbl_name
+ " set to read sync with a period of "
+ mtbl.getSyncPeriod(false));
// cannot subscribe before adding to TableManager yet!
// sub_tbl(uid, tbl_name, period, mtbl.getSyncDT(false));
}
// set torn rows for rows with open objects
List<Long> torn_objs = new ArrayList<Long>();
mtbl.setTornRows(torn_objs);
// recover dirtyChunkList from the DIRTYCHUNKLIST table
if (SimbaChunkList.recoverDirtyChunkList(mtbl, torn_objs)) {
// recover dirty rows with dirtyChunkList
mtbl.recoverDirtyRows();
}
int conflictedRows = 0;
// remove any obsolete rows in TBL_CONFLICT
conflictedRows += mtbl.recoverConflictTable();
conflictedRows += mtbl.recoverDeleteTable();
mtbls.add(mtbl);
// if there is any conflict rows, alert the client app
if (conflictedRows > 0) {
try {
ISCSClient client = ClientCallbackManager
.getCallback(uid);
if (client != null) {
client.syncConflict(tbl_name, conflictedRows);
}
} catch (RemoteException e) {
// TODO: handle app crash while recovering
// e.printStackTrace();
}
}
} while (cursor.moveToNext());
cursor.close();
return mtbls;
}
        /** Persist a table's consistency level in the Simba metadata store. */
        private void saveConsistencyLevel(String uid, String tbl, int lvl) {
            metadata.put(uid, tbl, "consistency_level", lvl);
        }
        /** Persist the table's partial-sync flag in the metadata store. */
        private void saveTableProperties(String uid, String tbl,
                TableProperties props) {
            metadata.put(uid, tbl, "partial", props.isPartial());
        }
        /**
         * Read a table's persisted consistency level; returns -1 when no
         * value was stored.
         */
        private int restoreConsistencyLevel(String uid, String tbl) {
            return metadata.getInteger(uid, tbl, "consistency_level", -1);
        }
        /**
         * Rebuild TableProperties from the metadata store; the partial flag
         * defaults to false when not stored.
         */
        private TableProperties restoreTableProperties(String uid, String tbl) {
            boolean partial = metadata.getBoolean(uid, tbl, "partial", false);
            return new TableProperties(partial);
        }
        /**
         * Subscribe a table for server-pushed updates, starting from
         * revision -1 (full refresh).
         *
         * NOTE(review): the {@code syncpref} parameter is accepted but not
         * used in this method — confirm whether that is intentional.
         *
         * @return false when the ticket does not authenticate; true otherwise
         */
        public boolean subscribeTable(String tid, String tbl, int period,
                int delay, ConnState syncpref) throws RemoteException {
            String uid = AppAuthenticationManager.authenticate(tid);
            if (uid == null)
                return false;
            // insert to table bitmap
            Log.d(TAG, "Inserting <" + uid + ", " + tbl + "> to table bitmap");
            tblBitmap.add(uid + "," + tbl);
            // subscribe to table with _rev -1
            SubscribeTable t = SubscribeTable
                    .newBuilder()
                    .setApp(uid)
                    .setTbl(tbl)
                    .setDelayTolerance(
                            (int) (delay * Preferences.READ_DT_SERVER_RATIO))
                    .setPeriod(period).setRev(-1).build();
            SimbaMessage.Builder b = SimbaMessage.newBuilder()
                    .setType(SimbaMessage.Type.SUB_TBL).setSubscribeTable(t);
            // SimbaLogger.log("up, " + b.toString() + ", " +
            // b.build().toString() + ", " + b.build().computeSize());
            networkManager.sendTokenedMessage(b);
            return true;
        }
        /**
         * Create a client table: execute the CREATE TABLE DDL, add Simba's
         * bookkeeping columns, register the table locally, persist its
         * consistency level/properties, and announce it to the server.
         * Creating an already-existing table is treated as success.
         *
         * @param tid client ticket id
         * @param tbl table name
         * @param cmd column definition list (the body of the CREATE TABLE)
         * @param lvl consistency level (SimbaConsistency.Type value)
         * @param props table properties (e.g. partial sync)
         * @return true on success or when the table already exists
         */
        public boolean createTable(String tid, String tbl, String cmd, int lvl,
                TableProperties props) throws RemoteException {
            boolean ret = false;
            String uid = AppAuthenticationManager.authenticate(tid);
            if (uid != null) {
                SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
                if (mtbl != null) {
                    // idempotent: table already registered for this app
                    Log.v(TAG, "table " + uid + "/" + tbl + " already created");
                    ret = true;
                } else {
                    SQLiteDatabase db = client_dbhandle.get(uid);
                    String sql = "CREATE TABLE IF NOT EXISTS " + tbl + " ("
                            + cmd + ");";
                    db.execSQL(sql);
                    /*
                     * add extra fields: row id, revision number, dirty flag
                     * (row, obj)
                     */
                    try {
                        db.execSQL("ALTER TABLE " + tbl
                                + " ADD COLUMN _id VARCHAR;");
                        db.execSQL("ALTER TABLE " + tbl
                                + " ADD COLUMN _rev INT DEFAULT -1;");
                        db.execSQL("ALTER TABLE " + tbl
                                + " ADD COLUMN _torn BIT DEFAULT 0;");
                        db.execSQL("ALTER TABLE " + tbl
                                + " ADD COLUMN _dirty BIT DEFAULT 1;");
                        db.execSQL("ALTER TABLE " + tbl
                                + " ADD COLUMN _dirtyObj BIT DEFAULT 0;");
                        db.execSQL("ALTER TABLE " + tbl
                                + " ADD COLUMN _openObj INT DEFAULT 0;");
                        db.execSQL("ALTER TABLE " + tbl
                                + " ADD COLUMN _sync BIT DEFAULT 0;");
                        db.execSQL("ALTER TABLE " + tbl
                                + " ADD COLUMN _conflict BIT DEFAULT 0;");
                    } catch (SQLiteException e) {
                        /* do nothing: the column already exists */
                    }
                    ret = SimbaTableManager.addTable(uid, tbl,
                            new SimbaTable(uid, db, tbl, cmd, lvl, props,
                                    metadata, false));
                    saveConsistencyLevel(uid, tbl, lvl);
                    saveTableProperties(uid, tbl, props);
                    /* send CreateTable message to server */
                    CreateTable.Builder t = CreateTable.newBuilder()
                            .setApp(uid).setTbl(tbl);
                    // cmd is "<name> <TYPE> <name> <TYPE> ..."
                    String[] columns = cmd.split("\\s+");
                    for (int i = 0; i < columns.length; i += 2) {
                        int type = Column.Type.VARCHAR;
                        if (columns[i + 1].startsWith("VARCHAR")) {
                            type = Column.Type.VARCHAR;
                        } else if (columns[i + 1].startsWith("INT")) {
                            type = Column.Type.INT;
                        } else if (columns[i + 1].startsWith("BIGINT")) {
                            // BIGINT columns hold object ids
                            type = Column.Type.OBJECT;
                        }
                        t.addElementColumns(Column.newBuilder()
                                .setName(columns[i]).setType(type).build());
                        // NOTE(review): setConsistencyLevel is re-applied on
                        // every loop iteration; hoisting it out looks
                        // equivalent — confirm before changing
                        t.setConsistencyLevel(SimbaConsistency.newBuilder()
                                .setType(lvl).build());
                    }
                    SimbaMessage.Builder b = SimbaMessage.newBuilder()
                            .setType(SimbaMessage.Type.CREATE_TABLE)
                            .setCreateTable(t.build());
                    // SimbaLogger.log("up, " + b.toString() + ", " +
                    // b.build().toString() + ", " + b.build().computeSize());
                    networkManager.sendTokenedMessage(b);
                }
            }
            Log.v(TAG, SimbaTableManager.dump());
            return ret;
        }
        /**
         * Insert a row into the table. Returns a RowObject per object column
         * (ordered by {@code objectOrdering}) so the client can stream object
         * data separately; an empty list means the row had no objects.
         *
         * @param tid client ticket id
         * @param tbl table name
         * @param values column values for the new row
         * @param objectOrdering names of object columns in client order
         * @return row/object handles for the written row's object columns
         */
        public List<RowObject> write(String tid, String tbl,
                ContentValues values, String[] objectOrdering)
                throws RemoteException {
            List<RowObject> ro_list = new ArrayList<RowObject>();
            String uid = AppAuthenticationManager.authenticate(tid);
            if (uid != null) {
                SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
                ro_list = mtbl.write(values, objectOrdering);
                // set mapping between application and opened objects
                // mapping between object & row is handled in SimbaTable!
                ArrayList<RowObject> objects = appToObj.get(uid);
                if (objects == null) {
                    objects = new ArrayList<RowObject>();
                }
                for (RowObject ro : ro_list) {
                    objects.add(ro);
                    Log.d(TAG, "Setting mapping between app: " + uid
                            + ", obj_id: " + ro.GetObjectID());
                }
                appToObj.put(uid, objects);
                /* YGO: sync immediately if strong consistency without object */
                if (ro_list.size() == 0
                        && mtbl.getConsistencyLevel() == SimbaConsistency.Type.STRONG) {
                    syncStrongConsistency(mtbl);
                }
            } else {
                /* client not registered */
            }
            return ro_list;
        }
public SimbaCursorWindow read(String tid, String tbl, String[] projs,
String sels, String[] selArgs, String sortOrder)
throws RemoteException {
SimbaCursorWindow ret = null;
String uid = AppAuthenticationManager.authenticate(tid);
if (uid != null) {
SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
ret = mtbl.read(projs, sels, selArgs, sortOrder);
} else {
/* client not registered */
}
return ret;
}
        /**
         * Update rows matching the selection. Torn rows (rows with
         * incompletely-written objects) are excluded by appending a
         * {@code _torn = 0} filter to the caller's selection.
         *
         * NOTE(review): when {@code sels} is null no torn-row filter is
         * applied at all — confirm whether torn rows should be excluded in
         * that case too.
         *
         * @return row/object handles for the updated rows' object columns
         */
        public List<RowObject> update(String tid, String tbl,
                ContentValues values, String sels, String[] selArgs,
                String[] objectOrdering) throws RemoteException {
            List<RowObject> ro_list = new ArrayList<RowObject>();
            String uid = AppAuthenticationManager.authenticate(tid);
            String[] newSelArgs = null;
            // remove torn rows from selection
            if (sels != null) {
                if (selArgs == null) {
                    sels += " AND _torn = 0";
                } else {
                    // extend the bound-args array with the torn flag value
                    sels += " AND _torn = ?";
                    newSelArgs = new String[selArgs.length + 1];
                    int i = 0;
                    for (i = 0; i < selArgs.length; i++) {
                        newSelArgs[i] = selArgs[i];
                    }
                    newSelArgs[i] = "0";
                }
            }
            if (uid != null) {
                SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
                if (newSelArgs == null) {
                    ro_list = mtbl
                            .update(values, sels, selArgs, objectOrdering);
                } else {
                    ro_list = mtbl.update(values, sels, newSelArgs,
                            objectOrdering);
                }
                // set mapping between application and opened objects
                // mapping between object & row is handled in SimbaTable!
                ArrayList<RowObject> objects = appToObj.get(uid);
                if (objects == null) {
                    objects = new ArrayList<RowObject>();
                }
                for (RowObject ro : ro_list) {
                    objects.add(ro);
                    Log.d(TAG, "Setting mapping between app: " + uid
                            + ", obj_id: " + ro.GetObjectID());
                }
                appToObj.put(uid, objects);
                /* YGO: sync immediately if strong consistency without object */
                if (ro_list.size() == 0
                        && mtbl.getConsistencyLevel() == SimbaConsistency.Type.STRONG) {
                    syncStrongConsistency(mtbl);
                }
            } else {
                /* client not registered */
            }
            return ro_list;
        }
public int delete(String tid, String tbl, String sels, String[] selArgs)
throws RemoteException {
int ret = 0;
String uid = AppAuthenticationManager.authenticate(tid);
if (uid != null) {
SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
ret = mtbl.delete(sels, selArgs);
/* YGO: sync if strong consistency */
if (mtbl.getConsistencyLevel() == SimbaConsistency.Type.STRONG) {
syncStrongConsistency(mtbl);
}
} else {
/* client not registered */
}
return ret;
}
public void writeSyncOneshot(String tid, String tbl, int delay)
throws RemoteException {
/* TODO: special case: client-initiated one-time sync */
String uid = AppAuthenticationManager.authenticate(tid);
if (uid != null) {
SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
write_sync_oneshot(mtbl, delay);
} else {
/* client not registered */
}
}
public void registerPeriodicWriteSync(String tid, String tbl,
int period, int dt, ConnState syncpref) throws RemoteException {
String uid = AppAuthenticationManager.authenticate(tid);
boolean rw = true;
if (uid != null) {
SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
if (!mtbl.isSyncEnabled(rw)) {
mtblSyncSetting(mtbl, period, dt, syncpref, rw);
Log.v(TAG, "Table " + uid + "/" + tid
+ " being set to write sync with a period of "
+ mtbl.getSyncPeriod(rw));
writeTimerManager.addTask(mtbl);
} else {
Log.v(TAG,
"Table "
+ uid
+ "/"
+ tid
+ " curently already set to write sync with a period of "
+ mtbl.getSyncPeriod(rw));
Log.v(TAG,
"Updating WriteSync preferences with new values; period:"
+ mtbl.getSyncPeriod(rw) + ", new Period: "
+ period + ", DT:" + mtbl.getSyncDT(rw)
+ ", syncpref:" + mtbl.getSyncNWpref(rw));
// TODO: replace by writeTimerManager.updateTask(mtbl);
writeTimerManager.removeTask(mtbl);
mtblSyncSetting(mtbl, period, dt, syncpref, rw);
writeTimerManager.addTask(mtbl);
}
/* store period and DT to Simba meta-data table */
metadata.put(uid, tbl, "writeperiod", period);
metadata.put(uid, tbl, "writedt", dt);
metadata.put(uid, tbl, "writesyncpref", syncpref.getValue());
} else {
Log.v(TAG,
"registerPeriodicWriteSync client not registered tid="
+ tid);
}
}
public void beginCR(String tid, String tbl) throws RemoteException {
String uid = AppAuthenticationManager.authenticate(tid);
if (uid != null) {
SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
mtbl.beginCR();
} else {
/* client not registered */
}
}
        /**
         * Leave conflict-resolution mode. If server pull data was held off
         * while CR was active, request the missed updates now with a
         * NOTIFICATION_PULL message starting from the table's current
         * revision.
         */
        public void endCR(String tid, String tbl) throws RemoteException {
            String uid = AppAuthenticationManager.authenticate(tid);
            if (uid != null) {
                SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
                // if PullData was stopped in the past, send NotificationPull
                if (mtbl.isSendNotificationPull()) {
                    mtbl.setSendNotificationPull(false);
                    mtbl.endCR();
                    Log.d(TAG,
                            "Sending NOTIFICATION_PULL after endCR! table: "
                                    + mtbl.getTblId() + ", fromVersion: "
                                    + mtbl.getRev());
                    NotificationPull.Builder np = NotificationPull.newBuilder()
                            .setApp(mtbl.getAppId()).setTbl(mtbl.getTblId())
                            .setFromVersion(mtbl.getRev());
                    int seq = SeqNumManager.getSeq();
                    SimbaMessage.Builder m = SimbaMessage.newBuilder()
                            .setSeq(seq)
                            .setType(SimbaMessage.Type.NOTIFICATION_PULL)
                            .setNotificationPull(np.build());
                    networkManager.sendTokenedMessage(m);
                    // track the pending request so the reply can be matched
                    SeqNumManager.addPendingSeq(seq, m.build());
                } else {
                    mtbl.endCR();
                }
            } else {
                /* client not registered */
            }
        }
public List<InternalDataRow> getConflictedRows(String tid, String tbl)
throws RemoteException {
String uid = AppAuthenticationManager.authenticate(tid);
List<InternalDataRow> ret = new ArrayList<InternalDataRow>();
if (uid != null) {
SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
ret = mtbl.getConflictedRows();
} else {
/* client not registered */
}
return ret;
}
public void resolveConflict(String tid, String tbl, String id,
CRChoice choice) throws RemoteException {
String uid = AppAuthenticationManager.authenticate(tid);
if (uid != null) {
SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
mtbl.resolveConflict(id, choice);
} else {
/* client not registered */
}
}
public void readSyncOneshot(String tid, String tbl)
throws RemoteException {
String uid = AppAuthenticationManager.authenticate(tid);
String token = networkManager.tokenManager.getToken();
if (uid != null) {
if (token == null)
throw new RemoteException("Authentication not complete yet");
SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
List<String> payload = new ArrayList<String>();
payload.add(mtbl.getAppId());
payload.add(mtbl.getTblId());
int seq = SeqNumManager.getSeq();
ActivePull ap = ActivePull.newBuilder().setApp(mtbl.getAppId())
.setTbl(mtbl.getTblId()).build();
SimbaMessage m = SimbaMessage.newBuilder()
.setType(SimbaMessage.Type.ACTIVE_PULL).setSeq(seq)
.setToken(networkManager.tokenManager.getToken())
.setActivePull(ap).build();
SeqNumManager.addPendingSeq(seq, m);
networkManager.sendMessage(m);
} else {
/* client not registered */
}
}
/*
* Redundant function? public void registerPeriodicReadSync(String tid,
* String tbl, int period, int delay, ConnState syncpref) throws
* RemoteException { boolean rw = false; String uid =
* AppAuthenticationManager.authenticate(tid); if (uid != null) {
* SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl); if
* (!mtbl.isSyncEnabled(rw)) { mtblSyncSetting(mtbl, period, delay,
* syncpref, rw); Log.v(TAG, "1--Subscribing read timer on " + tbl +
* " with p=" + period + " and n/w sync choice=" + syncpref); } else {
* Log.v(TAG, "Table " + uid + "/" + tid +
	 * " currently already set to read sync with a period of " +
* mtbl.getSyncPeriod(rw)); Log.v(TAG,
* "1--Updating ReadSync preferences with new values; period:" +
* mtbl.getSyncPeriod(rw) + ", syncpref:" + mtbl.getSyncNWpref(rw));
* mtblSyncSetting(mtbl, period, delay, syncpref, rw);
*
* } metadata.put(uid, tbl, "readperiod", period); metadata.put(uid,
* tbl, "readdt", delay); metadata.put(uid, tbl, "readsyncpref",
* syncpref.getValue()); } else { // client not registered } }
*/
public void subscribePeriodicReadSync(String tid, String tbl,
int period, int dt, ConnState syncpref) throws RemoteException {
String uid = AppAuthenticationManager.authenticate(tid);
boolean rw = false;
if (uid != null) {
SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
if (!mtbl.isSyncEnabled(rw)) {
mtblSyncSetting(mtbl, period, dt, syncpref, rw);
Log.v(TAG, "2--Subscribing read timer on " + tbl
+ " with p=" + period + " and n/w sync choice="
+ syncpref);
// sub_tbl(uid, tbl, period, dt);
} else {
Log.v(TAG,
"Table "
+ uid
+ "/"
+ tid
+ " cuurently already set to read sync with a period of "
+ mtbl.getSyncPeriod(rw));
Log.v(TAG,
"2--Updating ReadSync preferences with new values; period:"
+ mtbl.getSyncPeriod(rw) + ", new Period: "
+ period + ", syncpref:"
+ mtbl.getSyncNWpref(rw));
mtblSyncSetting(mtbl, period, dt, syncpref, rw);
}
sub_tbl(uid, tbl, period, dt);
metadata.put(uid, tbl, "readperiod", period);
metadata.put(uid, tbl, "readdt", dt);
metadata.put(uid, tbl, "readsyncpref", syncpref.getValue());
} else {
/* client not registered */
}
}
public void unsubscribePeriodicReadSync(String tid, String tbl)
throws RemoteException {
String uid = AppAuthenticationManager.authenticate(tid);
if (uid != null) {
Log.v(TAG, "Unsubscribing read timer on " + tbl);
unsub_tbl(uid, tbl);
} else {
/* client not registered */
}
}
@Override
public void unregisterPeriodicWriteSync(String tid, String tbl)
throws RemoteException {
Log.v(TAG, "unregister for tbl=" + tbl);
boolean rw = true;
String uid = AppAuthenticationManager.authenticate(tid);
if (uid != null) {
SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
if (mtbl.isSyncEnabled(rw)) {
// mtbl.disableSync();
writeTimerManager.removeTask(mtbl);
/* store period and DT to Simba meta-data table */
metadata.remove(uid, tbl, new String[] { "writeperiod",
"writedt", "writesyncpref" });
} else {
Log.v(TAG, "Table " + uid + "/" + tid
+ "has syncing disabled already");
/* table not registered for syncing */
}
} else {
/* client not registered */
}
}
        /* get table schema */
        // Returns the CREATE-TABLE SQL of the client's table.
        // NOTE(review): if authentication fails, uid is null and the calls
        // below will likely NPE — confirm callers always pass a valid tid.
        public String getSchemaSQL(String tid, String tbl) {
            String uid = AppAuthenticationManager.authenticate(tid);
            SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
            return mtbl.getSchemaSQL();
        }
        /* write to leveldb */
        // Writes one chunk of an object's data into LevelDB on behalf of a
        // client app. When the caller delivers the chunk in partial buffers,
        // the pieces are accumulated in writeChunks (keyed "<obj_id>,<chunk>")
        // until the chunk is complete. Returns bytes written/accepted, or -1
        // when the object is not currently open for this app.
        public int writeStream(String tid, String tbl, long obj_id,
                int chunk_num, byte[] buffer, int offset, int length) {
            String uid = AppAuthenticationManager.authenticate(tid);
            // NOTE(review): mtbl is unused below — kept in case getTable()
            // has a required side effect; confirm.
            SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
            // TODO: handle update with strong consistency
            // 1) if partial chunk, check whether object already exists
            // 2) read from it, then overwrite it
            // if partial buffer is received, merge with full buffer
            if (buffer.length < length) {
                String key = Long.toString(obj_id) + ","
                        + Integer.toString(chunk_num);
                byte[] buf;
                if (writeChunks.containsKey(key)) {
                    // Continue filling the staging buffer for this chunk.
                    buf = writeChunks.get(key);
                } else {
                    buf = new byte[length];
                    Log.v(TAG, "Trying for Buf length: " + length
                            + " Allocated buf: " + buf.length);
                }
                try {
                    System.arraycopy(buffer, 0, buf, offset, buffer.length);
                } catch (Exception e) {
                    // NOTE(review): an out-of-bounds copy is only logged and
                    // execution continues with a partially-filled buffer —
                    // confirm this best-effort behavior is intended.
                    e.printStackTrace();
                    Log.v(TAG, "***************Arraycopy failure**********");
                }
                // full buffer is complete
                if (buffer.length + offset == length) {
                    // Commit the assembled chunk only if the object is open
                    // for this app.
                    ArrayList<RowObject> ro_list = appToObj.get(uid);
                    for (RowObject ro : ro_list) {
                        if (ro.GetObjectID() == obj_id) {
                            writeChunks.remove(key);
                            return SimbaLevelDB.write(obj_id, chunk_num, buf,
                                    length);
                        }
                    }
                } else {
                    // More fragments of this chunk are still expected.
                    writeChunks.put(key, buf);
                    return buffer.length;
                }
            } else {
                // Whole chunk arrived in a single call.
                ArrayList<RowObject> ro_list = appToObj.get(uid);
                for (RowObject ro : ro_list) {
                    if (ro.GetObjectID() == obj_id) {
                        return SimbaLevelDB.write(obj_id, chunk_num, buffer,
                                length);
                    }
                }
            }
            return -1;
        }
        /* truncate the length of object */
        // Truncates the stored object to 'length' bytes. Under STRONG
        // consistency the truncate goes through the table (so it can be
        // synced); otherwise it is applied directly to LevelDB.
        public int truncate(String tid, String tbl, String row_id, long obj_id,
                int length) {
            String uid = AppAuthenticationManager.authenticate(tid);
            SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
            Log.d(TAG, "Truncate obj: " + obj_id + ", length: " + length);
            if (mtbl.getConsistencyLevel() == SimbaConsistency.Type.STRONG) {
                /* YGO: truncate object at strong consistency */
                mtbl.truncate(obj_id, length);
                return length;
            } else {
                // truncate only if not strong consistency
                return SimbaLevelDB.truncate(obj_id, length, true);
            }
        }
        /* read from leveldb */
        // Reads object data into 'buffer'. If the caller's buffer is smaller
        // than the requested length, the full region is read once into a
        // staging buffer (readChunks, keyed "<obj_id>,<chunk>") and then
        // served piecewise at buffer_off until fully consumed.
        // Returns the number of bytes delivered to 'buffer'.
        public int readStream(long obj_id, byte[] buffer, int buffer_off,
                int offset, int length) {
            int chunk_num = offset / SimbaLevelDB.getChunkSize();
            int len = 0;
            if (buffer.length < length) {
                byte[] buf;
                String key = Long.toString(obj_id) + ","
                        + Integer.toString(chunk_num);
                if (readChunks.containsKey(key)) {
                    buf = readChunks.get(key);
                } else {
                    // First request for this region: fill the staging buffer.
                    buf = new byte[length];
                    len = mldb.read(obj_id, buf, offset);
                    readChunks.put(key, buf);
                }
                System.arraycopy(buf, buffer_off, buffer, 0, buffer.length);
                len = buffer.length;
                // Last piece consumed — drop the staging buffer.
                if (buffer_off + buffer.length == length) {
                    readChunks.remove(key);
                }
            } else {
                len = mldb.read(obj_id, buffer, offset);
            }
            return len;
        }
/* decrement object open counter */
public void decrementObjCounter(String tid, String tbl, long obj_id) {
String uid = AppAuthenticationManager.authenticate(tid);
// allow decrement only if the objct is opened
ArrayList<RowObject> ro_list = appToObj.get(uid);
for (RowObject ro : ro_list) {
if (ro.GetObjectID() == obj_id) {
// remove mapping of the closed object
Log.d(TAG, "Removing mapping between app: " + uid
+ ", obj_id: " + obj_id);
ArrayList<RowObject> objects = appToObj.get(uid);
ArrayList<RowObject> new_objects = new ArrayList<RowObject>();
assert (objects != null);
for (int i = 0; i < objects.size(); i++) {
if (objects.get(i).GetObjectID() != obj_id) {
new_objects.add(objects.get(i));
}
}
appToObj.put(uid, new_objects);
SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
int count = mtbl.decrementObjCounter(obj_id);
/*
* YGO: sync if strong consistency and all objects are
* closed
*/
if (count == 0
&& mtbl.getConsistencyLevel() == SimbaConsistency.Type.STRONG) {
syncStrongConsistency(mtbl);
}
break;
}
}
}
        /* !!!NOT USED!!! set opened objects received from app */
        // Map<String, ArrayList<RowObject>>
        // Merges the objects an app reports as open into the appToObj
        // mapping. 'openObjects' is expected to be a
        // Map<String(table), List<RowObject>>; the raw Map in the signature
        // forces the unchecked cast below.
        public void setOpenObjects(String tid, Map openObjects) {
            String uid = AppAuthenticationManager.authenticate(tid);
            ArrayList<RowObject> objects = appToObj.get(uid);
            if (objects == null) {
                objects = new ArrayList<RowObject>();
            }
            Map<String, List<RowObject>> ro_list = (Map<String, List<RowObject>>) openObjects;
            for (Map.Entry<String, List<RowObject>> entry : ro_list.entrySet()) {
                String tbl = entry.getKey();
                List<RowObject> ro = entry.getValue();
                // NOTE(review): mtbl is never used — confirm getTable() has
                // no required side effect before removing this line.
                SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
                for (int i = 0; i < ro.size(); i++) {
                    // store object_id in appToObj
                    objects.add(ro.get(i));
                }
            }
            appToObj.put(uid, objects);
        }
        /* YGO: sync with SyncSet when using strong consistency */
        // Pushes a strong-consistency table's dirty/deleted rows — and every
        // chunk of their objects — to the server as one transaction, then
        // blocks until the sync response arrives. Reads object data from a
        // LevelDB snapshot so concurrent writes do not tear the payload.
        public void syncStrongConsistency(SimbaTable mtbl) {
            // if there was update while writing/updating, remove SyncSet
            // take snapshot before creating SyncRequest message
            ReadOptions ro = SimbaLevelDB.takeSnapshot();
            Map<Integer, Long> obj_list = new HashMap<Integer, Long>();
            SyncHeader h = mtbl.buildDataForSyncingStrong(obj_list);
            if (h == null) {
                Log.d(TAG, "There was update while writing! Do not sync!");
            } else if (h.getDirtyRows().isEmpty()
                    && h.getDeletedRows().isEmpty()) {
                // NOTE(review): treated as impossible (assert false); asserts
                // are normally disabled on Android, so in production this
                // path only logs.
                Log.d(TAG, "Nothing to sync!");
                assert false;
            } else {
                Log.d(TAG, "sync header=" + h);
                SyncRequest r = SyncRequest.newBuilder().setData(h).build();
                SimbaMessage.Builder mb = SimbaMessage.newBuilder()
                        .setType(SimbaMessage.Type.SYNC_REQUEST)
                        .setSeq(r.getData().getTrans_id()).setSyncRequest(r);
                Log.d(TAG, "Sending SYNC_REQUEST! app: " + r.getData().getApp()
                        + ", tbl: " + r.getData().getTbl() + ", trans_id: "
                        + r.getData().getTrans_id());
                networkManager.sendMessageNow(mb, r.getData().getTrans_id());
                SeqNumManager.addPendingSeq(r.getData().getTrans_id(),
                        mb.build());
                /* create ObjectFragments */
                // Ship every chunk of every dirty object; the last chunk of
                // each object carries eof == true.
                for (Map.Entry<Integer, Long> entry : obj_list.entrySet()) {
                    int oid = entry.getKey();
                    long objID = entry.getValue();
                    int numChunks = SimbaLevelDB.getNumChunks(objID);
                    for (int i = 0; i < numChunks; i++) {
                        byte[] buffer = SimbaLevelDB.getChunk(ro, objID, i);
                        ObjectFragment of = ObjectFragment.newBuilder()
                                .setTrans_id(r.getData().getTrans_id())
                                .setOid(oid).setOffset(i)
                                .setData(ByteString.copyFrom(buffer))
                                .setEof(i + 1 == numChunks ? true : false)
                                .build();
                        Log.d(TAG, "Sending all OBJECT_FRAGMENTs! trans_id: "
                                + of.getTrans_id() + ", oid: " + of.getOid()
                                + ", offset: " + of.getOffset() + ", eof: " + of.getEof());
                        SimbaMessage.Builder mb_of = SimbaMessage
                                .newBuilder()
                                .setType(SimbaMessage.Type.OBJECT_FRAGMENT)
                                .setSeq(r.getData().getTrans_id())
                                .setObjectFragment(of);
                        networkManager.sendMessageNow(mb_of, r.getData()
                                .getTrans_id());
                    }
                }
            }
            // close snapshot after all sync operations are done
            SimbaLevelDB.closeSnapshot(ro);
            // Blocks the caller until the server acknowledges the sync.
            mtbl.waitForSyncResponse();
        }
};
    // Hands out the AIDL binder (the anonymous Stub defined above).
    public IBinder onBind(Intent arg0) {
        return mBinder;
    }
@Override
public void onDestroy() {
// simba_db.execSQL("DROP TABLE metadata;");
try {
metadata.close();
} catch (IOException e) {
e.printStackTrace();
}
super.onDestroy();
isRunning = false;
}
    // True while the service is alive (cleared in onDestroy()).
    public static boolean isRunning() {
        return isRunning;
    }
    // we have made sure this service will only be called once
    // Non-sticky: Android will not recreate the service after it is killed.
    @Override
    public int onStartCommand(Intent intent, int flags, int startid) {
        Log.d(TAG, "SCS started");
        return Service.START_NOT_STICKY;
    }
    // Last connectivity state recorded by this service.
    public static boolean isNetworkConnected() {
        return isConnected;
    }
    // Creates the client table plus Simba's bookkeeping columns, registers
    // it with SimbaTableManager, and records the partial-sync preference.
    // NOTE(review): 'tbl' and 'cmd' are concatenated into raw SQL — they
    // must only ever come from trusted, already-authenticated callers.
    public void createTable(String uid, String tbl, String cmd,
            TableProperties props) {
        // FIX: set consistency level from app
        int lvl = SimbaConsistency.Type.STRONG;
        SQLiteDatabase db = client_dbhandle.get(uid);
        String sql = "CREATE TABLE IF NOT EXISTS " + tbl + " (" + cmd + ");";
        db.execSQL(sql);
        /*
         * add extra fields: row id, revision number, dirty flag (row, obj)
         */
        try {
            db.execSQL("ALTER TABLE " + tbl + " ADD COLUMN _id VARCHAR;");
            db.execSQL("ALTER TABLE " + tbl
                    + " ADD COLUMN _rev INT DEFAULT -1;");
            db.execSQL("ALTER TABLE " + tbl
                    + " ADD COLUMN _torn BIT DEFAULT 0;");
            db.execSQL("ALTER TABLE " + tbl
                    + " ADD COLUMN _dirty BIT DEFAULT 0;");
            db.execSQL("ALTER TABLE " + tbl
                    + " ADD COLUMN _dirtyObj BIT DEFAULT 0;");
            db.execSQL("ALTER TABLE " + tbl
                    + " ADD COLUMN _openObj INT DEFAULT 0;");
            db.execSQL("ALTER TABLE " + tbl
                    + " ADD COLUMN _sync BIT DEFAULT 0;");
            db.execSQL("ALTER TABLE " + tbl
                    + " ADD COLUMN _conflict BIT DEFAULT 0;");
        } catch (SQLiteException e) {
            /* do nothing: the column already exists */
            // NOTE(review): if any ALTER throws, the remaining columns are
            // skipped — safe only because they are always created together.
        }
        SimbaTableManager.addTable(uid, tbl, new SimbaTable(uid, db, tbl,
                cmd, lvl, props, metadata, false));
        metadata.put(uid, tbl, "partial", props.isPartial());
    }
    // Builds and schedules a one-shot write sync for the table: one
    // SYNC_REQUEST carrying the dirty/deleted rows, followed by one
    // OBJECT_FRAGMENT per dirty chunk, all scheduled with delay
    // tolerance 'dt'. No-op when the table has nothing dirty.
    private void write_sync_oneshot(SimbaTable mtbl, int dt) {
        Map<Integer, Long> obj_list = new HashMap<Integer, Long>();
        SyncHeader d = mtbl.buildDataForSyncing(obj_list);
        if (!d.getDirtyRows().isEmpty() || !d.getDeletedRows().isEmpty()) {
            SyncRequest r = SyncRequest.newBuilder().setData(d).build();
            SimbaMessage.Builder mmsg = SimbaMessage.newBuilder()
                    .setType(SimbaMessage.Type.SYNC_REQUEST).setSyncRequest(r);
            // SimbaLogger.log("up, " + mmsg.toString() + ", " +
            // mmsg.build().toString() + ", " + mmsg.build().computeSize());
            syncScheduler.schedule(mmsg, true, dt);
            if (!obj_list.isEmpty()) {
                /* object fragments */
                /* create ObjectFragments */
                // Snapshot LevelDB so every fragment comes from a consistent
                // view even if writes continue concurrently.
                ReadOptions ro = SimbaLevelDB.takeSnapshot();
                for (Map.Entry<Integer, Long> entry : obj_list.entrySet()) {
                    int oid = entry.getKey();
                    long objID = entry.getValue();
                    BitSet dirtyChunkList = SimbaChunkList
                            .getDirtyChunks(objID);
                    // Walk only the dirty chunks; 'i' is advanced to the
                    // next set bit BEFORE the fragment is built so eof can
                    // be derived from i == -1 (no more dirty chunks).
                    for (int i = dirtyChunkList.nextSetBit(0); i >= 0;) {
                        int chunk_num = i;
                        byte[] buffer = SimbaLevelDB.getChunk(ro, objID,
                                chunk_num);
                        i = dirtyChunkList.nextSetBit(i + 1);
                        ObjectFragment of = ObjectFragment
                                .newBuilder()
                                .setTrans_id(r.getData().getTrans_id())
                                .setOid(oid)
                                .setOffset(
                                        chunk_num
                                                * SimbaLevelDB.getChunkSize())
                                .setData(ByteString.copyFrom(buffer))
                                .setEof(i == -1 ? true : false).build();
                        Log.d(TAG,
                                "Sending OBJECT_FRAGMENT! trans_id: "
                                        + of.getTrans_id() + ", oid: "
                                        + of.getOid() + ", offset: "
                                        + of.getOffset());
                        SimbaMessage.Builder mb_of = SimbaMessage
                                .newBuilder()
                                .setType(SimbaMessage.Type.OBJECT_FRAGMENT)
                                .setObjectFragment(of);
                        // SimbaLogger.log("up, " + mb_of.toString() + ", " +
                        // mb_of.build().toString() + ", " +
                        // mb_of.build().computeSize());
                        syncScheduler.schedule(mb_of, true, dt);
                    }
                }
                SimbaLevelDB.closeSnapshot(ro);
            } else {
                Log.d(TAG, "write_sync_oneshot no objects to sync");
            }
        }
    }
private boolean unregisterAppInternal(String uid) {
boolean ret = false;
if (ClientCallbackManager.clientExists(uid)) {
ClientCallbackManager.removeClient(uid);
ConcurrentHashMap<String, SimbaTable> app_tbls = SimbaTableManager
.getAllTables(uid);
for (Enumeration<String> enu = app_tbls.keys(); enu
.hasMoreElements();) {
String tbl = enu.nextElement();
writeTimerManager.removeTask(app_tbls.get(tbl));
// remove dirty chunk list
SimbaChunkList.removeDirtyChunkList(uid, tbl);
}
Log.d(TAG, "Removing mapping for app: " + uid);
appToObj.remove(uid);
SimbaTableManager.dropTables(uid);
// close database, otherwise Android will complain about
// close() not called exception.
client_dbhandle.get(uid).close();
ret = true;
}
return ret;
}
    /* Subscribe Read sync requests to Server */
    // Adds <uid,tbl> to the local table bitmap and sends a tokened SUB_TBL
    // message carrying the period, a server-scaled delay tolerance, and
    // the table's current revision so the server can start pushing updates.
    private void sub_tbl(String uid, String tbl, int period, int dt) {
        // insert to table bitmap
        Log.d(TAG, "Inserting <" + uid + ", " + tbl + "> to table bitmap");
        tblBitmap.add(uid + "," + tbl);
        SimbaTable mtbl = SimbaTableManager.getTable(uid, tbl);
        SubscribeTable t = SubscribeTable
                .newBuilder()
                .setApp(uid)
                .setTbl(tbl)
                .setDelayTolerance(
                        (int) (dt * Preferences.READ_DT_SERVER_RATIO))
                .setPeriod(period).setRev(mtbl.getRev()).build();
        SimbaMessage.Builder b = SimbaMessage.newBuilder()
                .setType(SimbaMessage.Type.SUB_TBL).setSubscribeTable(t);
        // SimbaLogger.log("up, " + b.toString()); // + ", " +
        // b.build().toString() + ", " + b.build().computeSize());
        networkManager.sendTokenedMessage(b);
    }
private void unsub_tbl(String uid, String tbl) {
// remove from table bitmap
for (String bit : tblBitmap) {
String[] uid_tid = bit.split("\\,");
if (uid_tid[1] == tbl) {
tblBitmap.remove(bit);
break;
}
}
UnsubscribeTable t = UnsubscribeTable.newBuilder().setApp(uid)
.setTbl(tbl).build();
SimbaMessage.Builder b = SimbaMessage.newBuilder()
.setType(SimbaMessage.Type.UNSUB_TBL).setUnsubscribeTable(t);
// SimbaLogger.log("up, " + b.toString() + ", " + b.build().toString()
// + ", " + b.build().computeSize());
networkManager.sendTokenedMessage(b);
}
    /* return table name for bitmap id */
    // Returns the "uid,tbl" entry stored at the given bitmap index.
    public static String getUidTid(int bitmap_id) {
        return tblBitmap.get(bitmap_id);
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package vista;
import Controlador.usuario;
import java.sql.DriverManager;
import javax.swing.JOptionPane;
import javax.swing.JTable;
import javax.swing.table.DefaultTableModel;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;
import javax.swing.JOptionPane;
/**
*
* @author claudio
*/
public class frmUsuarios extends javax.swing.JFrame {
    // JDBC plumbing shared by the form's handlers (opened lazily in the
    // "MOSTRAR" handler below).
    private Connection con = null;
    private ResultSet rs = null;
    private Statement stmt= null;
    private ResultSetMetaData rsm = null;
    // (translated) This is the place to instantiate the 'usuario' controller.
    DefaultTableModel modelo = new DefaultTableModel();
    JTable tabla = new JTable (modelo);
    // Controller object backing all CRUD operations on this form.
    usuario miUsuario = new usuario();
    /**
     * Creates new form frmUsuarios: builds the generated UI and centers
     * the window on the screen.
     */
    public frmUsuarios() {
        initComponents();
        setLocationRelativeTo(null);
    }
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        // NetBeans form-editor generated code: instantiates every widget,
        // wires the mouse/action listeners to the handler methods below,
        // and builds the GroupLayout. Do not hand-edit — regenerated by
        // the Form Editor.
        jLabel1 = new javax.swing.JLabel();
        jLabel3 = new javax.swing.JLabel();
        jLabel4 = new javax.swing.JLabel();
        jLabel5 = new javax.swing.JLabel();
        jLabel6 = new javax.swing.JLabel();
        jLabel7 = new javax.swing.JLabel();
        txtid = new javax.swing.JTextField();
        txtNombre = new javax.swing.JTextField();
        txtApellido = new javax.swing.JTextField();
        txtUsuario = new javax.swing.JTextField();
        txtClave = new javax.swing.JTextField();
        jLabel8 = new javax.swing.JLabel();
        btnBuscar = new javax.swing.JButton();
        btnMostrarTable = new javax.swing.JButton();
        btnActualizar = new javax.swing.JButton();
        btnConsultar = new javax.swing.JButton();
        btnAgregar = new javax.swing.JButton();
        btnEliminar = new javax.swing.JButton();
        btnAceptar = new javax.swing.JButton();
        btnNuevo = new javax.swing.JButton();
        jLabel9 = new javax.swing.JLabel();
        jButton4 = new javax.swing.JButton();
        jScrollPane2 = new javax.swing.JScrollPane();
        jt1 = new javax.swing.JTable();
        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        setFont(new java.awt.Font("Agency FB", 2, 12)); // NOI18N
        jLabel1.setFont(new java.awt.Font("Tahoma", 1, 14)); // NOI18N
        jLabel1.setText("MANTENIMIENTO USUARIOS");
        jLabel3.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        jLabel3.setText("ID");
        jLabel4.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        jLabel4.setText("Nombre");
        jLabel5.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        jLabel5.setText("Apellido");
        jLabel6.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        jLabel6.setText("Usuario");
        jLabel7.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        jLabel7.setText("Clave");
        txtid.setEditable(false);
        txtid.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        txtNombre.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        txtApellido.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        txtUsuario.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        txtClave.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        jLabel8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/usuario99.png"))); // NOI18N
        btnBuscar.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
        btnBuscar.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/lupita.png"))); // NOI18N
        btnBuscar.setText("Buscar");
        btnBuscar.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                btnBuscarMouseClicked(evt);
            }
        });
        btnBuscar.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnBuscarActionPerformed(evt);
            }
        });
        btnMostrarTable.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
        btnMostrarTable.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/save.png"))); // NOI18N
        btnMostrarTable.setText("MOSTRAR");
        btnMostrarTable.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                btnMostrarTableMouseClicked(evt);
            }
        });
        btnActualizar.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
        btnActualizar.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/actualizar.png"))); // NOI18N
        btnActualizar.setText("Actualizar");
        btnActualizar.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                btnActualizarMouseClicked(evt);
            }
        });
        btnConsultar.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
        btnConsultar.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/consultar.png"))); // NOI18N
        btnConsultar.setText("Consultar");
        btnAgregar.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
        btnAgregar.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/add.png"))); // NOI18N
        btnAgregar.setText("NUEVO");
        btnAgregar.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                btnAgregarMouseClicked(evt);
            }
        });
        btnEliminar.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
        btnEliminar.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/eliminar.png"))); // NOI18N
        btnEliminar.setText("Eliminar");
        btnEliminar.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                btnEliminarMouseClicked(evt);
            }
        });
        btnAceptar.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
        btnAceptar.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/Ok.png"))); // NOI18N
        btnAceptar.setText("ACEPTAR");
        btnAceptar.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                btnAceptarMouseClicked(evt);
            }
        });
        btnAceptar.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnAceptarActionPerformed(evt);
            }
        });
        btnNuevo.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
        btnNuevo.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/escoba.png"))); // NOI18N
        btnNuevo.setText("Limpiar");
        btnNuevo.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                btnNuevoMouseClicked(evt);
            }
        });
        btnNuevo.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnNuevoActionPerformed(evt);
            }
        });
        jLabel9.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        jLabel9.setText("Lista de Usuarios");
        jButton4.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
        jButton4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/atras3.png"))); // NOI18N
        jButton4.setText("Volver");
        jButton4.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                jButton4MouseClicked(evt);
            }
        });
        jButton4.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton4ActionPerformed(evt);
            }
        });
        jt1.setForeground(java.awt.Color.blue);
        jt1.setModel(new javax.swing.table.DefaultTableModel(
            new Object [][] {
                {},
                {},
                {},
                {}
            },
            new String [] {
            }
        ));
        jScrollPane2.setViewportView(jt1);
        // GroupLayout definition below is entirely editor-generated.
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addGap(0, 0, Short.MAX_VALUE)
                .addComponent(jButton4, javax.swing.GroupLayout.PREFERRED_SIZE, 115, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addGroup(layout.createSequentialGroup()
                .addGap(19, 19, 19)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING, false)
                    .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE)
                    .addGroup(layout.createSequentialGroup()
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addGroup(layout.createSequentialGroup()
                                .addComponent(jLabel8)
                                .addGap(72, 72, 72)
                                .addComponent(jLabel1))
                            .addGroup(layout.createSequentialGroup()
                                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                    .addGroup(layout.createSequentialGroup()
                                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                            .addComponent(jLabel4)
                                            .addComponent(jLabel5, javax.swing.GroupLayout.Alignment.TRAILING)
                                            .addComponent(jLabel3, javax.swing.GroupLayout.Alignment.TRAILING)
                                            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                                .addComponent(jLabel7)
                                                .addComponent(jLabel6)))
                                        .addGap(18, 18, 18)
                                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                                                .addComponent(txtid, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, 114, javax.swing.GroupLayout.PREFERRED_SIZE)
                                                .addComponent(txtNombre, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, 114, javax.swing.GroupLayout.PREFERRED_SIZE)
                                                .addComponent(txtApellido, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, 114, javax.swing.GroupLayout.PREFERRED_SIZE))
                                            .addComponent(txtUsuario, javax.swing.GroupLayout.PREFERRED_SIZE, 114, javax.swing.GroupLayout.PREFERRED_SIZE))
                                        .addGap(10, 10, 10))
                                    .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                            .addComponent(jLabel9)
                                            .addGroup(layout.createSequentialGroup()
                                                .addGap(54, 54, 54)
                                                .addComponent(txtClave, javax.swing.GroupLayout.PREFERRED_SIZE, 114, javax.swing.GroupLayout.PREFERRED_SIZE)))
                                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)))
                                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                                    .addComponent(btnActualizar, javax.swing.GroupLayout.PREFERRED_SIZE, 121, javax.swing.GroupLayout.PREFERRED_SIZE)
                                    .addComponent(btnAgregar, javax.swing.GroupLayout.PREFERRED_SIZE, 121, javax.swing.GroupLayout.PREFERRED_SIZE)
                                    .addComponent(btnNuevo, javax.swing.GroupLayout.PREFERRED_SIZE, 121, javax.swing.GroupLayout.PREFERRED_SIZE)
                                    .addComponent(btnAceptar, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.PREFERRED_SIZE, 119, javax.swing.GroupLayout.PREFERRED_SIZE))
                                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING, false)
                                    .addComponent(btnConsultar, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                                    .addComponent(btnEliminar, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                                    .addComponent(btnBuscar, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                                    .addComponent(btnMostrarTable, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))))
                        .addGap(2, 2, 2)))
                .addContainerGap(104, Short.MAX_VALUE))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(jLabel1)
                        .addGap(60, 60, 60)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(btnNuevo, javax.swing.GroupLayout.PREFERRED_SIZE, 34, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(btnBuscar, javax.swing.GroupLayout.PREFERRED_SIZE, 34, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(btnAgregar, javax.swing.GroupLayout.PREFERRED_SIZE, 34, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(btnEliminar, javax.swing.GroupLayout.PREFERRED_SIZE, 34, javax.swing.GroupLayout.PREFERRED_SIZE)))
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(jLabel8)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(txtid, javax.swing.GroupLayout.PREFERRED_SIZE, 24, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(jLabel3))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(txtNombre, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(jLabel4))))
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(txtApellido, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(jLabel5, javax.swing.GroupLayout.PREFERRED_SIZE, 14, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(txtUsuario, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(jLabel6))
                        .addGap(6, 6, 6)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(txtClave, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(jLabel7))
                        .addGap(31, 31, 31)
                        .addComponent(jLabel9))
                    .addGroup(layout.createSequentialGroup()
                        .addGap(3, 3, 3)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(btnActualizar, javax.swing.GroupLayout.PREFERRED_SIZE, 35, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(btnConsultar, javax.swing.GroupLayout.PREFERRED_SIZE, 34, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(btnAceptar, javax.swing.GroupLayout.PREFERRED_SIZE, 39, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(btnMostrarTable, javax.swing.GroupLayout.PREFERRED_SIZE, 39, javax.swing.GroupLayout.PREFERRED_SIZE))))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 203, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(18, 18, 18)
                .addComponent(jButton4, javax.swing.GroupLayout.PREFERRED_SIZE, 35, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(16, Short.MAX_VALUE))
        );
        pack();
    }// </editor-fold>//GEN-END:initComponents
    // Intentionally empty: the ACEPTAR button has no behavior yet.
    private void btnAceptarMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btnAceptarMouseClicked
    }//GEN-LAST:event_btnAceptarMouseClicked
    // Intentionally empty: the mouse handler above is the active one.
    private void btnAceptarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnAceptarActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_btnAceptarActionPerformed
    // "Volver": opens the main window and disposes this form.
    private void jButton4MouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jButton4MouseClicked
        // TODO add your handling code here:
        framePrincipal formInicial = new framePrincipal();
        formInicial.setVisible(true);
        this.dispose();
    }//GEN-LAST:event_jButton4MouseClicked
    // Intentionally empty: the mouse handler above performs the action.
    private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_jButton4ActionPerformed
private void btnAgregarMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btnAgregarMouseClicked
// If (txtEjemplo.getText().equals(""))
if(this.txtUsuario.getText().equals("")){
if(this.txtid.getText().equals("")||this.txtNombre.getText().equals("")|| this.txtClave.getText().equals("")||this.txtApellido.getText().equals("")) {
System.out.println("en blanco no sirve");
JOptionPane.showMessageDialog(this, "DEBES INGRESAR LOS DATOS APROPIADOS");}}
else{
// miUsuario.setId(Integer.parseInt(txtid.getText()));
miUsuario.setNombre(txtNombre.getText());
miUsuario.setApellido(txtApellido.getText());
miUsuario.setUsuario(txtUsuario.getText());
miUsuario.setClave(txtClave.getText());
// this.txtid.setText("");
this.txtClave.setText("");
this.txtApellido.setText("");
this.txtUsuario.setText("");
this.txtNombre.setText("");
if(miUsuario.insertar()){System.out.println("Ingresado");}
else{ JOptionPane.showMessageDialog(this,"CONEXION FALLIDA","Estado de Conexion",JOptionPane.WARNING_MESSAGE);}
}
}//GEN-LAST:event_btnAgregarMouseClicked
private void btnNuevoMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btnNuevoMouseClicked
    // Consistency fix: reuse the limpiar() helper instead of duplicating the
    // five setText("") calls it already performs.
    limpiar();
    System.out.println("texbox limpiados correctamente");
}//GEN-LAST:event_btnNuevoMouseClicked
private void btnBuscarMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btnBuscarMouseClicked
    // Looks up a usuario by numeric id and fills the form with the result.
    limpiar(); // clear the form before loading the looked-up record
    int codigo;
    try {
        // parseInt(null) also throws NumberFormatException, so a cancelled
        // dialog is handled by the same catch.
        codigo = Integer.parseInt(JOptionPane.showInputDialog("Ingrese Codigo a consultar"));
    } catch (NumberFormatException e) {
        System.out.println("codigo debe ser numerico");
        // Bug fix: the original swallowed the exception and went on to query
        // with codigo == 0, overwriting the form with a bogus lookup.
        return;
    }
    miUsuario.consultar2(codigo);
    this.txtUsuario.setText(miUsuario.getUsuario());
    this.txtid.setText("" + miUsuario.getId());
    this.txtNombre.setText(miUsuario.getNombre());
    this.txtApellido.setText(miUsuario.getApellido());
    this.txtClave.setText(miUsuario.getClave());
}//GEN-LAST:event_btnBuscarMouseClicked
private void btnMostrarTableMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btnMostrarTableMouseClicked
    // Loads every usuario row from the database into the JTable jt1.
    try {
        DefaultTableModel modelo = new DefaultTableModel();
        this.jt1.setModel(modelo);
        String conexion = "jdbc:odbc:venta";
        try {
            con = DriverManager.getConnection(conexion, "root", "1234");
        } catch (Exception e) {
            JOptionPane.showMessageDialog(null, "no conecto");
            // Bug fix: the original fell through and used the null/stale
            // connection, crashing with an NPE below.
            return;
        }
        String consulta = "select ID,NOMBRE,APELLIDO,USUARIO,CLAVE from usuario order by ID";
        System.out.println(consulta);
        try {
            stmt = con.createStatement();
            rs = stmt.executeQuery(consulta);
        } catch (Exception e) {
            System.out.println("error en la consulta");
            // Bug fix: the original continued and dereferenced a null ResultSet.
            return;
        }
        try {
            ResultSetMetaData rsm = rs.getMetaData();
            int cantidadColumnas = rsm.getColumnCount();
            // One table column per result-set column, labelled from metadata.
            for (int i = 1; i <= cantidadColumnas; i++) {
                modelo.addColumn(rsm.getColumnLabel(i));
            }
            while (rs.next()) {
                Object[] fila = new Object[cantidadColumnas];
                for (int i = 0; i < cantidadColumnas; i++) {
                    fila[i] = rs.getObject(i + 1); // JDBC columns are 1-based
                }
                modelo.addRow(fila);
            }
        } finally {
            // Bug fix: the Statement used to leak (only rs was closed, and
            // only on the success path).
            rs.close();
            stmt.close();
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}//GEN-LAST:event_btnMostrarTableMouseClicked
private void btnBuscarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnBuscarActionPerformed
// TODO add your handling code here:
// Intentionally empty: the lookup is handled in btnBuscarMouseClicked instead.
}//GEN-LAST:event_btnBuscarActionPerformed
private void btnActualizarMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btnActualizarMouseClicked
    // Updates the record identified by txtid with the current form values.
    if (this.txtid.getText().equals("") || this.txtApellido.getText().equals("")
            || this.txtClave.getText().equals("") || this.txtNombre.getText().equals("")) {
        String mensaje = "INGRESE PARAMETROS";
        System.out.println(mensaje);
        JOptionPane.showMessageDialog(this, mensaje);
        return;
    }
    int codigoUsuario;
    try {
        codigoUsuario = Integer.parseInt(this.txtid.getText());
    } catch (NumberFormatException e) {
        // Bug fix: the original let an unparseable id crash the handler.
        System.out.println("codigo debe ser numerico");
        return;
    }
    miUsuario.setNombre(this.txtNombre.getText());
    miUsuario.setApellido(this.txtApellido.getText());
    miUsuario.setUsuario(this.txtUsuario.getText());
    miUsuario.setClave(this.txtClave.getText());
    miUsuario.actualizarUsuario(codigoUsuario);
    // Bug fix: the confirmation used to be shown BEFORE the update ran,
    // claiming success even when the update had not happened yet.
    JOptionPane.showMessageDialog(null, "Registro Actualizado Correctamente");
}//GEN-LAST:event_btnActualizarMouseClicked
private void btnEliminarMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btnEliminarMouseClicked
    // Clears the form and delegates the deletion to the domain object.
    // Fixes: removed the unused local 'exito' and replaced the five
    // duplicated setText("") calls with the existing limpiar() helper.
    limpiar();
    miUsuario.eliminar();
}//GEN-LAST:event_btnEliminarMouseClicked
private void btnNuevoActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnNuevoActionPerformed
    // Reset the form fields and empty the results table.
    limpiar();
    DefaultTableModel tabla = (DefaultTableModel) jt1.getModel();
    tabla.setRowCount(0);
    jt1.updateUI();
}//GEN-LAST:event_btnNuevoActionPerformed
/**
 * Application entry point: installs the Nimbus look and feel when available
 * and then shows the frmUsuarios form on the event dispatch thread.
 *
 * @param args the command line arguments
 */
public static void main(String args[]) {
    /* Set the Nimbus look and feel */
    //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
    /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
     * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
     */
    java.util.logging.Logger log = java.util.logging.Logger.getLogger(frmUsuarios.class.getName());
    try {
        for (javax.swing.UIManager.LookAndFeelInfo laf : javax.swing.UIManager.getInstalledLookAndFeels()) {
            if ("Nimbus".equals(laf.getName())) {
                javax.swing.UIManager.setLookAndFeel(laf.getClassName());
                break;
            }
        }
    } catch (ClassNotFoundException ex) {
        log.log(java.util.logging.Level.SEVERE, null, ex);
    } catch (InstantiationException ex) {
        log.log(java.util.logging.Level.SEVERE, null, ex);
    } catch (IllegalAccessException ex) {
        log.log(java.util.logging.Level.SEVERE, null, ex);
    } catch (javax.swing.UnsupportedLookAndFeelException ex) {
        log.log(java.util.logging.Level.SEVERE, null, ex);
    }
    //</editor-fold>
    /* Create and display the form */
    java.awt.EventQueue.invokeLater(new Runnable() {
        public void run() {
            new frmUsuarios().setVisible(true);
        }
    });
}
/** Clears every editable text field on the form. */
private void limpiar() {
    javax.swing.JTextField[] campos = {txtid, txtNombre, txtApellido, txtUsuario, txtClave};
    for (javax.swing.JTextField campo : campos) {
        campo.setText("");
    }
}
// Variables declaration - do not modify//GEN-BEGIN:variables
// CRUD action buttons.
private javax.swing.JButton btnAceptar;
private javax.swing.JButton btnActualizar;
private javax.swing.JButton btnAgregar;
private javax.swing.JButton btnBuscar;
private javax.swing.JButton btnConsultar;
private javax.swing.JButton btnEliminar;
private javax.swing.JButton btnMostrarTable;
private javax.swing.JButton btnNuevo;
// Navigation button back to the main window (see jButton4MouseClicked).
private javax.swing.JButton jButton4;
// Static form labels (auto-generated names).
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel5;
private javax.swing.JLabel jLabel6;
private javax.swing.JLabel jLabel7;
private javax.swing.JLabel jLabel8;
private javax.swing.JLabel jLabel9;
// Results table and its scroll container.
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JTable jt1;
// Editable fields holding the usuario record being created/edited.
private javax.swing.JTextField txtApellido;
private javax.swing.JTextField txtClave;
private javax.swing.JTextField txtNombre;
private javax.swing.JTextField txtUsuario;
private javax.swing.JTextField txtid;
// End of variables declaration//GEN-END:variables
}
| |
// XMLWriter.java - serialize an XML document.
// Written by David Megginson, david@megginson.com
// and placed by him into the public domain.
// Extensively modified by John Cowan for TagSoup.
// TagSoup is licensed under the Apache License,
// Version 2.0. You may obtain a copy of this license at
// http://www.apache.org/licenses/LICENSE-2.0 . You may also have
// additional legal rights not granted by this license.
//
// TagSoup is distributed in the hope that it will be useful, but
// unless required by applicable law or agreed to in writing, TagSoup
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, either express or implied; not even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
package org.ccil.cowan.tagsoup;
import org.xml.sax.Attributes;
/**
* Default implementation of the Attributes interface.
*
* <blockquote>
* <em>This module, both source code and documentation, is in the
* Public Domain, and comes with <strong>NO WARRANTY</strong>.</em>
* See <a href='http://www.saxproject.org'>http://www.saxproject.org</a>
* for further information.
* </blockquote>
*
* <p>This class provides a default implementation of the SAX2
* {@link org.xml.sax.Attributes Attributes} interface, with the
* addition of manipulators so that the list can be modified or
* reused.</p>
*
* <p>There are two typical uses of this class:</p>
*
* <ol>
* <li>to take a persistent snapshot of an Attributes object
* in a {@link org.xml.sax.ContentHandler#startElement startElement} event; or</li>
* <li>to construct or modify an Attributes object in a SAX2 driver or filter.</li>
* </ol>
*
* <p>This class replaces the now-deprecated SAX1 {@link
* org.xml.sax.helpers.AttributeListImpl AttributeListImpl}
* class; in addition to supporting the updated Attributes
* interface rather than the deprecated {@link org.xml.sax.AttributeList
* AttributeList} interface, it also includes a much more efficient
* implementation using a single array rather than a set of Vectors.</p>
*
* @since SAX 2.0
* @author David Megginson
* @version 2.0.1 (sax2r2)
*/
public class AttributesImpl implements Attributes
{

    ////////////////////////////////////////////////////////////////////
    // Constructors.
    ////////////////////////////////////////////////////////////////////

    /**
     * Construct a new, empty AttributesImpl object.
     */
    public AttributesImpl ()
    {
        length = 0;
        data = null;
    }

    /**
     * Copy an existing Attributes object.
     *
     * <p>This constructor is especially useful inside a
     * {@link org.xml.sax.ContentHandler#startElement startElement} event.</p>
     *
     * @param atts The existing Attributes object.
     */
    public AttributesImpl (Attributes atts)
    {
        setAttributes(atts);
    }

    ////////////////////////////////////////////////////////////////////
    // Implementation of org.xml.sax.Attributes.
    ////////////////////////////////////////////////////////////////////

    /**
     * Return the number of attributes in the list.
     *
     * @return The number of attributes in the list.
     * @see org.xml.sax.Attributes#getLength
     */
    public int getLength ()
    {
        return length;
    }

    /**
     * Return an attribute's Namespace URI.
     *
     * @param index The attribute's index (zero-based).
     * @return The Namespace URI, the empty string if none is
     *         available, or null if the index is out of range.
     * @see org.xml.sax.Attributes#getURI
     */
    public String getURI (int index)
    {
        if (index >= 0 && index < length) {
            return data[index*5];
        } else {
            return null;
        }
    }

    /**
     * Return an attribute's local name.
     *
     * @param index The attribute's index (zero-based).
     * @return The attribute's local name, the empty string if
     *         none is available, or null if the index is out of range.
     * @see org.xml.sax.Attributes#getLocalName
     */
    public String getLocalName (int index)
    {
        if (index >= 0 && index < length) {
            return data[index*5+1];
        } else {
            return null;
        }
    }

    /**
     * Return an attribute's qualified (prefixed) name.
     *
     * @param index The attribute's index (zero-based).
     * @return The attribute's qualified name, the empty string if
     *         none is available, or null if the index is out of bounds.
     * @see org.xml.sax.Attributes#getQName
     */
    public String getQName (int index)
    {
        if (index >= 0 && index < length) {
            return data[index*5+2];
        } else {
            return null;
        }
    }

    /**
     * Return an attribute's type by index.
     *
     * @param index The attribute's index (zero-based).
     * @return The attribute's type, "CDATA" if the type is unknown, or null
     *         if the index is out of bounds.
     * @see org.xml.sax.Attributes#getType(int)
     */
    public String getType (int index)
    {
        if (index >= 0 && index < length) {
            return data[index*5+3];
        } else {
            return null;
        }
    }

    /**
     * Return an attribute's value by index.
     *
     * @param index The attribute's index (zero-based).
     * @return The attribute's value or null if the index is out of bounds.
     * @see org.xml.sax.Attributes#getValue(int)
     */
    public String getValue (int index)
    {
        if (index >= 0 && index < length) {
            return data[index*5+4];
        } else {
            return null;
        }
    }

    /**
     * Look up an attribute's index by Namespace name.
     *
     * <p>In many cases, it will be more efficient to look up the name once and
     * use the index query methods rather than using the name query methods
     * repeatedly.</p>
     *
     * @param uri The attribute's Namespace URI, or the empty
     *        string if none is available.
     * @param localName The attribute's local name.
     * @return The attribute's index, or -1 if none matches.
     * @see org.xml.sax.Attributes#getIndex(java.lang.String,java.lang.String)
     */
    public int getIndex (String uri, String localName)
    {
        int max = length * 5;
        for (int i = 0; i < max; i += 5) {
            if (data[i].equals(uri) && data[i+1].equals(localName)) {
                return i / 5;
            }
        }
        return -1;
    }

    /**
     * Look up an attribute's index by qualified (prefixed) name.
     *
     * @param qName The qualified name.
     * @return The attribute's index, or -1 if none matches.
     * @see org.xml.sax.Attributes#getIndex(java.lang.String)
     */
    public int getIndex (String qName)
    {
        int max = length * 5;
        for (int i = 0; i < max; i += 5) {
            if (data[i+2].equals(qName)) {
                return i / 5;
            }
        }
        return -1;
    }

    /**
     * Look up an attribute's type by Namespace-qualified name.
     *
     * @param uri The Namespace URI, or the empty string for a name
     *        with no explicit Namespace URI.
     * @param localName The local name.
     * @return The attribute's type, or null if there is no
     *         matching attribute.
     * @see org.xml.sax.Attributes#getType(java.lang.String,java.lang.String)
     */
    public String getType (String uri, String localName)
    {
        int max = length * 5;
        for (int i = 0; i < max; i += 5) {
            if (data[i].equals(uri) && data[i+1].equals(localName)) {
                return data[i+3];
            }
        }
        return null;
    }

    /**
     * Look up an attribute's type by qualified (prefixed) name.
     *
     * @param qName The qualified name.
     * @return The attribute's type, or null if there is no
     *         matching attribute.
     * @see org.xml.sax.Attributes#getType(java.lang.String)
     */
    public String getType (String qName)
    {
        int max = length * 5;
        for (int i = 0; i < max; i += 5) {
            if (data[i+2].equals(qName)) {
                return data[i+3];
            }
        }
        return null;
    }

    /**
     * Look up an attribute's value by Namespace-qualified name.
     *
     * @param uri The Namespace URI, or the empty string for a name
     *        with no explicit Namespace URI.
     * @param localName The local name.
     * @return The attribute's value, or null if there is no
     *         matching attribute.
     * @see org.xml.sax.Attributes#getValue(java.lang.String,java.lang.String)
     */
    public String getValue (String uri, String localName)
    {
        int max = length * 5;
        for (int i = 0; i < max; i += 5) {
            if (data[i].equals(uri) && data[i+1].equals(localName)) {
                return data[i+4];
            }
        }
        return null;
    }

    /**
     * Look up an attribute's value by qualified (prefixed) name.
     *
     * @param qName The qualified name.
     * @return The attribute's value, or null if there is no
     *         matching attribute.
     * @see org.xml.sax.Attributes#getValue(java.lang.String)
     */
    public String getValue (String qName)
    {
        int max = length * 5;
        for (int i = 0; i < max; i += 5) {
            if (data[i+2].equals(qName)) {
                return data[i+4];
            }
        }
        return null;
    }

    ////////////////////////////////////////////////////////////////////
    // Manipulators.
    ////////////////////////////////////////////////////////////////////

    /**
     * Clear the attribute list for reuse.
     *
     * <p>Note that little memory is freed by this call:
     * the current array is kept so it can be reused.</p>
     */
    public void clear ()
    {
        if (data != null) {
            // Null out only the slots that were in use; the tail is already null.
            for (int i = 0; i < (length * 5); i++)
                data [i] = null;
        }
        length = 0;
    }

    /**
     * Copy an entire Attributes object.
     *
     * <p>It may be more efficient to reuse an existing object
     * rather than constantly allocating new ones.</p>
     *
     * @param atts The attributes to copy.
     */
    public void setAttributes (Attributes atts)
    {
        clear();
        int n = atts.getLength();
        // Fix: grow (or reuse) the existing array via ensureCapacity instead of
        // unconditionally allocating a new one, which defeated the reuse that
        // clear() deliberately preserves. Note that length is still 0 here, so
        // ensureCapacity copies nothing when it has to reallocate.
        ensureCapacity(n);
        length = n;
        for (int i = 0; i < n; i++) {
            data[i*5] = atts.getURI(i);
            data[i*5+1] = atts.getLocalName(i);
            data[i*5+2] = atts.getQName(i);
            data[i*5+3] = atts.getType(i);
            data[i*5+4] = atts.getValue(i);
        }
    }

    /**
     * Add an attribute to the end of the list.
     *
     * <p>For the sake of speed, this method does no checking
     * to see if the attribute is already in the list: that is
     * the responsibility of the application.</p>
     *
     * @param uri The Namespace URI, or the empty string if
     *        none is available or Namespace processing is not
     *        being performed.
     * @param localName The local name, or the empty string if
     *        Namespace processing is not being performed.
     * @param qName The qualified (prefixed) name, or the empty string
     *        if qualified names are not available.
     * @param type The attribute type as a string.
     * @param value The attribute value.
     */
    public void addAttribute (String uri, String localName, String qName,
                              String type, String value)
    {
        ensureCapacity(length+1);
        data[length*5] = uri;
        data[length*5+1] = localName;
        data[length*5+2] = qName;
        data[length*5+3] = type;
        data[length*5+4] = value;
        length++;
    }

    /**
     * Set an attribute in the list.
     *
     * <p>For the sake of speed, this method does no checking
     * for name conflicts or well-formedness: such checks are the
     * responsibility of the application.</p>
     *
     * @param index The index of the attribute (zero-based).
     * @param uri The Namespace URI, or the empty string if
     *        none is available or Namespace processing is not
     *        being performed.
     * @param localName The local name, or the empty string if
     *        Namespace processing is not being performed.
     * @param qName The qualified name, or the empty string
     *        if qualified names are not available.
     * @param type The attribute type as a string.
     * @param value The attribute value.
     * @exception java.lang.ArrayIndexOutOfBoundsException When the
     *            supplied index does not point to an attribute
     *            in the list.
     */
    public void setAttribute (int index, String uri, String localName,
                              String qName, String type, String value)
    {
        if (index >= 0 && index < length) {
            data[index*5] = uri;
            data[index*5+1] = localName;
            data[index*5+2] = qName;
            data[index*5+3] = type;
            data[index*5+4] = value;
        } else {
            badIndex(index);
        }
    }

    /**
     * Remove an attribute from the list.
     *
     * @param index The index of the attribute (zero-based).
     * @exception java.lang.ArrayIndexOutOfBoundsException When the
     *            supplied index does not point to an attribute
     *            in the list.
     */
    public void removeAttribute (int index)
    {
        if (index >= 0 && index < length) {
            // Shift the following attributes down over the removed slot...
            if (index < length - 1) {
                System.arraycopy(data, (index+1)*5, data, index*5,
                                 (length-index-1)*5);
            }
            // ...then null out the now-unused last five slots so the strings
            // can be garbage-collected.
            index = (length - 1) * 5;
            data [index++] = null;
            data [index++] = null;
            data [index++] = null;
            data [index++] = null;
            data [index] = null;
            length--;
        } else {
            badIndex(index);
        }
    }

    /**
     * Set the Namespace URI of a specific attribute.
     *
     * @param index The index of the attribute (zero-based).
     * @param uri The attribute's Namespace URI, or the empty
     *        string for none.
     * @exception java.lang.ArrayIndexOutOfBoundsException When the
     *            supplied index does not point to an attribute
     *            in the list.
     */
    public void setURI (int index, String uri)
    {
        if (index >= 0 && index < length) {
            data[index*5] = uri;
        } else {
            badIndex(index);
        }
    }

    /**
     * Set the local name of a specific attribute.
     *
     * @param index The index of the attribute (zero-based).
     * @param localName The attribute's local name, or the empty
     *        string for none.
     * @exception java.lang.ArrayIndexOutOfBoundsException When the
     *            supplied index does not point to an attribute
     *            in the list.
     */
    public void setLocalName (int index, String localName)
    {
        if (index >= 0 && index < length) {
            data[index*5+1] = localName;
        } else {
            badIndex(index);
        }
    }

    /**
     * Set the qualified name of a specific attribute.
     *
     * @param index The index of the attribute (zero-based).
     * @param qName The attribute's qualified name, or the empty
     *        string for none.
     * @exception java.lang.ArrayIndexOutOfBoundsException When the
     *            supplied index does not point to an attribute
     *            in the list.
     */
    public void setQName (int index, String qName)
    {
        if (index >= 0 && index < length) {
            data[index*5+2] = qName;
        } else {
            badIndex(index);
        }
    }

    /**
     * Set the type of a specific attribute.
     *
     * @param index The index of the attribute (zero-based).
     * @param type The attribute's type.
     * @exception java.lang.ArrayIndexOutOfBoundsException When the
     *            supplied index does not point to an attribute
     *            in the list.
     */
    public void setType (int index, String type)
    {
        if (index >= 0 && index < length) {
            data[index*5+3] = type;
        } else {
            badIndex(index);
        }
    }

    /**
     * Set the value of a specific attribute.
     *
     * @param index The index of the attribute (zero-based).
     * @param value The attribute's value.
     * @exception java.lang.ArrayIndexOutOfBoundsException When the
     *            supplied index does not point to an attribute
     *            in the list.
     */
    public void setValue (int index, String value)
    {
        if (index >= 0 && index < length) {
            data[index*5+4] = value;
        } else {
            badIndex(index);
        }
    }

    ////////////////////////////////////////////////////////////////////
    // Internal methods.
    ////////////////////////////////////////////////////////////////////

    /**
     * Ensure the internal array's capacity.
     *
     * @param n The minimum number of attributes that the array must
     *        be able to hold.
     */
    private void ensureCapacity (int n) {
        if (n <= 0) {
            return;
        }
        int max;
        if (data == null || data.length == 0) {
            max = 25; // initial capacity: five attributes of five slots each
        }
        else if (data.length >= n * 5) {
            return; // already big enough
        }
        else {
            max = data.length;
        }
        // Double until the requested capacity fits.
        while (max < n * 5) {
            max *= 2;
        }
        String[] newData = new String[max];
        if (length > 0) {
            System.arraycopy(data, 0, newData, 0, length*5);
        }
        data = newData;
    }

    /**
     * Report a bad array index in a manipulator.
     *
     * @param index The index to report.
     * @exception java.lang.ArrayIndexOutOfBoundsException Always.
     */
    private void badIndex (int index)
        throws ArrayIndexOutOfBoundsException
    {
        String msg =
            "Attempt to modify attribute at illegal index: " + index;
        throw new ArrayIndexOutOfBoundsException(msg);
    }

    ////////////////////////////////////////////////////////////////////
    // Internal state.
    ////////////////////////////////////////////////////////////////////

    // Number of attributes currently in the list.
    int length;

    // Attribute slots, five per attribute: uri, localName, qName, type, value.
    String[] data;
}
// end of AttributesImpl.java
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.hops;
import java.util.ArrayList;
import org.apache.sysml.hops.rewrite.HopRewriteUtils;
import org.apache.sysml.lops.Aggregate;
import org.apache.sysml.lops.Group;
import org.apache.sysml.lops.Lop;
import org.apache.sysml.lops.LopsException;
import org.apache.sysml.lops.SortKeys;
import org.apache.sysml.lops.Transform;
import org.apache.sysml.lops.LopProperties.ExecType;
import org.apache.sysml.parser.Expression.DataType;
import org.apache.sysml.parser.Expression.ValueType;
import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
/**
* Reorg (cell) operation: aij
* Properties:
* Symbol: ', rdiag, rshape, rsort
* 1 Operand
*
* Semantic: change indices (in mapper or reducer)
*
*
* NOTE MB: reshape integrated here because (1) ParameterizedBuiltinOp requires name-value pairs for params
* and (2) most importantly semantic of reshape is exactly a reorg op.
*/
public class ReorgOp extends Hop
{
// Debug/testing switch: force the distributed (MR) index sort even when the
// vector would fit in the local memory budget (see the SORT case in constructLops).
public static boolean FORCE_DIST_SORT_INDEXES = false;
// Set in constructLops for SPARK sort when the in-memory sort rewrite applies
// (isSortSPRewriteApplicable); passed into constructCPOrSparkSortLop.
public boolean bSortSPRewriteApplicable = false;
// The concrete reorganization operation (transpose, diag, reshape, sort, ...).
private ReOrgOp op;
private ReorgOp() {
//default constructor for clone
}
/**
 * Creates a reorg hop over a single input.
 *
 * @param l hop name/label
 * @param dt output data type
 * @param vt output value type
 * @param o  concrete reorg operation
 * @param inp the single data input; wired bidirectionally (child and parent links)
 */
public ReorgOp(String l, DataType dt, ValueType vt, ReOrgOp o, Hop inp)
{
super(l, dt, vt);
op = o;
getInput().add(0, inp);
inp.getParent().add(this);
//compute unknown dims and nnz
refreshSizeInformation();
}
public ReorgOp(String l, DataType dt, ValueType vt, ReOrgOp o, ArrayList<Hop> inp)
{
super(l, dt, vt);
op = o;
for( int i=0; i<inp.size(); i++ ) {
Hop in = inp.get(i);
getInput().add(i, in);
in.getParent().add(this);
}
//compute unknown dims and nnz
refreshSizeInformation();
}
/** Returns the concrete reorg operation type of this hop. */
public ReOrgOp getOp()
{
return op;
}
@Override
public String getOpString() {
    // Fix: drop the 'new String("")' antipattern and the needless local
    // accumulation; a single concatenation expresses the same result.
    return "r(" + HopsTransf2String.get(op) + ")";
}
/**
 * Constructs (and caches) the lop DAG for this reorg operation, dispatching
 * on the op type and the chosen execution type (CP/MR/SPARK).
 * <p>
 * NOTE: the statement order below is significant — output dimensions, line
 * numbers, and lop input order must be set exactly as done here.
 *
 * @return the root lop of the constructed (or previously cached) sub-DAG
 * @throws HopsException on unsupported op types or hop-level failures
 * @throws LopsException on lop construction failures
 */
@Override
public Lop constructLops()
throws HopsException, LopsException
{
//return already created lops
if( getLops() != null )
return getLops();
ExecType et = optFindExecType();
switch( op )
{
case TRANSPOSE:
case DIAG:
{
//single transform lop over the data input
Transform transform1 = new Transform( getInput().get(0).constructLops(),
HopsTransf2Lops.get(op), getDataType(), getValueType(), et);
setOutputDimensions(transform1);
setLineNumbers(transform1);
setLops(transform1);
break;
}
case RESHAPE:
{
if( et==ExecType.MR )
{
//MR: transform followed by group+aggregate to merge partial blocks
Transform transform1 = new Transform( getInput().get(0).constructLops(),
HopsTransf2Lops.get(op), getDataType(), getValueType(), et);
setOutputDimensions(transform1);
setLineNumbers(transform1);
for( int i=1; i<=3; i++ ) //rows, cols, byrow
{
Lop ltmp = getInput().get(i).constructLops();
transform1.addInput(ltmp);
ltmp.addOutput(transform1);
}
transform1.setLevel(); //force order of added lops
Group group1 = new Group(
transform1, Group.OperationTypes.Sort, DataType.MATRIX,
getValueType());
setOutputDimensions(group1);
setLineNumbers(group1);
Aggregate agg1 = new Aggregate(
group1, Aggregate.OperationTypes.Sum, DataType.MATRIX,
getValueType(), et);
setOutputDimensions(agg1);
setLineNumbers(agg1);
setLops(agg1);
}
else //CP/SPARK
{
//CP/SPARK: a single transform lop suffices (no partial-block merge needed)
Transform transform1 = new Transform( getInput().get(0).constructLops(),
HopsTransf2Lops.get(op), getDataType(), getValueType(), et);
setOutputDimensions(transform1);
setLineNumbers(transform1);
for( int i=1; i<=3; i++ ) //rows, cols, byrow
{
Lop ltmp = getInput().get(i).constructLops();
transform1.addInput(ltmp);
ltmp.addOutput(transform1);
}
transform1.setLevel(); //force order of added lops
setLops(transform1);
}
break;
}
case SORT:
{
//inputs: data, by-column, descending flag, index-return flag
Hop input = getInput().get(0);
Hop by = getInput().get(1);
Hop desc = getInput().get(2);
Hop ixret = getInput().get(3);
if( et==ExecType.MR )
{
//MR requires constant ordering parameters; otherwise fall back to
//defaults (ascending, no index return) and mark for recompilation
if( !(desc instanceof LiteralOp && ixret instanceof LiteralOp) ) {
LOG.warn("Unsupported non-constant ordering parameters, using defaults and mark for recompilation.");
setRequiresRecompile();
desc = new LiteralOp(false);
ixret = new LiteralOp(false);
}
//Step 1: extraction (if unknown ncol or multiple columns)
Hop vinput = input;
if( input.getDim2() != 1 ) {
vinput = new IndexingOp("tmp1", getDataType(), getValueType(), input, new LiteralOp(1L),
HopRewriteUtils.createValueHop(input, true), by, by, false, true);
vinput.refreshSizeInformation();
HopRewriteUtils.setOutputBlocksizes(vinput, getRowsInBlock(), getColsInBlock());
HopRewriteUtils.copyLineNumbers(this, vinput);
}
//Step 2: Index vector sort
Hop voutput = null;
if( 2*OptimizerUtils.estimateSize(vinput.getDim1(), vinput.getDim2())
> OptimizerUtils.getLocalMemBudget()
|| FORCE_DIST_SORT_INDEXES )
{
//large vector, fallback to MR sort
//sort indexes according to given values
SortKeys sort = new SortKeys(
vinput.constructLops(), HopRewriteUtils.getBooleanValueSafe((LiteralOp)desc),
SortKeys.OperationTypes.Indexes,
vinput.getDataType(), vinput.getValueType(), ExecType.MR);
sort.getOutputParameters().setDimensions(vinput.getDim1(), 1,
vinput.getRowsInBlock(), vinput.getColsInBlock(), vinput.getNnz());
setLineNumbers(sort);
//note: this sortindexes includes also the shift by offsets and
//final aggregate because sideways passing of offsets would
//not nicely fit the current instruction model
setLops(sort);
voutput = this;
}
else
{
//small vector, use in-memory sort
ArrayList<Hop> sinputs = new ArrayList<Hop>();
sinputs.add(vinput);
sinputs.add(new LiteralOp(1)); //by (always vector)
sinputs.add(desc);
sinputs.add(new LiteralOp(true)); //indexreturn (always indexes)
voutput = new ReorgOp("tmp3", getDataType(), getValueType(), ReOrgOp.SORT, sinputs);
HopRewriteUtils.copyLineNumbers(this, voutput);
//explicitly construct CP lop; otherwise there is danger of infinite recursion if forced runtime platform.
voutput.setLops( constructCPOrSparkSortLop(vinput, sinputs.get(1), sinputs.get(2), sinputs.get(3), ExecType.CP, false) );
voutput.getLops().getOutputParameters().setDimensions(vinput.getDim1(), vinput.getDim2(), vinput.getRowsInBlock(), vinput.getColsInBlock(), vinput.getNnz());
setLops( voutput.constructLops() );
}
//Step 3: Data permutation (only required for sorting data)
// -- done via X' = table(seq(), IX') %*% X;
if( !HopRewriteUtils.getBooleanValueSafe((LiteralOp)ixret) )
{
//generate seq
DataGenOp seq = HopRewriteUtils.createSeqDataGenOp(voutput);
seq.setName("tmp4");
seq.refreshSizeInformation();
seq.computeMemEstimate(new MemoTable()); //select exec type
HopRewriteUtils.copyLineNumbers(this, seq);
//generate table
TernaryOp table = new TernaryOp("tmp5", DataType.MATRIX, ValueType.DOUBLE, OpOp3.CTABLE, seq, voutput, new LiteralOp(1L) );
HopRewriteUtils.setOutputBlocksizes(table, getRowsInBlock(), getColsInBlock());
table.refreshSizeInformation();
table.setForcedExecType(ExecType.MR); //force MR
HopRewriteUtils.copyLineNumbers(this, table);
table.setDisjointInputs(true);
table.setOutputEmptyBlocks(false);
//generate matrix mult
AggBinaryOp mmult = HopRewriteUtils.createMatrixMultiply(table, input);
mmult.setForcedExecType(ExecType.MR); //force MR
setLops( mmult.constructLops() );
//cleanups
HopRewriteUtils.removeChildReference(table, input);
}
}
else //CP or Spark
{
//decide whether the Spark in-memory sort rewrite applies before
//constructing the sort lop (flag is consumed by the lop itself)
if( et==ExecType.SPARK && !FORCE_DIST_SORT_INDEXES)
bSortSPRewriteApplicable = isSortSPRewriteApplicable();
Lop transform1 = constructCPOrSparkSortLop(input, by, desc, ixret, et, bSortSPRewriteApplicable);
setOutputDimensions(transform1);
setLineNumbers(transform1);
setLops(transform1);
}
break;
}
default:
throw new HopsException("Unsupported lops construction for operation type '"+op+"'.");
}
//add reblock/checkpoint lops if necessary
constructAndSetLopsDataFlowProperties();
return getLops();
}
/**
 * Builds a CP/Spark sort lop over the data input; the three ordering
 * parameter hops (by, desc, ixret) are wired in as additional lop inputs
 * in that fixed order.
 */
private static Lop constructCPOrSparkSortLop( Hop input, Hop by, Hop desc, Hop ixret, ExecType et, boolean bSortIndInMem )
    throws HopsException, LopsException
{
    Transform sort = new Transform( input.constructLops(), HopsTransf2Lops.get(ReOrgOp.SORT),
        input.getDataType(), input.getValueType(), et, bSortIndInMem);
    Hop[] params = { by, desc, ixret };
    for (int i = 0; i < params.length; i++) {
        Lop plop = params[i].constructLops();
        sort.addInput(plop);
        plop.addOutput(sort);
    }
    sort.setLevel(); //force order of added lops
    return sort;
}
@Override
protected double computeOutputMemEstimate( long dim1, long dim2, long nnz )
{
    //no dedicated mem estimation per op type, because always propagated via refreshSizeInformation
    return OptimizerUtils.estimateSizeExactSparsity(dim1, dim2,
        OptimizerUtils.getSparsity(dim1, dim2, nnz));
}
/**
 * Estimates intermediate memory: only SORT needs a temporary, and only when
 * the "early abort" case does not apply (early abort: constant indexreturn
 * of false on a single-column or empty input).
 */
@Override
protected double computeIntermediateMemEstimate( long dim1, long dim2, long nnz )
{
if( op == ReOrgOp.SORT )
{
Hop ixreturn = getInput().get(3);
if( !(ixreturn instanceof LiteralOp && !HopRewriteUtils.getBooleanValueSafe((LiteralOp)ixreturn)
&& (dim2==1 || nnz==0) ) ) //NOT early abort case
{
//Version 2: memory requirements for temporary index int[] array,
//(temporary double[] array already covered by output)
return dim1 * 4;
//Version 1: memory requirements for temporary index Integer[] array
//8-16 (12) bytes for object, 4byte int payload, 4-8 (8) byte pointers.
//return dim1 * 24;
}
}
//default: no intermediate memory requirements
return 0;
}
/**
 * Infers {rows, cols, nnz} of the output from the memoized input statistics,
 * per op type; returns null when the characteristics cannot be inferred
 * (e.g., unknown input dims, or op types not handled by the switch).
 */
@Override
protected long[] inferOutputCharacteristics( MemoTable memo )
{
long[] ret = null;
Hop input = getInput().get(0);
MatrixCharacteristics mc = memo.getAllInputStats(input);
switch(op)
{
case TRANSPOSE:
{
// input is a [k1,k2] matrix and output is a [k2,k1] matrix
// #nnz in output is exactly the same as in input
if( mc.dimsKnown() )
ret = new long[]{ mc.getCols(), mc.getRows(), mc.getNonZeros() };
break;
}
case DIAG:
{
// NOTE: diag is overloaded according to the number of columns of the input
// NOTE(review): k is taken from getRows() although the V2M comment mentions
// [1,k] inputs as well — confirm upstream guarantees column-vector inputs here.
long k = mc.getRows();
// CASE a) DIAG V2M
// input is a [1,k] or [k,1] matrix, and output is [k,k] matrix
// #nnz in output is in the worst case k => sparsity = 1/k
if( k == 1 )
ret = new long[]{k, k, ((mc.getNonZeros()>=0) ? mc.getNonZeros() : k)};
// CASE b) DIAG M2V
// input is [k,k] matrix and output is [k,1] matrix
// #nnz in the output is likely to be k (a dense matrix)
if( k > 1 )
ret = new long[]{k, 1, ((mc.getNonZeros()>=0) ? Math.min(k,mc.getNonZeros()) : k) };
break;
}
case RESHAPE:
{
// input is a [k1,k2] matrix and output is a [k3,k4] matrix with k1*k2=k3*k4
// #nnz in output is exactly the same as in input
if( mc.dimsKnown() ) {
if( _dim1 > 0 )
ret = new long[]{ _dim1, mc.getRows()*mc.getCols()/_dim1, mc.getNonZeros()};
else if( _dim2 > 0 )
ret = new long[]{ mc.getRows()*mc.getCols()/_dim2, _dim2, mc.getNonZeros()};
}
break;
}
case SORT:
{
// input is a [k1,k2] matrix and output is a [k1,k3] matrix, where k3=k2 if no index return;
// otherwise k3=1 (for the index vector)
Hop input4 = getInput().get(3); //indexreturn
boolean unknownIxRet = !(input4 instanceof LiteralOp);
if( !unknownIxRet ) {
boolean ixret = HopRewriteUtils.getBooleanValueSafe((LiteralOp)input4);
long dim2 = ixret ? 1 : mc.getCols();
long nnz = ixret ? mc.getRows() : mc.getNonZeros();
ret = new long[]{ mc.getRows(), dim2, nnz};
}
else {
ret = new long[]{ mc.getRows(), -1, -1};
}
}
// note: SORT has no break, which is harmless as it is the last case
}
return ret;
}
@Override
public boolean allowsAllExecTypes()
{
    //reorg operations can be compiled for any execution type
    return true;
}
/**
 * Selects the execution type for this reorg op: forced platform first,
 * then memory-based estimates, then a dimension threshold heuristic.
 */
@Override
protected ExecType optFindExecType() throws HopsException {

    checkAndSetForcedPlatform();

    ExecType remoteType = OptimizerUtils.isSparkExecutionMode() ? ExecType.SPARK : ExecType.MR;

    if( _etypeForced != null ) {
        //honor a forced execution platform
        _etype = _etypeForced;
    }
    else {
        if( OptimizerUtils.isMemoryBasedOptLevel() ) {
            _etype = findExecTypeByMemEstimate();
        }
        else {
            //choose CP, if the input dimensions are below threshold or if the input is a vector
            Hop in = getInput().get(0);
            _etype = (in.areDimsBelowThreshold() || in.isVector()) ? ExecType.CP : remoteType;
        }

        //check for valid CP dimensions and matrix size
        checkAndSetInvalidCPDimsAndSize();
    }

    //mark for recompile (forever)
    if( OptimizerUtils.ALLOW_DYN_RECOMPILATION && !dimsKnown(true) && _etype==remoteType )
        setRequiresRecompile();

    return _etype;
}
@Override
public void refreshSizeInformation()
{
    //propagate dims/nnz from the first input according to the reorg semantics
    Hop input1 = getInput().get(0);

    switch(op)
    {
        case TRANSPOSE:
        {
            // input is a [k1,k2] matrix and output is a [k2,k1] matrix
            // #nnz in output is exactly the same as in input
            setDim1(input1.getDim2());
            setDim2(input1.getDim1());
            setNnz(input1.getNnz());
            break;
        }
        case DIAG:
        {
            // NOTE: diag is overloaded according to the number of columns of the input
            long k = input1.getDim1();
            setDim1(k);

            // CASE a) DIAG_V2M
            // input is a [1,k] or [k,1] matrix, and output is [k,k] matrix
            // #nnz in output is in the worst case k => sparsity = 1/k
            if( input1.getDim2()==1 ) {
                setDim2(k);
                setNnz( (input1.getNnz()>=0) ? input1.getNnz() : k );
            }

            // CASE b) DIAG_M2V
            // input is [k,k] matrix and output is [k,1] matrix
            // #nnz in the output is likely to be k (a dense matrix)
            if( input1.getDim2()>1 ){
                setDim2(1);
                setNnz( (input1.getNnz()>=0) ? Math.min(k,input1.getNnz()) : k );
            }

            break;
        }
        case RESHAPE:
        {
            // input is a [k1,k2] matrix and output is a [k3,k4] matrix with k1*k2=k3*k4
            // #nnz in output is exactly the same as in input
            Hop input2 = getInput().get(1); //rows
            Hop input3 = getInput().get(2); //cols
            refreshRowsParameterInformation(input2); //refresh rows
            refreshColsParameterInformation(input3); //refresh cols
            setNnz(input1.getNnz());

            //reshape allows to infer the missing dimension, if the input
            //dims and exactly one output dim are known
            if( !dimsKnown() &&input1.dimsKnown() ) { //reshape allows to infer dims, if input and 1 dim known
                if(_dim1 > 0)
                    _dim2 = (input1._dim1*input1._dim2)/_dim1;
                else if(_dim2 > 0)
                    _dim1 = (input1._dim1*input1._dim2)/_dim2;
            }

            break;
        }
        case SORT:
        {
            // input is a [k1,k2] matrix and output is a [k1,k3] matrix, where k3=k2 if no index return;
            // otherwise k3=1 (for the index vector)
            Hop input4 = getInput().get(3); //indexreturn
            boolean unknownIxRet = !(input4 instanceof LiteralOp);

            _dim1 = input1.getDim1();
            if( !unknownIxRet ) {
                boolean ixret = HopRewriteUtils.getBooleanValueSafe((LiteralOp)input4);
                _dim2 = ixret ? 1 : input1.getDim2();
                _nnz = ixret ? input1.getDim1() : input1.getNnz();
            }
            else {
                //indexreturn not a literal: columns/nnz remain unknown
                _dim2 = -1;
                _nnz = -1;
            }

            break;
        }
    }
}
@Override
public Object clone() throws CloneNotSupportedException
{
    ReorgOp copy = new ReorgOp();

    //copy generic hop attributes, then the reorg-specific op type
    copy.clone(this, false);
    copy.op = op;

    return copy;
}
/**
 * Compares this reorg op against another hop: same op type and the same
 * child hops by reference (relevant for reshape/sort parameters).
 */
@Override
public boolean compare( Hop that )
{
    if( !(that instanceof ReorgOp) )
        return false;

    ReorgOp that2 = (ReorgOp)that;

    //consistently use getInput() (original mixed getInput() and _input)
    //and exit early on the first mismatch
    if( op != that2.op || getInput().size() != that2.getInput().size() )
        return false;

    //compare all childs by reference (see reshape, sort)
    for( int i=0; i<getInput().size(); i++ )
        if( getInput().get(i) != that2.getInput().get(i) )
            return false;

    return true;
}
@Override
public void printMe() throws HopsException
{
    //debug print of this hop and its inputs, guarded by the visited flag
    //to avoid re-printing shared subtrees
    if (LOG.isDebugEnabled()){
        if (getVisited() != VisitStatus.DONE) {
            super.printMe();
            LOG.debug("  Operation: " + op);
            for (Hop h : getInput()) {
                h.printMe();
            }
        }
        //NOTE(review): the visited status is only updated when debug logging
        //is enabled — confirm this is intended and not a misplaced brace.
        setVisited(VisitStatus.DONE);
    }
}
/**
 * Checks whether the Spark sort rewrite is applicable, i.e., whether the
 * broadcast of the input (partitioned matrix or sorted double array) fits
 * into the Spark broadcast memory budget.
 *
 * NOTE(review): the original comment referred to "twice the size of second
 * matrix"; the code below only checks a single size estimate of the first
 * input against the broadcast budget — confirm which is intended.
 *
 * @return true if the broadcast-based sort rewrite can be applied
 */
private boolean isSortSPRewriteApplicable()
{
    boolean ret = false;
    Hop input = getInput().get(0);

    //note: both cases (partitioned matrix, and sorted double array), require to
    //fit the broadcast twice into the local memory budget. Also, the memory
    //constraint only needs to take the rhs into account because the output is
    //guaranteed to be an aggregate of <=16KB
    double size = input.dimsKnown() ?
        OptimizerUtils.estimateSize(input.getDim1(), 1) : //dims known and estimate fits
        input.getOutputMemEstimate();                     //dims unknown but worst-case estimate fits

    if( OptimizerUtils.checkSparkBroadcastMemoryBudget(size) ) {
        ret = true;
    }

    return ret;
}
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.core.query.input;
import org.wso2.siddhi.core.config.SiddhiAppContext;
import org.wso2.siddhi.core.event.ComplexEvent;
import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.Event;
import org.wso2.siddhi.core.event.stream.MetaStreamEvent;
import org.wso2.siddhi.core.event.stream.StreamEvent;
import org.wso2.siddhi.core.event.stream.StreamEventPool;
import org.wso2.siddhi.core.event.stream.converter.StreamEventConverter;
import org.wso2.siddhi.core.event.stream.converter.StreamEventConverterFactory;
import org.wso2.siddhi.core.query.output.ratelimit.OutputRateLimiter;
import org.wso2.siddhi.core.query.processor.Processor;
import org.wso2.siddhi.core.util.statistics.LatencyTracker;

import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;
/**
 * {@link org.wso2.siddhi.core.stream.StreamJunction.Receiver} implementation to receive events to be fed into multi
 * stream processors which consume multiple streams.
 */
public class MultiProcessStreamReceiver extends ProcessStreamReceiver {

    protected Processor[] nextProcessors;
    protected int processCount;
    protected int[] eventSequence;
    protected String queryName;
    protected OutputRateLimiter outputRateLimiter;
    private MetaStreamEvent[] metaStreamEvents;
    private StreamEventPool[] streamEventPools;
    private StreamEventConverter[] streamEventConverters;
    //per-thread holder for events whose publishing is deferred to the output rate limiter
    private static ThreadLocal<ReturnEventHolder> multiProcessReturn = new ThreadLocal<>();

    public MultiProcessStreamReceiver(String streamId, int processCount, LatencyTracker latencyTracker,
                                      String queryName, SiddhiAppContext siddhiAppContext) {
        super(streamId, latencyTracker, queryName, siddhiAppContext);
        this.processCount = processCount;
        this.queryName = queryName;
        nextProcessors = new Processor[processCount];
        metaStreamEvents = new MetaStreamEvent[processCount];
        streamEventPools = new StreamEventPool[processCount];
        streamEventConverters = new StreamEventConverter[processCount];
        eventSequence = new int[processCount];
        for (int i = 0; i < eventSequence.length; i++) {
            eventSequence[i] = i;
        }
    }

    public MultiProcessStreamReceiver clone(String key) {
        return new MultiProcessStreamReceiver(streamId + key, processCount, latencyTracker, queryName,
                siddhiAppContext);
    }

    /**
     * Processes a single borrowed event through the processor chain of the given
     * sequence index, guarded by the stream lock and latency tracking when configured.
     */
    private void process(int eventSequence, StreamEvent borrowedEvent) {
        if (lockWrapper != null) {
            lockWrapper.lock();
        }
        try {
            if (latencyTracker != null) {
                try {
                    latencyTracker.markIn();
                    processAndClear(eventSequence, borrowedEvent);
                } finally {
                    latencyTracker.markOut();
                }
            } else {
                processAndClear(eventSequence, borrowedEvent);
            }
        } finally {
            if (lockWrapper != null) {
                lockWrapper.unlock();
            }
        }
    }

    @Override
    public void receive(ComplexEvent complexEvent) {
        ComplexEvent aComplexEvent = complexEvent;
        while (aComplexEvent != null) {
            final ComplexEvent currentEvent = aComplexEvent;
            dispatch((converter, borrowedEvent) -> converter.convertComplexEvent(currentEvent, borrowedEvent));
            aComplexEvent = aComplexEvent.getNext();
        }
    }

    @Override
    public void receive(Event event) {
        dispatch((converter, borrowedEvent) -> converter.convertEvent(event, borrowedEvent));
    }

    @Override
    public void receive(Event[] events) {
        for (Event event : events) {
            dispatch((converter, borrowedEvent) -> converter.convertEvent(event, borrowedEvent));
        }
    }

    @Override
    public void receive(List<Event> events) {
        for (Event event : events) {
            dispatch((converter, borrowedEvent) -> converter.convertEvent(event, borrowedEvent));
        }
    }

    @Override
    public void receive(long timestamp, Object[] data) {
        dispatch((converter, borrowedEvent) -> converter.convertData(timestamp, data, borrowedEvent));
    }

    /**
     * Common dispatch path shared by all {@code receive(...)} overloads (the original
     * code duplicated this logic five times). For every registered event sequence a
     * stream event is borrowed from the pool, populated via {@code conversion} and
     * pushed to the corresponding processor chain. When an output rate limiter is set,
     * events deferred into {@link #multiProcessReturn} during processing are collected
     * and published to the rate limiter callbacks outside the synchronized section.
     *
     * @param conversion populates the borrowed {@link StreamEvent} using the
     *                   sequence's {@link StreamEventConverter}
     */
    private void dispatch(BiConsumer<StreamEventConverter, StreamEvent> conversion) {
        if (outputRateLimiter == null) {
            synchronized (this) {
                stabilizeStates();
                for (int anEventSequence : eventSequence) {
                    StreamEvent borrowedEvent = streamEventPools[anEventSequence].borrowEvent();
                    conversion.accept(streamEventConverters[anEventSequence], borrowedEvent);
                    process(anEventSequence, borrowedEvent);
                }
            }
        } else {
            List<ReturnEventHolder> returnEventHolderList = new ArrayList<>(eventSequence.length);
            try {
                multiProcessReturn.set(new ReturnEventHolder());
                synchronized (this) {
                    stabilizeStates();
                    for (int anEventSequence : eventSequence) {
                        StreamEvent borrowedEvent = streamEventPools[anEventSequence].borrowEvent();
                        conversion.accept(streamEventConverters[anEventSequence], borrowedEvent);
                        process(anEventSequence, borrowedEvent);
                        if (multiProcessReturn.get() != null
                                && multiProcessReturn.get().complexEventChunk != null) {
                            returnEventHolderList.add(multiProcessReturn.get());
                            multiProcessReturn.set(new ReturnEventHolder());
                        }
                    }
                }
            } finally {
                //always clear the thread-local, even if processing throws
                multiProcessReturn.set(null);
            }
            for (ReturnEventHolder returnEventHolder : returnEventHolderList) {
                outputRateLimiter.sendToCallBacks(returnEventHolder.complexEventChunk);
            }
        }
    }

    protected void processAndClear(int processIndex, StreamEvent streamEvent) {
        ComplexEventChunk<StreamEvent> currentStreamEventChunk = new ComplexEventChunk<StreamEvent>(
                streamEvent, streamEvent, batchProcessingAllowed);
        nextProcessors[processIndex].process(currentStreamEventChunk);
    }

    protected void stabilizeStates() {
        //no-op by default; subclasses may synchronize multi-stream state here
    }

    public void setNext(Processor nextProcessor) {
        //fill the first free slot only: one processor per consumed stream
        for (int i = 0, nextLength = nextProcessors.length; i < nextLength; i++) {
            if (nextProcessors[i] == null) {
                nextProcessors[i] = nextProcessor;
                break;
            }
        }
    }

    public void setMetaStreamEvent(MetaStreamEvent metaStreamEvent) {
        //fill the first free slot only: one meta event per consumed stream
        for (int i = 0, nextLength = metaStreamEvents.length; i < nextLength; i++) {
            if (metaStreamEvents[i] == null) {
                metaStreamEvents[i] = metaStreamEvent;
                break;
            }
        }
    }

    @Override
    public boolean toStream() {
        return metaStreamEvents[0].getEventType() == MetaStreamEvent.EventType.DEFAULT ||
                metaStreamEvents[0].getEventType() == MetaStreamEvent.EventType.WINDOW;
    }

    public void setStreamEventPool(StreamEventPool streamEventPool) {
        //fill the first free slot only: one pool per consumed stream
        for (int i = 0, nextLength = streamEventPools.length; i < nextLength; i++) {
            if (streamEventPools[i] == null) {
                streamEventPools[i] = streamEventPool;
                break;
            }
        }
    }

    public void init() {
        //construct a converter for the first unconfigured sequence
        for (int i = 0, nextLength = streamEventConverters.length; i < nextLength; i++) {
            if (streamEventConverters[i] == null) {
                streamEventConverters[i] = StreamEventConverterFactory.constructEventConverter(metaStreamEvents[i]);
                break;
            }
        }
    }

    public static ThreadLocal<ReturnEventHolder> getMultiProcessReturn() {
        return multiProcessReturn;
    }

    public void setOutputRateLimiter(OutputRateLimiter outputRateLimiter) {
        this.outputRateLimiter = outputRateLimiter;
    }

    /**
     * Class to hold the events which are differed publishing
     */
    public class ReturnEventHolder {
        ComplexEventChunk complexEventChunk;

        public void setReturnEvents(ComplexEventChunk complexEventChunk) {
            if (this.complexEventChunk == null) {
                this.complexEventChunk = new ComplexEventChunk(complexEventChunk.isBatch());
            }
            this.complexEventChunk.add(complexEventChunk.getFirst());
        }
    }
}
| |
package org.apache.maven.plugin.assembly.utils;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Properties;
import junit.framework.TestCase;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Build;
import org.apache.maven.model.Model;
import org.apache.maven.plugin.assembly.AssemblerConfigurationSource;
import org.apache.maven.plugin.assembly.archive.DefaultAssemblyArchiverTest;
import org.apache.maven.plugin.assembly.archive.task.testutils.ArtifactMock;
import org.apache.maven.plugin.assembly.format.AssemblyFormattingException;
import org.apache.maven.plugin.assembly.model.Assembly;
import org.apache.maven.project.MavenProject;
import org.easymock.classextension.EasyMockSupport;
import static org.apache.maven.plugin.assembly.utils.AssemblyFormatUtils.*;
import static org.easymock.EasyMock.expect;
public class AssemblyFormatUtilsTest
    extends TestCase
{

    private final EasyMockSupport mockManager = new EasyMockSupport();

    //---- fixRelativeRefs: normalization of '.' and '..' segments in relative paths ----

    public void testFixRelativePathRefs_ShouldRemoveRelativeRefToCurrentDir()
        throws AssemblyFormattingException
    {
        assertEquals( "path/", AssemblyFormatUtils.fixRelativeRefs( "./path/" ) );
    }

    public void testFixRelativePathRefs_ShouldRemoveEmbeddedSameDirRef()
        throws AssemblyFormattingException
    {
        assertEquals( "some/path/", AssemblyFormatUtils.fixRelativeRefs( "some/./path/" ) );
        //backslash-separated (Windows-style) paths are normalized the same way
        assertEquals( "some\\path\\", AssemblyFormatUtils.fixRelativeRefs( "some\\.\\path\\" ) );
    }

    public void testFixRelativePathRefs_ShouldRemoveEmbeddedParentDirRef()
        throws AssemblyFormattingException
    {
        assertEquals( "path/", AssemblyFormatUtils.fixRelativeRefs( "some/../path/" ) );
    }

    public void testFixRelativePathRefs_ShouldTruncateRelativeRefToParentDir()
        throws AssemblyFormattingException
    {
        assertEquals( "path/", AssemblyFormatUtils.fixRelativeRefs( "../path/" ) );
    }

    //---- getDistroName: finalName / assembly-id / classifier combinations ----

    public void testGetDistroName_ShouldUseJustFinalNameWithNoAppendAssemblyIdOrClassifier()
    {
        verifyDistroName( "assembly", null, "finalName", false, "finalName" );
    }

    public void testGetDistroName_ShouldUseFinalNamePlusClassifierWhenAppendAssemblyIdIsNull()
    {
        verifyDistroName( "assembly", "classifier", "finalName", false, "finalName-classifier" );
    }
    //---- getOutputDir: expression resolution against artifact/module/project metadata
    //---- (the ${artifact.*}, ${module.*}, ${pom.*} and ${project.*} prefixes) ----

    public void testGetOutputDir_ShouldResolveGroupIdInOutDir_UseArtifactInfo()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingArtifactProject( "${artifact.groupId}", null, "group", null, null, null, null, "group/" );
    }

    public void testGetOutputDir_ShouldResolveArtifactIdInOutDir_UseArtifactInfo()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingArtifactProject( "${artifact.artifactId}", null, null, "artifact", null, null, null,
                                             "artifact/" );
    }

    public void testGetOutputDir_ShouldResolveVersionInOutDir_UseArtifactInfo()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingArtifactProject( "${artifact.version}", null, null, null, "version", null, null, "version/" );
    }

    public void testGetOutputDir_ShouldResolveBuildFinalNameInOutDir_UseArtifactInfo()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingArtifactProject( "${artifact.build.finalName}", null, null, null, null, "finalName", null,
                                             "finalName/" );
    }

    public void testGetOutputDir_ShouldResolveGroupIdInOutDir_UseModuleInfo()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingModuleProject( "${module.groupId}", null, "group", null, null, null, null, "group/" );
    }

    public void testGetOutputDir_ShouldResolveArtifactIdInOutDir_UseModuleInfo()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingModuleProject( "${module.artifactId}", null, null, "artifact", null, null, null,
                                           "artifact/" );
    }

    public void testGetOutputDir_ShouldResolveVersionInOutDir_UseModuleInfo()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingModuleProject( "${module.version}", null, null, null, "version", null, null, "version/" );
    }

    public void testGetOutputDir_ShouldResolveBuildFinalNameInOutDir_UseModuleInfo()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingModuleProject( "${module.build.finalName}", null, null, null, null, "finalName", null,
                                           "finalName/" );
    }

    public void testGetOutputDir_ShouldResolveGroupIdInOutDir_UseExplicitMainProject()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingMainProject( "${pom.groupId}", null, "group", null, null, null, null, "group/" );
    }

    public void testGetOutputDir_ShouldResolveArtifactIdInOutDir_UseExplicitMainProject()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingMainProject( "${pom.artifactId}", null, null, "artifact", null, null, null, "artifact/" );
    }

    public void testGetOutputDir_ShouldResolveVersionInOutDir_UseExplicitMainProject()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingMainProject( "${pom.version}", null, null, null, "version", null, null, "version/" );
    }

    public void testGetOutputDir_ShouldResolveBuildFinalNameInOutDir_UseExplicitMainProject()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingMainProject( "${pom.build.finalName}", null, null, null, null, "finalName", null,
                                         "finalName/" );
    }

    public void testGetOutputDir_ShouldResolveGroupIdInOutDir_UseExplicitMainProject_projectRef()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingMainProject( "${project.groupId}", null, "group", null, null, null, null, "group/" );
    }

    public void testGetOutputDir_ShouldResolveArtifactIdInOutDir_UseExplicitMainProject_projectRef()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingMainProject( "${project.artifactId}", null, null, "artifact", null, null, null, "artifact/" );
    }

    public void testGetOutputDir_ShouldResolveVersionInOutDir_UseExplicitMainProject_projectRef()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingMainProject( "${project.version}", null, null, null, "version", null, null, "version/" );
    }

    public void testGetOutputDir_ShouldResolveBuildFinalNameInOutDir_UseExplicitMainProject_projectRef()
        throws AssemblyFormattingException
    {
        verifyOutputDir( "${project.build.finalName}", null, "finalName", "finalName/" );
    }
    //---- getOutputDir: literal paths, trailing-slash handling, relative-ref cleanup,
    //---- and project-property resolution ----

    public void testGetOutputDir_ShouldNotAlterOutDirWhenIncludeBaseFalseAndNoExpressions()
        throws AssemblyFormattingException
    {
        verifyOutputDir( "dir/", "finalName", null, "dir/" );
    }

    public void testGetOutputDir_ShouldNotAlterOutDirWhenIncludeBaseFalseAndNoExpressions_CheckWithBackslash()
        throws AssemblyFormattingException
    {
        verifyOutputDir( "dir\\", "finalName", null, "dir\\" );
    }

    public void testGetOutputDir_ShouldAppendSlashToOutDirWhenMissingAndIncludeBaseFalseAndNoExpressions()
        throws AssemblyFormattingException
    {
        verifyOutputDir( "dir", "finalName", null, "dir/" );
    }

    public void testGetOutputDir_ShouldResolveGroupIdInOutDir()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingMainProject( "${groupId}", "finalName", "group", null, null, null, null, "group/" );
    }

    public void testGetOutputDir_ShouldResolveArtifactIdInOutDir()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingMainProject( "${artifactId}", "finalName", null, "artifact", null, null, null, "artifact/" );
    }

    public void testGetOutputDir_ShouldResolveVersionInOutDir()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingMainProject( "${version}", "finalName", null, null, "version", null, null, "version/" );
    }

    public void testGetOutputDir_ShouldResolveVersionInLargerOutDirExpr()
        throws AssemblyFormattingException
    {
        verifyOutputDirUsingMainProject( "my-special-${version}", "finalName", null, null, "99", null, null,
                                         "my-special-99/" );
    }

    public void testGetOutputDir_ShouldResolveFinalNameInOutDir()
        throws AssemblyFormattingException
    {
        verifyOutputDir( "${finalName}", "finalName", null, "finalName/" );
    }

    public void testGetOutputDir_ShouldResolveBuildFinalNameInOutDir()
        throws AssemblyFormattingException
    {
        verifyOutputDir( "${build.finalName}", "finalName", null, "finalName/" );
    }

    public void testGetOutputDir_ShouldReturnEmptyPathWhenAllInputIsEmptyAndIncludeBaseFalse()
        throws AssemblyFormattingException
    {
        verifyOutputDir( null, null, null, "" );
    }

    public void testGetOutputDir_ShouldRemoveRelativeRefToCurrentDir()
        throws AssemblyFormattingException
    {
        verifyOutputDir( "./path/", null, null, "path/" );
    }

    public void testGetOutputDir_ShouldRemoveEmbeddedSameDirRef()
        throws AssemblyFormattingException
    {
        verifyOutputDir( "some/./path/", null, null, "some/path/" );
    }

    public void testGetOutputDir_ShouldRemoveEmbeddedParentDirRef()
        throws AssemblyFormattingException
    {
        verifyOutputDir( "some/../path/", null, null, "path/" );
    }

    public void testGetOutputDir_ShouldTruncateRelativeRefToParentDir()
        throws AssemblyFormattingException
    {
        verifyOutputDir( "../path/", null, null, "path/" );
    }

    public void testGetOutputDir_ShouldResolveProjectProperty()
        throws AssemblyFormattingException
    {
        final Properties props = new Properties();
        props.setProperty( "myProperty", "value" );

        verifyOutputDirUsingMainProject( "file.${myProperty}", null, null, null, null, null, props, "file.value/" );
    }

    public void testGetOutputDir_ShouldResolveProjectPropertyAltExpr()
        throws AssemblyFormattingException
    {
        final Properties props = new Properties();
        props.setProperty( "myProperty", "value" );

        verifyOutputDirUsingMainProject( "file.${pom.properties.myProperty}", null, null, null, null, null, props,
                                         "file.value/" );
    }
    //A snapshot artifact with a timestamped version must have ${artifact.baseVersion}
    //resolve to its -SNAPSHOT base version, not the timestamped one.
    public void testEvalFileNameMapping_ShouldResolveArtifactIdAndBaseVersionInOutDir_UseArtifactInfo_WithValidMainProject()
        throws AssemblyFormattingException
    {
        final MavenProject mainProject = createProject( "group", "main", "1", null );

        final String artifactVersion = "2-20070807.112233-1";
        final String artifactBaseVersion = "2-SNAPSHOT";
        final MavenProject artifactProject = createProject( "group", "artifact", artifactVersion, null );
        final ArtifactMock artifactMock =
            new ArtifactMock( mockManager, "group", "artifact", artifactVersion, "jar", true, artifactBaseVersion );

        artifactProject.setArtifact( artifactMock.getArtifact() );
        final MavenSession session = mockManager.createMock(MavenSession.class);
        expect( session.getExecutionProperties()).andReturn( null ).anyTimes();
        expect( session.getUserProperties()).andReturn( new Properties( ) ).anyTimes();

        final AssemblerConfigurationSource cs = mockManager.createMock( AssemblerConfigurationSource.class );
        expect( cs.getMavenSession()).andReturn( session ).anyTimes();
        DefaultAssemblyArchiverTest.setupInterpolators( cs);

        mockManager.replayAll();

        //resolve the mapping via the interpolator-based evaluateFileNameMapping API
        final String result =
            evaluateFileNameMapping( "${artifact.artifactId}-${artifact.baseVersion}", artifactMock.getArtifact(),
                                     mainProject, null, cs, moduleProjectInterpolator( null ),
                                     artifactProjectInterpolator( artifactProject ) );

        assertEquals( "artifact-2-SNAPSHOT", result );

        mockManager.verifyAll();

        // clear out for next call.
        mockManager.resetAll();
    }
public void testEvalFileNameMapping_ShouldResolveGroupIdInOutDir_UseArtifactInfo()
throws AssemblyFormattingException
{
verifyEvalFileNameMappingUsingArtifactProject( "${artifact.groupId}", null, "group", null, null, null, "group",
null );
}
public void testEvalFileNameMapping_ShouldResolveArtifactIdInOutDir_UseArtifactInfo()
throws AssemblyFormattingException
{
verifyEvalFileNameMappingUsingArtifactProject( "${artifact.artifactId}", null, null, "artifact", null, null,
"artifact", null );
}
public void testEvalFileNameMapping_ShouldResolveVersionInOutDir_UseArtifactInfo()
throws AssemblyFormattingException
{
verifyEvalFileNameMappingUsingArtifactProject( "${artifact.version}", null, null, null, "version", null,
"version", null );
}
public void testEvalFileNameMapping_ShouldResolveGroupIdInOutDir_UseArtifactInfoAndModulePrefix()
throws AssemblyFormattingException
{
verifyEvalFileNameMappingUsingModuleProject( "${module.groupId}", null, "group", null, null, null, "group",
null );
}
public void testEvalFileNameMapping_ShouldResolveArtifactIdInOutDir_UseArtifactInfoAndModulePrefix()
throws AssemblyFormattingException
{
verifyEvalFileNameMappingUsingModuleProject( "${module.artifactId}", null, null, "artifact", null, null,
"artifact", null );
}
public void testEvalFileNameMapping_ShouldResolveVersionInOutDir_UseArtifactInfoAndModulePrefix()
throws AssemblyFormattingException
{
verifyEvalFileNameMappingUsingModuleProject( "${module.version}", null, null, null, "version", null, "version",
null );
}
public void testEvalFileNameMapping_ShouldResolveGroupIdInOutDir_UseExplicitMainProject()
    throws AssemblyFormattingException
{
    // "${pom.*}" expressions resolve against the main (executing) project.
    final String expression = "${pom.groupId}";
    verifyEvalFileNameMappingUsingMainProject( expression, null, "group", null, null, null, "group", null );
}

public void testEvalFileNameMapping_ShouldResolveArtifactIdInOutDir_UseExplicitMainProject()
    throws AssemblyFormattingException
{
    final String expression = "${pom.artifactId}";
    verifyEvalFileNameMappingUsingMainProject( expression, null, null, "artifact", null, null, "artifact", null );
}

public void testEvalFileNameMapping_ShouldResolveVersionInOutDir_UseExplicitMainProject()
    throws AssemblyFormattingException
{
    final String expression = "${pom.version}";
    verifyEvalFileNameMappingUsingMainProject( expression, null, null, null, "version", null, "version", null );
}
public void testEvalFileNameMapping_ShouldResolveGroupIdInOutDir_UseExplicitMainProject_projectRef()
    throws AssemblyFormattingException
{
    // "${project.*}" is an alias for the main project, like "${pom.*}".
    final String expression = "${project.groupId}";
    verifyEvalFileNameMappingUsingMainProject( expression, null, "group", null, null, null, "group", null );
}

public void testEvalFileNameMapping_ShouldResolveArtifactIdInOutDir_UseExplicitMainProject_projectRef()
    throws AssemblyFormattingException
{
    final String expression = "${project.artifactId}";
    verifyEvalFileNameMappingUsingMainProject( expression, null, null, "artifact", null, null, "artifact", null );
}

public void testEvalFileNameMapping_ShouldResolveVersionInOutDir_UseExplicitMainProject_projectRef()
    throws AssemblyFormattingException
{
    final String expression = "${project.version}";
    verifyEvalFileNameMappingUsingMainProject( expression, null, null, null, "version", null, "version", null );
}
public void testEvalFileNameMapping_ShouldRemoveRelativeRefToCurrentDir()
    throws AssemblyFormattingException
{
    // A leading "./" must be stripped from the evaluated mapping.
    final String expression = "./path/";
    verifyEvalFileNameMappingUsingMainProject( expression, null, null, null, null, null, "path/", null );
}

public void testEvalFileNameMapping_ShouldRemoveEmbeddedSameDirRef()
    throws AssemblyFormattingException
{
    // An embedded "/./" segment must collapse away.
    final String expression = "some/./path/";
    verifyEvalFileNameMappingUsingMainProject( expression, null, null, null, null, null, "some/path/", null );
}

public void testEvalFileNameMapping_ShouldRemoveEmbeddedParentDirRef()
    throws AssemblyFormattingException
{
    // "some/../path/" must normalize to just "path/".
    final String expression = "some/../path/";
    verifyEvalFileNameMappingUsingMainProject( expression, null, null, null, null, null, "path/", null );
}

public void testEvalFileNameMapping_ShouldTruncateRelativeRefToParentDir()
    throws AssemblyFormattingException
{
    // A leading "../" cannot escape the output root and is dropped.
    final String expression = "../path/";
    verifyEvalFileNameMappingUsingMainProject( expression, null, null, null, null, null, "path/", null );
}
public void testEvalFileNameMapping_ShouldPassExpressionThroughUnchanged()
    throws AssemblyFormattingException
{
    // A mapping with no expressions is returned verbatim.
    verifyEvalFileNameMapping( "filename", null, null, "filename", null );
}

public void testEvalFileNameMapping_ShouldInsertClassifierAheadOfExtension()
    throws AssemblyFormattingException
{
    final String expression = "filename-${artifact.classifier}.ext";
    verifyEvalFileNameMapping( expression, "classifier", null, "filename-classifier.ext", null );
}

public void testEvalFileNameMapping_ShouldAppendDashClassifierWhenClassifierPresent()
    throws AssemblyFormattingException
{
    // "${dashClassifier?}" expands to "-<classifier>" when one is set...
    verifyEvalFileNameMapping( "filename${dashClassifier?}", "classifier", null, "filename-classifier", null );
}

public void testEvalFileNameMapping_ShouldNotAppendDashClassifierWhenClassifierMissing()
    throws AssemblyFormattingException
{
    // ...and to nothing when the classifier is null...
    verifyEvalFileNameMapping( "filename${dashClassifier?}", null, null, "filename", null );
}

public void testEvalFileNameMapping_ShouldNotAppendDashClassifierWhenClassifierEmpty()
    throws AssemblyFormattingException
{
    // ...or empty.
    verifyEvalFileNameMapping( "filename${dashClassifier?}", "", null, "filename", null );
}
public void testEvalFileNameMapping_ShouldResolveGroupId()
    throws AssemblyFormattingException
{
    // Bare "${groupId}" (no prefix) resolves against the main project.
    verifyEvalFileNameMappingUsingMainProject( "${groupId}", null, "group", null, null, null, "group", null );
}

public void testEvalFileNameMapping_ShouldResolveArtifactId()
    throws AssemblyFormattingException
{
    verifyEvalFileNameMappingUsingMainProject( "${artifactId}", null, null, "artifact", null, null, "artifact", null );
}

public void testEvalFileNameMapping_ShouldResolveVersion()
    throws AssemblyFormattingException
{
    verifyEvalFileNameMappingUsingMainProject( "${version}", null, null, null, "version", null, "version", null );
}

public void testEvalFileNameMapping_ShouldResolveExtension()
    throws AssemblyFormattingException
{
    // The artifact handler's extension backs "${artifact.extension}".
    verifyEvalFileNameMapping( "file.${artifact.extension}", null, "ext", "file.ext", null );
}
public void testEvalFileNameMapping_ShouldResolveProjectProperty()
    throws AssemblyFormattingException
{
    // A property defined on the project is usable directly by name.
    final Properties projectProperties = new Properties();
    projectProperties.setProperty( "myProperty", "value" );
    verifyEvalFileNameMapping( "file.${myProperty}", null, null, "file.value", projectProperties );
}

public void testEvalFileNameMapping_ShouldResolveProjectPropertyAltExpr()
    throws AssemblyFormattingException
{
    // ...and also through the explicit "pom.properties." prefix.
    final Properties projectProperties = new Properties();
    projectProperties.setProperty( "myProperty", "value" );
    verifyEvalFileNameMapping( "file.${pom.properties.myProperty}", null, null, "file.value", projectProperties );
}

public void testEvalFileNameMapping_ShouldResolveSystemPropertyWithoutMainProjectPresent()
    throws AssemblyFormattingException
{
    // Expected value is read from the same JVM that performs the evaluation.
    final String expected = "file." + System.getProperty( "java.version" );
    verifyEvalFileNameMapping( "file.${java.version}", null, null, expected, null );
}
/**
 * Convenience overload: evaluates the expression against default
 * ("group"/"artifact"/"version") main-project coordinates.
 */
private void verifyEvalFileNameMapping( final String expression, final String classifier, final String extension,
                                        final String checkValue, final Properties projectProperties )
    throws AssemblyFormattingException
{
    verifyEvalFileNameMappingUsingMainProject( expression, classifier,
                                               null, null, null,
                                               extension, checkValue, projectProperties );
}
/**
 * Evaluates the expression with the given coordinates applied to the MAIN
 * project; the module and artifact projects are "unknown" fillers.
 */
private void verifyEvalFileNameMappingUsingMainProject( final String expression, final String classifier,
                                                        final String groupId, final String artifactId,
                                                        final String version, final String extension,
                                                        final String checkValue,
                                                        final Properties projectProperties )
    throws AssemblyFormattingException
{
    final MavenProject moduleProject = createProject( "unknown", "unknown", "unknown", null );
    final MavenProject artifactProject = createProject( "unknown", "unknown", "unknown", null );
    final MavenProject mainProject = createProject( groupId, artifactId, version, projectProperties );
    verifyEvalFileNameMapping( expression, classifier, extension, mainProject, moduleProject, artifactProject,
                               checkValue );
}
/**
 * Evaluates the expression with the given coordinates applied to the
 * ARTIFACT project; the main and module projects are "unknown" fillers.
 */
private void verifyEvalFileNameMappingUsingArtifactProject( final String expression, final String classifier,
                                                            final String groupId, final String artifactId,
                                                            final String version, final String extension,
                                                            final String checkValue,
                                                            final Properties projectProperties )
    throws AssemblyFormattingException
{
    final MavenProject mainProject = createProject( "unknown", "unknown", "unknown", null );
    final MavenProject moduleProject = createProject( "unknown", "unknown", "unknown", null );
    final MavenProject artifactProject = createProject( groupId, artifactId, version, projectProperties );
    verifyEvalFileNameMapping( expression, classifier, extension, mainProject, moduleProject, artifactProject,
                               checkValue );
}
/**
 * Evaluates the expression with the given coordinates applied to the
 * MODULE project; the main and artifact projects are "unknown" fillers.
 */
private void verifyEvalFileNameMappingUsingModuleProject( final String expression, final String classifier,
                                                          final String groupId, final String artifactId,
                                                          final String version, final String extension,
                                                          final String checkValue,
                                                          final Properties projectProperties )
    throws AssemblyFormattingException
{
    final MavenProject mainProject = createProject( "unknown", "unknown", "unknown", null );
    final MavenProject artifactProject = createProject( "unknown", "unknown", "unknown", null );
    final MavenProject moduleProject = createProject( groupId, artifactId, version, projectProperties );
    verifyEvalFileNameMapping( expression, classifier, extension, mainProject, moduleProject, artifactProject,
                               checkValue );
}
/**
 * Builds a bare MavenProject from the given coordinates, defaulting any
 * null coordinate to "group"/"artifact"/"version".
 *
 * @param projectProperties optional project properties; may be null
 */
private MavenProject createProject( String groupId, String artifactId, String version,
                                    final Properties projectProperties )
{
    final Model model = new Model();
    model.setGroupId( groupId != null ? groupId : "group" );
    model.setArtifactId( artifactId != null ? artifactId : "artifact" );
    model.setVersion( version != null ? version : "version" );
    model.setProperties( projectProperties );
    return new MavenProject( model );
}
/**
 * Core assertion for AssemblyFormatUtils.evaluateFileNameMapping(): builds
 * artifact/module mocks from the given projects, evaluates the expression
 * and compares the result with {@code checkValue}.
 *
 * EasyMock lifecycle: expectations are registered first, then replayAll(),
 * verifyAll() and resetAll() so the shared mockManager can be reused by the
 * next verification in the same test method.
 */
private void verifyEvalFileNameMapping( final String expression, final String classifier, final String extension,
                                        final MavenProject mainProject, final MavenProject moduleProject,
                                        final MavenProject artifactProject, final String checkValue )
    throws AssemblyFormattingException
{
    final ArtifactMock artifactMock =
        new ArtifactMock( mockManager, artifactProject.getGroupId(), artifactProject.getArtifactId(),
                          artifactProject.getVersion(), extension, classifier, false, null );

    // the module artifact always uses a "jar" type and no classifier
    final ArtifactMock moduleArtifactMock =
        new ArtifactMock( mockManager, moduleProject.getGroupId(), moduleProject.getArtifactId(),
                          moduleProject.getVersion(), "jar", false, null );

    // the mocked session serves the JVM's System properties as execution properties
    final MavenSession session = mockManager.createMock( MavenSession.class );
    expect( session.getExecutionProperties()).andReturn( System.getProperties() ).anyTimes();
    expect(session.getUserProperties()).andReturn( new Properties( ) ).anyTimes();

    final AssemblerConfigurationSource cs = mockManager.createMock( AssemblerConfigurationSource.class );
    expect( cs.getMavenSession()).andReturn( session ).anyTimes();
    DefaultAssemblyArchiverTest.setupInterpolators( cs, mainProject);

    mockManager.replayAll();

    final String result =
        AssemblyFormatUtils.evaluateFileNameMapping( expression, artifactMock.getArtifact(), mainProject,
                                                     moduleArtifactMock.getArtifact(), cs,
                                                     moduleProjectInterpolator( moduleProject ),
                                                     artifactProjectInterpolator( artifactProject ) );
    // Alternative interpolator-based API kept for reference:
    /*
    final String result =
        AssemblyFormatUtils.evaluateFileNameMapping( expression,
                                                     moduleArtifactInterpolator( moduleArtifactMock.getArtifact() ),
                                                     moduleProjectInterpolator( moduleProject ),
                                                     artifactInterpolator( artifactMock.getArtifact() ),
                                                     artifactProjectInterpolator( artifactProject ),
                                                     mainArtifactPropsOnly( mainProject ),
                                                     classifierRules( artifactMock.getArtifact() ),
                                                     FixedStringSearchInterpolator.create( new PropertiesBasedValueSource( System.getProperties() )) );
    */
    assertEquals( checkValue, result );

    mockManager.verifyAll();

    // clear out for next call.
    mockManager.resetAll();
}
/**
 * Convenience overload: checks getOutputDirectory() with default
 * main-project coordinates and only the project final name varying.
 */
private void verifyOutputDir( final String outDir, final String finalName, final String projectFinalName,
                              final String checkValue )
    throws AssemblyFormattingException
{
    verifyOutputDirUsingMainProject( outDir, finalName,
                                     null, null, null,
                                     projectFinalName, null, checkValue );
}
/**
 * Checks getOutputDirectory() with the given coordinates/final-name applied
 * to the MAIN project; module and artifact projects are "unknown" fillers.
 */
private void verifyOutputDirUsingMainProject( final String outDir, final String finalName, final String groupId,
                                              final String artifactId, final String version,
                                              final String projectFinalName, final Properties properties,
                                              final String checkValue )
    throws AssemblyFormattingException
{
    final MavenProject mainProject = createProject( groupId, artifactId, version, properties );
    if ( projectFinalName != null )
    {
        final Build build = new Build();
        build.setFinalName( projectFinalName );
        mainProject.getModel().setBuild( build );
    }
    final MavenProject moduleProject = createProject( "unknown", "unknown", "unknown", null );
    final MavenProject artifactProject = createProject( "unknown", "unknown", "unknown", null );
    verifyOutputDir( outDir, finalName, mainProject, moduleProject, artifactProject, checkValue );
}
/**
 * Checks getOutputDirectory() with the given coordinates/final-name applied
 * to the MODULE project; main and artifact projects are "unknown" fillers.
 */
private void verifyOutputDirUsingModuleProject( final String outDir, final String finalName, final String groupId,
                                                final String artifactId, final String version,
                                                final String projectFinalName, final Properties properties,
                                                final String checkValue )
    throws AssemblyFormattingException
{
    final MavenProject moduleProject = createProject( groupId, artifactId, version, properties );
    if ( projectFinalName != null )
    {
        final Build build = new Build();
        build.setFinalName( projectFinalName );
        moduleProject.getModel().setBuild( build );
    }
    final MavenProject mainProject = createProject( "unknown", "unknown", "unknown", null );
    final MavenProject artifactProject = createProject( "unknown", "unknown", "unknown", null );
    verifyOutputDir( outDir, finalName, mainProject, moduleProject, artifactProject, checkValue );
}
/**
 * Checks getOutputDirectory() with the given coordinates/final-name applied
 * to the ARTIFACT project; main and module projects are "unknown" fillers.
 */
private void verifyOutputDirUsingArtifactProject( final String outDir, final String finalName,
                                                  final String groupId, final String artifactId,
                                                  final String version, final String projectFinalName,
                                                  final Properties properties, final String checkValue )
    throws AssemblyFormattingException
{
    final MavenProject artifactProject = createProject( groupId, artifactId, version, properties );
    if ( projectFinalName != null )
    {
        final Build build = new Build();
        build.setFinalName( projectFinalName );
        artifactProject.getModel().setBuild( build );
    }
    final MavenProject mainProject = createProject( "unknown", "unknown", "unknown", null );
    final MavenProject moduleProject = createProject( "unknown", "unknown", "unknown", null );
    verifyOutputDir( outDir, finalName, mainProject, moduleProject, artifactProject, checkValue );
}
/**
 * Core assertion for AssemblyFormatUtils.getOutputDirectory(): runs the
 * evaluation against a mocked session/configuration source and compares
 * the result with {@code checkValue}.
 *
 * EasyMock lifecycle: expectations are registered before replayAll() and
 * the shared mockManager is verified and reset afterwards for reuse.
 */
private void verifyOutputDir( final String outDir, final String finalName, final MavenProject mainProject,
                              final MavenProject moduleProject, final MavenProject artifactProject,
                              final String checkValue )
    throws AssemblyFormattingException
{
    // the mocked session serves the JVM's System properties as execution properties
    final MavenSession session = mockManager.createMock( MavenSession.class );
    expect( session.getExecutionProperties()).andReturn( System.getProperties()).anyTimes();
    expect( session.getUserProperties()).andReturn( new Properties( ) ).anyTimes();

    final AssemblerConfigurationSource cs = mockManager.createMock( AssemblerConfigurationSource.class );
    expect( cs.getMavenSession()).andReturn( session ).anyTimes();
    DefaultAssemblyArchiverTest.setupInterpolators( cs, mainProject);

    String result;

    mockManager.replayAll();

    result =
        AssemblyFormatUtils.getOutputDirectory( outDir, finalName, cs,
                                                moduleProjectInterpolator( moduleProject ),
                                                artifactProjectInterpolator( artifactProject ) );

    assertEquals( checkValue, result );

    mockManager.verifyAll();

    mockManager.resetAll();
}
/**
 * Asserts AssemblyFormatUtils.getDistributionName() for the given assembly
 * id against a configuration source mocked with the supplied final name,
 * classifier and appendAssemblyId flag.
 */
private void verifyDistroName( final String assemblyId, final String classifier, final String finalName,
                               final boolean appendAssemblyId, final String checkValue )
{
    final MockAndControlForGetDistroName mac =
        new MockAndControlForGetDistroName( finalName, appendAssemblyId, classifier );

    mockManager.replayAll();

    final Assembly assembly = new Assembly();
    assembly.setId( assemblyId );

    final String result = AssemblyFormatUtils.getDistributionName( assembly, mac.configSource );

    assertEquals( checkValue, result );

    mockManager.verifyAll();

    // clear it out for the next call.
    mockManager.resetAll();
}
/**
 * Bundles an AssemblerConfigurationSource mock pre-programmed with the
 * three values getDistributionName() reads: final name, classifier and the
 * appendAssemblyId flag.
 */
private final class MockAndControlForGetDistroName
{
    final AssemblerConfigurationSource configSource;

    private final String classifier;

    private final boolean isAssemblyIdAppended;

    private final String finalName;

    public MockAndControlForGetDistroName( final String finalName, final boolean isAssemblyIdAppended,
                                           final String classifier )
    {
        this.finalName = finalName;
        this.isAssemblyIdAppended = isAssemblyIdAppended;
        this.classifier = classifier;

        configSource = mockManager.createMock (AssemblerConfigurationSource.class);

        enableExpectations();
    }

    private void enableExpectations()
    {
        // each getter must be consulted at least once by the code under test
        expect(configSource.getClassifier()).andReturn( classifier ).atLeastOnce();
        expect(configSource.isAssemblyIdAppended()).andReturn( isAssemblyIdAppended ).atLeastOnce();
        expect( configSource.getFinalName()).andReturn( finalName ).atLeastOnce();
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import org.antlr.runtime.TokenRewriteStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
/**
* UnparseTranslator is used to "unparse" objects such as views when their
 * definition is stored. It has a translations map where it is possible to replace all the
* text with the appropriate escaped version [say invites.ds will be replaced with
* `invites`.`ds` and the entire query is processed like this and stored as
* Extended text in table's metadata]. This holds all individual translations and
* where they apply in the stream. The unparse is lazy and happens when
* SemanticAnalyzer.saveViewDefinition() calls TokenRewriteStream.toString().
*
*/
public class UnparseTranslator {
  // Maps token start index -> replacement for the region ending at
  // Translation.tokenStopIndex; the TreeMap keeps regions in stream order.
  private final NavigableMap<Integer, Translation> translations;
  // Deferred "copy" translations, applied after all direct translations.
  private final List<CopyTranslation> copyTranslations;
  // No translations are recorded unless enable() has been called.
  private boolean enabled;
  private Configuration conf;

  public UnparseTranslator(Configuration conf) {
    this.conf = conf;
    translations = new TreeMap<Integer, Translation>();
    copyTranslations = new ArrayList<CopyTranslation>();
  }

  /**
   * Enable this translator.
   */
  void enable() {
    enabled = true;
  }

  /**
   * @return whether this translator has been enabled
   */
  boolean isEnabled() {
    return enabled;
  }

  /**
   * Register a translation to be performed as part of unparse. ANTLR imposes
   * strict conditions on the translations and errors out during
   * TokenRewriteStream.toString() if there is an overlap. It expects all
   * the translations to be disjoint (see HIVE-2439).
   * If the translation overlaps with any previously
   * registered translation, then it must be either
   * identical or a prefix (in which case it is ignored),
   * or else it must extend the existing translation (i.e.
   * the existing translation must be a prefix/suffix of the new translation).
   * All other overlap cases result in assertion failures.
   *
   * @param node
   *          target node whose subtree is to be replaced
   *
   * @param replacementText
   *          text to use as replacement
   */
  void addTranslation(ASTNode node, String replacementText) {
    if (!enabled) {
      return;
    }

    if (node.getOrigin() != null) {
      // This node was parsed while loading the definition of another view
      // being referenced by the one being created, and we don't want
      // to track any expansions for the underlying view.
      return;
    }

    int tokenStartIndex = node.getTokenStartIndex();
    int tokenStopIndex = node.getTokenStopIndex();
    if (tokenStopIndex < 0) {
      // this is for artificially added tokens
      return;
    }
    Translation translation = new Translation();
    translation.tokenStopIndex = tokenStopIndex;
    translation.replacementText = replacementText;

    // Sanity check for overlap with regions already being expanded
    assert (tokenStopIndex >= tokenStartIndex);

    // Keys of existing entries that are fully contained by the new region;
    // they are removed below in favor of the new, larger translation.
    List<Integer> subsetEntries = new ArrayList<Integer>();
    // Compare against every registered region starting at or before the new
    // region's stop index, checking containment in either direction.
    for (Map.Entry<Integer, Translation> existingEntry :
           translations.headMap(tokenStopIndex, true).entrySet()) {
      // check if the new entry contains the existing
      if (existingEntry.getValue().tokenStopIndex <= tokenStopIndex &&
          existingEntry.getKey() >= tokenStartIndex) {
        // collect the existing entry: the newer entry is a super-set of it
        subsetEntries.add(existingEntry.getKey());
        // check if the existing entry contains the new
      } else if (existingEntry.getValue().tokenStopIndex >= tokenStopIndex &&
          existingEntry.getKey() <= tokenStartIndex) {
        // the enclosing translation must already include the new replacement
        assert (existingEntry.getValue().replacementText.contains(replacementText));
        // we don't need to add this new entry since there's already an overlapping one
        return;
      }
    }
    // remove any existing entries that are contained by the new one
    for (Integer index : subsetEntries) {
      translations.remove(index);
    }

    // It's all good: create a new entry in the map (or update existing one)
    translations.put(tokenStartIndex, translation);
  }

  /**
   * Register a translation for a table name.
   *
   * @param tableName
   *          source node (a TOK_TABNAME, or a bare Identifier) to be replaced
   * @param currentDatabaseName
   *          database used to qualify an unqualified table reference
   */
  void addTableNameTranslation(ASTNode tableName, String currentDatabaseName) {
    if (!enabled) {
      return;
    }
    if (tableName.getToken().getType() == HiveParser.Identifier) {
      addIdentifierTranslation(tableName);
      return;
    }
    assert (tableName.getToken().getType() == HiveParser.TOK_TABNAME);
    assert (tableName.getChildCount() <= 2);

    if (tableName.getChildCount() == 2) {
      // already qualified as "db.table": escape both parts individually
      addIdentifierTranslation((ASTNode)tableName.getChild(0));
      addIdentifierTranslation((ASTNode)tableName.getChild(1));
    }
    else {
      // transform the table reference to an absolute reference (i.e., "db.table")
      StringBuilder replacementText = new StringBuilder();
      replacementText.append(HiveUtils.unparseIdentifier(currentDatabaseName, conf));
      replacementText.append('.');

      ASTNode identifier = (ASTNode)tableName.getChild(0);
      String identifierText = BaseSemanticAnalyzer.unescapeIdentifier(identifier.getText());
      replacementText.append(HiveUtils.unparseIdentifier(identifierText, conf));

      addTranslation(identifier, replacementText.toString());
    }
  }

  /**
   * Register a translation for an identifier: the raw text is unescaped and
   * then re-escaped (backquoted) according to the configuration.
   *
   * @param identifier
   *          source node (which must be an identifier) to be replaced
   */
  void addIdentifierTranslation(ASTNode identifier) {
    if (!enabled) {
      return;
    }
    assert (identifier.getToken().getType() == HiveParser.Identifier);
    String replacementText = identifier.getText();
    replacementText = BaseSemanticAnalyzer.unescapeIdentifier(replacementText);
    replacementText = HiveUtils.unparseIdentifier(replacementText, conf);
    addTranslation(identifier, replacementText);
  }

  /**
   * Register a "copy" translation in which a node will be translated into
   * whatever the translation turns out to be for another node (after
   * previously registered translations have already been performed). Deferred
   * translations are performed in the order they are registered, and follow
   * the same rules regarding overlap as non-copy translations.
   *
   * @param targetNode node whose subtree is to be replaced
   *
   * @param sourceNode the node providing the replacement text
   */
  void addCopyTranslation(ASTNode targetNode, ASTNode sourceNode) {
    if (!enabled) {
      return;
    }

    if (targetNode.getOrigin() != null) {
      // nodes originating from an underlying view are not tracked
      // (see addTranslation)
      return;
    }

    CopyTranslation copyTranslation = new CopyTranslation();
    copyTranslation.targetNode = targetNode;
    copyTranslation.sourceNode = sourceNode;
    copyTranslations.add(copyTranslation);
  }

  /**
   * Apply all translations on the given token stream.
   *
   * @param tokenRewriteStream
   *          rewrite-capable stream
   */
  void applyTranslations(TokenRewriteStream tokenRewriteStream) {
    for (Map.Entry<Integer, Translation> entry : translations.entrySet()) {
      // NOTE(review): the "> 0" guard also skips a translation anchored at
      // token index 0, not only the negative (artificial) indexes the
      // comment below describes -- confirm that is intended.
      if (entry.getKey() > 0) { // negative means the key didn't exist in the original
                                // stream (i.e.: we changed the tree)
        tokenRewriteStream.replace(
          entry.getKey(),
          entry.getValue().tokenStopIndex,
          entry.getValue().replacementText);
      }
    }
    for (CopyTranslation copyTranslation : copyTranslations) {
      String replacementText = tokenRewriteStream.toString(
        copyTranslation.sourceNode.getTokenStartIndex(),
        copyTranslation.sourceNode.getTokenStopIndex());
      String currentText = tokenRewriteStream.toString(
        copyTranslation.targetNode.getTokenStartIndex(),
        copyTranslation.targetNode.getTokenStopIndex());
      if (currentText.equals(replacementText)) {
        // copy is a nop, so skip it--this is important for avoiding
        // spurious overlap assertions
        continue;
      }

      // Call addTranslation just to get the assertions for overlap
      // checking.
      addTranslation(copyTranslation.targetNode, replacementText);

      tokenRewriteStream.replace(
        copyTranslation.targetNode.getTokenStartIndex(),
        copyTranslation.targetNode.getTokenStopIndex(),
        replacementText);
    }
  }

  // A single replacement region; its start index is the key in translations.
  private static class Translation {
    int tokenStopIndex;
    String replacementText;

    @Override
    public String toString() {
      return "" + tokenStopIndex + " -> " + replacementText;
    }
  }

  // Deferred translation: targetNode's text is replaced by whatever
  // sourceNode unparses to once the direct translations have run.
  private static class CopyTranslation {
    ASTNode targetNode;
    ASTNode sourceNode;
  }

  /**
   * Discard all registered translations and disable the translator.
   */
  public void clear() {
    translations.clear();
    copyTranslations.clear();
    enabled = false;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document.util;
import java.io.Closeable;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.sql.Timestamp;
import java.util.Comparator;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.TreeMap;

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.AbstractIterator;

import org.apache.jackrabbit.oak.commons.OakVersion;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.commons.StringUtils;
import org.apache.jackrabbit.oak.plugins.document.ClusterNodeInfo;
import org.apache.jackrabbit.oak.plugins.document.Collection;
import org.apache.jackrabbit.oak.plugins.document.DocumentStore;
import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
import org.apache.jackrabbit.oak.plugins.document.NodeDocument;
import org.apache.jackrabbit.oak.plugins.document.Revision;
import org.apache.jackrabbit.oak.plugins.document.RevisionVector;
import org.apache.jackrabbit.oak.plugins.document.StableRevisionComparator;
import org.apache.jackrabbit.oak.stats.Clock;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.transform;
import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.isDeletedEntry;
/**
* Utility methods.
*/
public class Utils {
    private static final Logger LOG = LoggerFactory.getLogger(Utils.class);

    // Lazily resolved module version; null until first use (resolver not
    // visible in this chunk).
    private static String MODULE_VERSION = null;

    /**
     * Approximate length of a Revision string.
     */
    private static final int REVISION_LENGTH =
            new Revision(System.currentTimeMillis(), 0, 0).toString().length();

    /**
     * The length of path (in characters), whose UTF-8 representation can not
     * possibly be too large to be used for the primary key for the document
     * store.
     */
    public static final int PATH_SHORT = Integer.getInteger("oak.pathShort", 165);

    /**
     * The maximum length of the parent path, in bytes. If the parent path is
     * longer, then the id of a document is no longer the path, but the hash of
     * the parent, and then the node name.
     */
    public static final int PATH_LONG = Integer.getInteger("oak.pathLong", 350);

    /**
     * The maximum size of a node name, in bytes. This is only a problem for long paths.
     */
    public static final int NODE_NAME_LIMIT = Integer.getInteger("oak.nodeNameLimit", 150);

    // Use the compile-time constant instead of a Charset.forName() lookup.
    private static final Charset UTF_8 = StandardCharsets.UTF_8;

    // Lower-case hex alphabet used by encodeHexString().
    private static final char[] HEX_DIGITS = "0123456789abcdef".toCharArray();

    /**
     * A predicate for property and _deleted names.
     */
    public static final Predicate<String> PROPERTY_OR_DELETED = new Predicate<String>() {
        @Override
        public boolean apply(@Nullable String input) {
            return Utils.isPropertyName(input) || isDeletedEntry(input);
        }
    };
/**
 * Returns the depth of a path: zero for the root path "/", otherwise the
 * number of '/' separators the string contains.
 */
public static int pathDepth(String path) {
    if ("/".equals(path)) {
        return 0;
    }
    int depth = 0;
    int pos = path.indexOf('/');
    while (pos != -1) {
        depth++;
        pos = path.indexOf('/', pos + 1);
    }
    return depth;
}
/**
 * Estimates the in-memory footprint (in bytes) of the given map, using
 * fixed per-type costs plus string-size estimates; clamped to
 * Integer.MAX_VALUE on overflow. Returns 0 for a null map.
 */
@SuppressWarnings("unchecked")
public static int estimateMemoryUsage(Map<?, Object> map) {
    if (map == null) {
        return 0;
    }
    long total = 0;
    for (Entry<?, Object> entry : map.entrySet()) {
        Object key = entry.getKey();
        // Revision keys have a fixed cost; everything else is string-sized.
        total += (key instanceof Revision) ? 32 : StringUtils.estimateMemoryUsage(key.toString());
        Object value = entry.getValue();
        if (value == null) {
            // nothing to add
        } else if (value instanceof String) {
            total += StringUtils.estimateMemoryUsage((String) value);
        } else if (value instanceof Long) {
            total += 16;
        } else if (value instanceof Boolean || value instanceof Integer) {
            total += 8;
        } else if (value instanceof Map) {
            total += 8 + (long) estimateMemoryUsage((Map<String, Object>) value);
        } else {
            throw new IllegalArgumentException("Can't estimate memory usage of " + value);
        }
    }
    // overhead for the map object: TreeMap (80) + unmodifiable wrapper (32)
    total += 112;
    // plus roughly 64 bytes per entry
    total += (long) map.size() * 64;
    if (total > Integer.MAX_VALUE) {
        LOG.debug("Estimated memory footprint larger than Integer.MAX_VALUE: {}.", total);
        total = Integer.MAX_VALUE;
    }
    return (int) total;
}
/**
 * Escapes a property name for storage: an empty name becomes "_", a leading
 * '_' or '$' is prefixed with '_', '.' becomes "\d" and '\' becomes "\\".
 * Returns the original string unchanged when no escaping is needed.
 */
public static String escapePropertyName(String propertyName) {
    final int length = propertyName.length();
    if (length == 0) {
        return "_";
    }
    // Lazily created only once escaping turns out to be required.
    StringBuilder escaped = null;
    int start = 0;
    final char first = propertyName.charAt(0);
    if (first == '_' || first == '$') {
        escaped = new StringBuilder(length + 1);
        escaped.append('_').append(first);
        start = 1;
    }
    for (int i = start; i < length; i++) {
        final char c = propertyName.charAt(i);
        final char replacement;
        if (c == '.') {
            replacement = 'd';
        } else if (c == '\\') {
            replacement = '\\';
        } else {
            replacement = 0;
        }
        if (replacement != 0) {
            if (escaped == null) {
                escaped = new StringBuilder(propertyName.substring(0, i));
            }
            escaped.append('\\').append(replacement);
        } else if (escaped != null) {
            escaped.append(c);
        }
    }
    return escaped == null ? propertyName : escaped.toString();
}
/**
 * Reverses escapePropertyName(): strips the protective leading '_' (for
 * keys starting "__", "_$" or the bare "_") and decodes "\d" back to '.'
 * and "\\" back to '\'. Returns the input unchanged when nothing to decode.
 */
public static String unescapePropertyName(String key) {
    int length = key.length();
    if (key.startsWith("_")
            && (key.startsWith("__") || key.startsWith("_$") || length == 1)) {
        key = key.substring(1);
        length--;
    }
    // Lazily created only once a backslash escape is found.
    StringBuilder unescaped = null;
    for (int i = 0; i < length; i++) {
        char c = key.charAt(i);
        if (c == '\\') {
            if (unescaped == null) {
                unescaped = new StringBuilder(key.substring(0, i));
            }
            // consume the escaped character; only 'd' is re-mapped
            c = key.charAt(++i);
            if (c == 'd') {
                c = '.';
            }
            unescaped.append(c);
        } else if (unescaped != null) {
            unescaped.append(c);
        }
    }
    return unescaped == null ? key : unescaped.toString();
}
/**
 * Returns whether the key names a property. Keys not starting with '_' are
 * always properties; "__" and "_$" are the escaped forms of names that
 * themselves start with '_' or '$'.
 */
public static boolean isPropertyName(String key) {
    if (!key.startsWith("_")) {
        return true;
    }
    return key.startsWith("__") || key.startsWith("_$");
}
/**
 * Maps a path to its document id. Short paths become "&lt;depth&gt;:&lt;path&gt;";
 * long paths become "&lt;depth&gt;:h&lt;sha256(parent)&gt;/&lt;name&gt;" so the id stays
 * bounded in size.
 */
public static String getIdFromPath(String path) {
    int depth = Utils.pathDepth(path);
    if (!isLongPath(path)) {
        return depth + ":" + path;
    }
    // Long path: hash the parent and keep only the node name in clear text.
    MessageDigest digest;
    try {
        digest = MessageDigest.getInstance("SHA-256");
    } catch (NoSuchAlgorithmException e) {
        throw new RuntimeException(e);
    }
    String parent = PathUtils.getParentPath(path);
    String name = PathUtils.getName(path);
    byte[] hash = digest.digest(parent.getBytes(UTF_8));
    StringBuilder sb = new StringBuilder(digest.getDigestLength() * 2 + name.length() + 6);
    sb.append(depth).append(":h");
    encodeHexString(hash, sb).append("/").append(name);
    return sb.toString();
}
/**
 * Encodes the given data as hexadecimal string representation and appends
 * it to the {@code StringBuilder}. The hex digits are in lower case.
 *
 * @param data the bytes to encode.
 * @param sb the hexadecimal string representation is appended to this
 *           {@code StringBuilder}.
 * @return the {@code StringBuilder} passed to this method.
 */
public static StringBuilder encodeHexString(byte[] data, StringBuilder sb) {
    for (int i = 0; i < data.length; i++) {
        int unsigned = data[i] & 0xFF;
        sb.append(HEX_DIGITS[unsigned >>> 4]);
        sb.append(HEX_DIGITS[unsigned & 0xF]);
    }
    return sb;
}
/**
 * Returns the parent id for given id if possible
 *
 * <p>It would return null in following cases
 * <ul>
 *     <li>If id is from long path</li>
 *     <li>If id is for root path</li>
 *     <li>If id is for an invalid path</li>
 * </ul>
 * @param id id for which parent id needs to be determined
 * @return parent id. null if parent id cannot be determined
 */
@Nullable
public static String getParentId(String id) {
    if (Utils.isIdFromLongPath(id)) {
        // hashed ids do not carry the full path, so no parent can be derived
        return null;
    }
    String path = Utils.getPathFromId(id);
    if (!PathUtils.isValid(path) || PathUtils.denotesRoot(path)) {
        return null;
    }
    return Utils.getIdFromPath(PathUtils.getParentPath(path));
}
/**
 * Determines whether the given path is considered long, i.e. whether its
 * id must be stored in hashed form.
 *
 * @param path the path to check.
 * @return {@code true} if the parent path exceeds the length limit.
 * @throws IllegalArgumentException if the path's name exceeds
 *          {@code NODE_NAME_LIMIT} bytes (UTF-8).
 */
public static boolean isLongPath(String path) {
    // the most common case: a short path;
    // avoid calculating the parent path
    if (path.length() < PATH_SHORT) {
        return false;
    }
    // check if the parent path is long
    byte[] parentBytes = PathUtils.getParentPath(path).getBytes(UTF_8);
    if (parentBytes.length < PATH_LONG) {
        return false;
    }
    if (PathUtils.getName(path).getBytes(UTF_8).length > NODE_NAME_LIMIT) {
        throw new IllegalArgumentException("Node name is too long: " + path);
    }
    return true;
}
/**
 * Determines whether the given id was created from a long path, i.e.
 * whether the character following the first colon is an 'h' (hash marker).
 *
 * @param id the id to check.
 * @return {@code true} if the id stems from a long (hashed) path.
 */
public static boolean isIdFromLongPath(String id) {
    int colon = id.indexOf(':');
    if (colon < 0 || colon >= id.length() - 1) {
        return false;
    }
    return id.charAt(colon + 1) == 'h';
}
/**
 * Extracts the path from the given id.
 *
 * @param id the id, which must not stem from a long (hashed) path.
 * @return the path portion of the id (everything after the first colon).
 * @throws IllegalArgumentException if the id stems from a long path.
 */
public static String getPathFromId(String id) {
    if (isIdFromLongPath(id)) {
        throw new IllegalArgumentException("Id is hashed: " + id);
    }
    // everything after the first colon is the path
    return id.substring(id.indexOf(':') + 1);
}
/**
 * Extracts the depth prefix from the given id.
 *
 * @param id an id of the form {@code "<depth>:<path>"}.
 * @return the depth encoded before the first colon.
 * @throws IllegalArgumentException if the id has no colon or the prefix
 *          is not a valid integer.
 */
public static int getDepthFromId(String id) throws IllegalArgumentException {
    int colon = id.indexOf(':');
    if (colon >= 0) {
        try {
            return Integer.parseInt(id.substring(0, colon));
        } catch (NumberFormatException ignored) {
            // fall through to the IllegalArgumentException below
        }
    }
    throw new IllegalArgumentException("Invalid id: " + id);
}
/**
 * Builds the path of a previous document for the given path, revision and
 * height, of the form {@code "p<path>/<revision>/<height>"}.
 *
 * @param path the absolute path of the main document.
 * @param r the revision of the previous document.
 * @param height the height of the previous document.
 * @return the path of the previous document.
 * @throws IllegalArgumentException if {@code path} is not absolute.
 */
public static String getPreviousPathFor(String path, Revision r, int height) {
    if (!PathUtils.isAbsolute(path)) {
        throw new IllegalArgumentException("path must be absolute: " + path);
    }
    StringBuilder prev = new StringBuilder(path.length() + REVISION_LENGTH + 3);
    prev.append("p").append(path);
    // ensure exactly one separator before the revision part
    if (prev.charAt(prev.length() - 1) != '/') {
        prev.append('/');
    }
    r.toStringBuilder(prev).append("/").append(height);
    return prev.toString();
}
/**
 * Returns the id of a previous document for the given path, revision and
 * height. Shorthand for
 * {@code getIdFromPath(getPreviousPathFor(path, r, height))}.
 *
 * @param path the absolute path of the main document.
 * @param r the revision of the previous document.
 * @param height the height of the previous document.
 * @return the id of the previous document.
 */
public static String getPreviousIdFor(String path, Revision r, int height) {
    return getIdFromPath(getPreviousPathFor(path, r, height));
}
/**
 * Determines if the passed id belongs to a previous doc. Such ids have
 * the character 'p' directly after the first colon.
 *
 * @param id id to check.
 * @return {@code true} if the id belongs to a previous doc.
 */
public static boolean isPreviousDocId(String id){
    int colon = id.indexOf(':');
    // a previous doc id has the form "<depth>:p<path>/..."
    return colon > 0 && colon + 1 < id.length() && id.charAt(colon + 1) == 'p';
}
/**
 * Determines if the passed id belongs to a leaf level previous doc,
 * i.e. a previous doc id ending with height "/0".
 *
 * @param id id to check.
 * @return {@code true} if the id belongs to a leaf level previous doc.
 */
public static boolean isLeafPreviousDocId(String id){
    if (!isPreviousDocId(id)) {
        return false;
    }
    // leaf level previous docs have height 0
    return id.endsWith("/0");
}
/**
 * Deep copy of a map that may contain map values. Nested maps are copied
 * into new {@link TreeMap}s, preserving the comparator of sorted maps.
 *
 * @param source the source map.
 * @param target the target map, receiving the copied entries.
 * @param <K> the type of the map key.
 */
public static <K> void deepCopyMap(Map<K, Object> source, Map<K, Object> target) {
    for (Entry<K, Object> entry : source.entrySet()) {
        Object value = entry.getValue();
        if (value instanceof Map<?, ?>) {
            // keep the ordering of sorted maps by reusing their comparator
            Comparator<? super K> comparator = null;
            if (value instanceof SortedMap) {
                @SuppressWarnings("unchecked")
                SortedMap<K, Object> sorted = (SortedMap<K, Object>) value;
                comparator = sorted.comparator();
            }
            @SuppressWarnings("unchecked")
            Map<K, Object> nested = (Map<K, Object>) value;
            Map<K, Object> copy = new TreeMap<K, Object>(comparator);
            deepCopyMap(nested, copy);
            value = copy;
        }
        target.put(entry.getKey(), value);
    }
}
/**
 * Returns the lower key limit to retrieve the children of the given
 * <code>path</code>.
 *
 * @param path a path.
 * @return the lower key limit.
 */
public static String getKeyLowerLimit(String path) {
    // id of a fictitious first child "a", with the trailing name
    // character removed to form the lower bound
    String limit = getIdFromPath(PathUtils.concat(path, "a"));
    return limit.substring(0, limit.length() - 1);
}
/**
 * Returns the upper key limit to retrieve the children of the given
 * <code>path</code>.
 *
 * @param path a path.
 * @return the upper key limit.
 */
public static String getKeyUpperLimit(String path) {
    // id of a fictitious child "z"; drop its name and append "0"
    // to form the upper bound
    String limit = getIdFromPath(PathUtils.concat(path, "z"));
    return limit.substring(0, limit.length() - 2) + "0";
}
/**
 * Returns the parentId extracted from the fromKey. The fromKey is usually
 * constructed using {@code Utils#getKeyLowerLimit}.
 *
 * @param fromKey key used as start key in queries.
 * @return the parentId, if possible; {@code null} otherwise.
 */
@Nullable
public static String getParentIdFromLowerLimit(String fromKey){
    // a key ending in '/' (e.g. "2:/foo/") lacks a name; append a
    // placeholder name so it parses as a proper id
    String key = fromKey.endsWith("/") ? fromKey + "a" : fromKey;
    return getParentId(key);
}
/**
 * Returns <code>true</code> if a revision tagged with the given tag should
 * be considered committed, <code>false</code> otherwise. Committed
 * revisions have a tag which equals 'c' or starts with 'c-'.
 *
 * @param tag the tag (may be <code>null</code>).
 * @return <code>true</code> if committed; <code>false</code> otherwise.
 */
public static boolean isCommitted(@Nullable String tag) {
    if (tag == null) {
        return false;
    }
    return "c".equals(tag) || tag.startsWith("c-");
}
/**
 * Resolves the commit revision for the given revision <code>rev</code> and
 * the associated commit tag. Tags starting with "c-" encode the actual
 * commit revision after the prefix.
 *
 * @param rev a revision.
 * @param tag the associated commit tag.
 * @return the actual commit revision for <code>rev</code>.
 */
@NotNull
public static Revision resolveCommitRevision(@NotNull Revision rev,
                                             @NotNull String tag) {
    if (checkNotNull(tag).startsWith("c-")) {
        // branch commit: the commit revision is encoded in the tag
        return Revision.fromString(tag.substring(2));
    }
    return rev;
}
/**
 * Closes the given object if it is of type {@link java.io.Closeable}.
 * Mostly used to close Iterators/Iterables which are backed by e.g. a
 * DBCursor. An {@link IOException} thrown on close is logged, not
 * propagated.
 *
 * @param obj the object to close.
 */
public static void closeIfCloseable(Object obj){
    if (!(obj instanceof Closeable)) {
        return;
    }
    try {
        ((Closeable) obj).close();
    } catch (IOException e) {
        // best effort: log and continue
        LOG.warn("Error occurred while closing {}", obj, e);
    }
}
/**
 * Provides a readable, fixed-width (23 character) string for the given
 * timestamp.
 *
 * @param timestamp the timestamp in milliseconds.
 * @return the formatted timestamp, truncated to millisecond precision.
 */
public static String timestampToString(long timestamp){
    // Timestamp.toString() emits a variable number of fraction digits;
    // pad with "00" so the substring below never underflows
    String padded = new Timestamp(timestamp) + "00";
    return padded.substring(0, 23);
}
/**
 * Returns the revision with the newer timestamp or {@code null} if both
 * revisions are {@code null}. The implementation will return the first
 * revision if both have the same timestamp.
 *
 * @param a the first revision (or {@code null}).
 * @param b the second revision (or {@code null}).
 * @return the revision with the newer timestamp.
 */
@Nullable
public static Revision max(@Nullable Revision a, @Nullable Revision b) {
    // delegates to the comparator-based overload with the stable ordering
    return max(a, b, StableRevisionComparator.INSTANCE);
}
/**
 * Returns the revision which is considered more recent or {@code null} if
 * both revisions are {@code null}. The implementation will return the
 * first revision if both are considered equal. The comparison is done
 * using the provided comparator.
 *
 * @param a the first revision (or {@code null}).
 * @param b the second revision (or {@code null}).
 * @param c the comparator.
 * @return the revision considered more recent.
 */
@Nullable
public static Revision max(@Nullable Revision a,
                           @Nullable Revision b,
                           @NotNull Comparator<Revision> c) {
    if (a != null && b != null) {
        return c.compare(a, b) >= 0 ? a : b;
    }
    // at most one is non-null; prefer it (both null yields null)
    return a != null ? a : b;
}
/**
 * Returns the revision with the older timestamp or {@code null} if both
 * revisions are {@code null}. The implementation will return the first
 * revision if both have the same timestamp.
 *
 * @param a the first revision (or {@code null}).
 * @param b the second revision (or {@code null}).
 * @return the revision with the older timestamp.
 */
@Nullable
public static Revision min(@Nullable Revision a, @Nullable Revision b) {
    // delegates to the comparator-based overload with the stable ordering
    return min(a, b, StableRevisionComparator.INSTANCE);
}
/**
 * Returns the revision which is considered older or {@code null} if
 * both revisions are {@code null}. The implementation will return the
 * first revision if both are considered equal. The comparison is done
 * using the provided comparator.
 *
 * @param a the first revision (or {@code null}).
 * @param b the second revision (or {@code null}).
 * @param c the comparator.
 * @return the revision considered older.
 */
@Nullable
public static Revision min(@Nullable Revision a,
                           @Nullable Revision b,
                           @NotNull Comparator<Revision> c) {
    if (a != null && b != null) {
        return c.compare(a, b) <= 0 ? a : b;
    }
    // at most one is non-null; prefer it (both null yields null)
    return a != null ? a : b;
}
// default batch size for paging through a document store
private static final int DEFAULT_BATCH_SIZE = 100;
/**
 * Returns an {@link Iterable} over all {@link NodeDocument}s in the given
 * store. The returned {@linkplain Iterable} does not guarantee a consistent
 * view on the store. It may return documents that have been added to the
 * store after this method had been called.
 *
 * @param store
 *            a {@link DocumentStore}.
 * @return an {@link Iterable} over all documents in the store.
 */
public static Iterable<NodeDocument> getAllDocuments(final DocumentStore store) {
    // no indexed-property filter; pages through NODES in default-size batches
    return internalGetSelectedDocuments(store, null, 0, DEFAULT_BATCH_SIZE);
}
/**
 * Returns the root node document of the given document store. The returned
 * document is retrieved from the document store via
 * {@link DocumentStore#find(Collection, String)}, which means the
 * implementation is allowed to return a cached version of the document.
 * The document is therefore not guaranteed to be up-to-date.
 *
 * @param store a document store.
 * @return the root document.
 * @throws IllegalStateException if there is no root document.
 */
@NotNull
public static NodeDocument getRootDocument(@NotNull DocumentStore store) {
    NodeDocument root = store.find(Collection.NODES, Utils.getIdFromPath("/"));
    if (root == null) {
        throw new IllegalStateException("missing root document");
    }
    return root;
}
/**
 * Returns an {@link Iterable} over all {@link NodeDocument}s in the given
 * store matching a condition on an <em>indexed property</em>. The returned
 * {@link Iterable} does not guarantee a consistent view on the store.
 * It may return documents that have been added to the store after this
 * method had been called.
 *
 * @param store
 *            a {@link DocumentStore}.
 * @param indexedProperty the name of the indexed property.
 * @param startValue the lower bound value for the indexed property
 *            (inclusive).
 * @param batchSize number of documents to fetch at once
 * @return an {@link Iterable} over all documents in the store matching the
 *            condition
 */
public static Iterable<NodeDocument> getSelectedDocuments(
        DocumentStore store, String indexedProperty, long startValue, int batchSize) {
    // thin wrapper; the paging logic lives in internalGetSelectedDocuments
    return internalGetSelectedDocuments(store, indexedProperty, startValue, batchSize);
}
/**
 * Like {@link #getSelectedDocuments(DocumentStore, String, long, int)} with
 * a default {@code batchSize}.
 *
 * @param store a {@link DocumentStore}.
 * @param indexedProperty the name of the indexed property.
 * @param startValue the lower bound value for the indexed property
 *            (inclusive).
 * @return an {@link Iterable} over all matching documents in the store.
 */
public static Iterable<NodeDocument> getSelectedDocuments(
        DocumentStore store, String indexedProperty, long startValue) {
    return internalGetSelectedDocuments(store, indexedProperty, startValue, DEFAULT_BATCH_SIZE);
}
/**
 * Lazily pages through the NODES collection in batches of
 * {@code batchSize}. When {@code indexedProperty} is non-null, the batch
 * query additionally filters on that property with {@code startValue} as
 * the bound (semantics delegated to {@link DocumentStore#query}).
 *
 * @param store the store to read from.
 * @param indexedProperty optional indexed property filter (may be null).
 * @param startValue bound value for the indexed property; ignored when
 *            {@code indexedProperty} is null.
 * @param batchSize number of documents fetched per query; must be > 1.
 * @return a lazy {@link Iterable} over the matching documents.
 * @throws IllegalArgumentException if {@code batchSize} is less than 2.
 */
private static Iterable<NodeDocument> internalGetSelectedDocuments(
        final DocumentStore store, final String indexedProperty,
        final long startValue, final int batchSize) {
    if (batchSize < 2) {
        throw new IllegalArgumentException("batchSize must be > 1");
    }
    return new Iterable<NodeDocument>() {
        @Override
        public Iterator<NodeDocument> iterator() {
            return new AbstractIterator<NodeDocument>() {
                // id of the most recently returned document; used as the
                // lower bound of the next batch query (presumably
                // exclusive — see DocumentStore.query)
                private String startId = NodeDocument.MIN_ID_VALUE;
                private Iterator<NodeDocument> batch = nextBatch();
                @Override
                protected NodeDocument computeNext() {
                    // read next batch if necessary
                    if (!batch.hasNext()) {
                        batch = nextBatch();
                    }
                    NodeDocument doc;
                    if (batch.hasNext()) {
                        doc = batch.next();
                        // remember current id
                        startId = doc.getId();
                    } else {
                        // an empty batch means the store is exhausted
                        doc = endOfData();
                    }
                    return doc;
                }
                // queries the store for the next page of documents after startId
                private Iterator<NodeDocument> nextBatch() {
                    List<NodeDocument> result = indexedProperty == null ? store.query(Collection.NODES, startId,
                            NodeDocument.MAX_ID_VALUE, batchSize) : store.query(Collection.NODES, startId,
                            NodeDocument.MAX_ID_VALUE, indexedProperty, startValue, batchSize);
                    return result.iterator();
                }
            };
        }
    };
}
/**
 * @return whether {@code path} represents oak's internal path, that is,
 *         a path with an element starting with a colon.
 */
public static boolean isHiddenPath(@NotNull String path) {
    // internal (hidden) path elements start with a colon
    return path.indexOf("/:") != -1;
}
/**
 * Transforms the given {@link Iterable} from {@link String} to
 * {@link StringValue} elements. The {@link Iterable} must not have
 * {@code null} values.
 */
public static Iterable<StringValue> asStringValueIterable(
        @NotNull Iterable<String> values) {
    return transform(values, new Function<String, StringValue>() {
        @Override
        public StringValue apply(String input) {
            // wrap each element individually
            return new StringValue(input);
        }
    });
}
/**
 * Transforms the given paths into ids using {@link #getIdFromPath(String)}.
 *
 * @param paths the paths to transform.
 * @return a lazily transformed iterable of ids.
 */
public static Iterable<String> pathToId(@NotNull Iterable<String> paths) {
    return transform(paths, new Function<String, String>() {
        @Override
        public String apply(String path) {
            return getIdFromPath(path);
        }
    });
}
/**
 * Returns the highest timestamp of all the passed external revisions.
 * A revision is considered external if the clusterId is different from the
 * passed {@code localClusterId}.
 *
 * @param revisions the revisions to consider.
 * @param localClusterId the id of the local cluster node.
 * @return the highest timestamp or {@link Long#MIN_VALUE} if none of the
 *          revisions is external.
 */
public static long getMaxExternalTimestamp(Iterable<Revision> revisions,
                                           int localClusterId) {
    long maxTime = Long.MIN_VALUE;
    for (Revision r : revisions) {
        // only revisions from other cluster nodes count as external
        if (r.getClusterId() != localClusterId) {
            maxTime = Math.max(maxTime, r.getTimestamp());
        }
    }
    return maxTime;
}
/**
 * Returns the given number instance as a {@code Long}.
 *
 * @param n a number or {@code null}.
 * @return the number converted to a {@code Long} or {@code null}
 *          if {@code n} is {@code null}.
 */
public static Long asLong(@Nullable Number n) {
    if (n == null) {
        return null;
    }
    // avoid re-boxing when the value already is a Long
    return (n instanceof Long) ? (Long) n : n.longValue();
}
/**
 * Returns the minimum timestamp to use for a query for child documents
 * that have been modified between {@code fromRev} and {@code toRev}.
 *
 * @param fromRev the from revision.
 * @param toRev the to revision.
 * @param minRevisions the minimum revisions of foreign cluster nodes.
 *            These are derived from the startTime of a cluster node.
 * @return the minimum timestamp.
 */
public static long getMinTimestampForDiff(@NotNull RevisionVector fromRev,
                                          @NotNull RevisionVector toRev,
                                          @NotNull RevisionVector minRevisions) {
    // make sure we have minimum revisions for all known cluster nodes
    fromRev = fromRev.pmax(minRevisions);
    toRev = toRev.pmax(minRevisions);
    // keep only revision entries that changed between the two vectors
    RevisionVector changedFrom = fromRev.difference(toRev);
    RevisionVector changedTo = toRev.difference(fromRev);
    // the minimum timestamp over all changed entries
    long min = Long.MAX_VALUE;
    for (Revision r : changedFrom) {
        min = Math.min(min, r.getTimestamp());
    }
    for (Revision r : changedTo) {
        min = Math.min(min, r.getTimestamp());
    }
    return min;
}
/**
 * Returns true if all the revisions in {@code a} are greater than or equal
 * to their counterparts in {@code b}. If {@code b} contains revisions
 * for cluster nodes that are not present in {@code a}, returns false.
 *
 * @param a the first revision vector.
 * @param b the second revision vector.
 * @return true if all the revisions in {@code a} are at least
 *          as recent as their counterparts in {@code b}.
 */
public static boolean isGreaterOrEquals(@NotNull RevisionVector a,
                                        @NotNull RevisionVector b) {
    // taking the pairwise max leaves a unchanged iff a already dominates b
    RevisionVector merged = a.pmax(b);
    return merged.equals(a);
}
/**
 * Returns {@code true} if changes identified by the {@code from} and
 * {@code to} {@code RevisionVector} are considered local changes. That is
 * the only difference between the two revision vectors are for the given
 * (local) {@code clusterId}.
 *
 * @param from the from revision vector.
 * @param to the to revision vector.
 * @param clusterId the local clusterId.
 * @return whether the changes are considered local.
 */
public static boolean isLocalChange(@NotNull RevisionVector from,
                                    @NotNull RevisionVector to,
                                    int clusterId) {
    RevisionVector diff = to.difference(from);
    // exactly one entry changed and that entry is for the local clusterId
    if (diff.getDimensions() != 1) {
        return false;
    }
    return diff.getRevision(clusterId) != null;
}
/**
 * Wraps the given iterable and aborts iteration over elements when the
 * predicate on an element evaluates to {@code false}. Calling
 * {@code close()} on the returned iterable will close the passed iterable
 * if it is {@link Closeable}.
 *
 * @param iterable the iterable to wrap.
 * @param p the predicate.
 * @return the aborting iterable.
 */
public static <T> CloseableIterable<T> abortingIterable(Iterable<T> iterable,
                                                        Predicate<T> p) {
    checkNotNull(iterable);
    checkNotNull(p);
    return new CloseableIterable<T>(() -> {
        final Iterator<T> it = iterable.iterator();
        return new AbstractIterator<T>() {
            @Override
            protected T computeNext() {
                if (it.hasNext()) {
                    T next = it.next();
                    if (p.apply(next)) {
                        return next;
                    }
                }
                // either the source is exhausted or the predicate rejected
                // the element; in both cases iteration ends here
                return endOfData();
            }
        };
    }, () -> closeIfCloseable(iterable));
}
/**
 * Makes sure the current time is after the most recent external revision
 * timestamp in the _lastRev map of the given root document. If necessary
 * the current thread waits until {@code clock} is after the external
 * revision timestamp.
 *
 * @param rootDoc the root document.
 * @param clock the clock.
 * @param clusterId the local clusterId.
 * @throws InterruptedException if the current thread is interrupted while
 *          waiting. The interrupted status on the current thread is cleared
 *          when this exception is thrown.
 */
public static void alignWithExternalRevisions(@NotNull NodeDocument rootDoc,
                                              @NotNull Clock clock,
                                              int clusterId)
        throws InterruptedException {
    Map<Integer, Revision> lastRevMap = checkNotNull(rootDoc).getLastRev();
    long externalTime = Utils.getMaxExternalTimestamp(lastRevMap.values(), clusterId);
    long localTime = clock.getTime();
    if (localTime < externalTime) {
        LOG.warn("Detected clock differences. Local time is '{}', " +
                        "while most recent external time is '{}'. " +
                        "Current _lastRev entries: {}",
                new Date(localTime), new Date(externalTime), lastRevMap.values());
        double delay = ((double) externalTime - localTime) / 1000d;
        String fmt = "Background read will be delayed by %.1f seconds. " +
                "Please check system time on cluster nodes.";
        String msg = String.format(fmt, delay);
        LOG.warn(msg);
        // wait in chunks of at most one minute so remaining delay is logged
        while (localTime + 60000 < externalTime) {
            clock.waitUntil(localTime + 60000);
            localTime = clock.getTime();
            delay = ((double) externalTime - localTime) / 1000d;
            LOG.warn(String.format(fmt, delay));
        }
        clock.waitUntil(externalTime + 1);
    } else if (localTime == externalTime) {
        // make sure local time is past external time
        // but only log at debug
        // fix: previous concatenation produced "...until localtime is..."
        // (missing space between the two literals)
        LOG.debug("Local and external time are equal. Waiting until local " +
                "time is more recent than external reported time.");
        clock.waitUntil(externalTime + 1);
    }
}
/**
 * Calls {@link Thread#join()} on each of the passed threads and catches
 * any potentially thrown {@link InterruptedException}. If an interrupt
 * occurred, the current thread's interrupt status is restored after all
 * joins have been attempted, so callers can still observe it.
 *
 * @param threads the threads to join.
 */
public static void joinQuietly(Thread... threads) {
    boolean interrupted = false;
    for (Thread t : threads) {
        try {
            t.join();
        } catch (InterruptedException e) {
            // remember the interrupt but keep joining the remaining
            // threads, matching the previous best-effort semantics
            interrupted = true;
        }
    }
    if (interrupted) {
        // re-assert the interrupt instead of silently swallowing it
        Thread.currentThread().interrupt();
    }
}
/**
 * Returns the version of the module that contains the DocumentNodeStore.
 *
 * @return the module version or "SNAPSHOT" if unknown.
 */
public static String getModuleVersion() {
    String version = MODULE_VERSION;
    if (version == null) {
        // lazily resolve and cache; under concurrency the version may be
        // resolved more than once, with the same result
        version = OakVersion.getVersion("oak-store-document", Utils.class);
        MODULE_VERSION = version;
    }
    return version;
}
/**
 * Checks the revision age on the root document for the given cluster node
 * info. The check fails with a {@link DocumentStoreException} if the
 * {@code _lastRev} timestamp for the cluster node is newer than the
 * current {@code clock} time. The check does not fail if the root document
 * does not exist or has no {@code _lastRev} entry for the cluster node.
 *
 * @param store the document store from where to read the root document.
 * @param info the cluster node info with the clusterId.
 * @param clock the clock to get the current time.
 * @throws DocumentStoreException if the check fails.
 */
public static void checkRevisionAge(DocumentStore store,
                                    ClusterNodeInfo info,
                                    Clock clock)
        throws DocumentStoreException {
    NodeDocument root = store.find(Collection.NODES, getIdFromPath("/"));
    if (root == null) {
        // no root document yet: nothing to check
        return;
    }
    int clusterId = info.getId();
    Revision lastRev = root.getLastRev().get(clusterId);
    if (lastRev == null) {
        // no _lastRev entry for this cluster node: nothing to check
        return;
    }
    long now = clock.getTime();
    if (lastRev.getTimestamp() > now) {
        String msg = String.format("Cluster id %d has a _lastRev %s (%s) " +
                "newer than current time %s. Please check system time on " +
                "cluster nodes.", clusterId, lastRev.toString(),
                timestampToString(lastRev.getTimestamp()), timestampToString(now));
        throw new DocumentStoreException(msg);
    }
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.common.settings;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.MetadataIndexStateService;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.routing.allocation.ExistingShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexSortConfig;
import org.elasticsearch.index.IndexingSlowLog;
import org.elasticsearch.index.MergePolicyConfig;
import org.elasticsearch.index.MergeSchedulerConfig;
import org.elasticsearch.index.SearchSlowLog;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.FsDirectoryFactory;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.indices.IndicesRequestCache;
import org.elasticsearch.indices.ShardLimitValidator;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
/**
* Encapsulates all valid index level settings.
* @see Property#IndexScope
*/
public final class IndexScopedSettings extends AbstractScopedSettings {
    /** Matches any setting key in the {@code index.} namespace. */
    public static final Predicate<String> INDEX_SETTINGS_KEY_PREDICATE = (s) -> s.startsWith(IndexMetadata.INDEX_SETTING_PREFIX);
    /**
     * All index-level settings registered by the core server. Plugins may
     * contribute additional index settings at node construction time.
     */
    public static final Set<Setting<?>> BUILT_IN_INDEX_SETTINGS = Set.of(
        MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY,
        MergeSchedulerConfig.AUTO_THROTTLE_SETTING,
        MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING,
        MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING,
        IndexMetadata.SETTING_INDEX_VERSION_CREATED,
        IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_SETTING,
        IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING,
        IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING,
        IndexMetadata.INDEX_AUTO_EXPAND_REPLICAS_SETTING,
        IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING,
        IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING,
        IndexMetadata.INDEX_ROUTING_PARTITION_SIZE_SETTING,
        IndexMetadata.INDEX_NUMBER_OF_ROUTING_SHARDS_SETTING,
        IndexMetadata.INDEX_READ_ONLY_SETTING,
        IndexMetadata.INDEX_BLOCKS_READ_SETTING,
        IndexMetadata.INDEX_BLOCKS_WRITE_SETTING,
        IndexMetadata.INDEX_BLOCKS_METADATA_SETTING,
        IndexMetadata.INDEX_BLOCKS_READ_ONLY_ALLOW_DELETE_SETTING,
        IndexMetadata.INDEX_PRIORITY_SETTING,
        IndexMetadata.INDEX_DATA_PATH_SETTING,
        IndexMetadata.INDEX_HIDDEN_SETTING,
        IndexMetadata.INDEX_FORMAT_SETTING,
        IndexMetadata.INDEX_ROLLUP_SOURCE_NAME,
        IndexMetadata.INDEX_ROLLUP_SOURCE_UUID,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING,
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING,
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING,
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING,
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING,
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING,
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING,
        MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_DELETES_PCT_ALLOWED_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING,
        IndexSortConfig.INDEX_SORT_FIELD_SETTING,
        IndexSortConfig.INDEX_SORT_ORDER_SETTING,
        IndexSortConfig.INDEX_SORT_MISSING_SETTING,
        IndexSortConfig.INDEX_SORT_MODE_SETTING,
        IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING,
        IndexSettings.INDEX_WARMER_ENABLED_SETTING,
        IndexSettings.INDEX_REFRESH_INTERVAL_SETTING,
        IndexSettings.MAX_RESULT_WINDOW_SETTING,
        IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING,
        IndexSettings.MAX_TOKEN_COUNT_SETTING,
        IndexSettings.MAX_DOCVALUE_FIELDS_SEARCH_SETTING,
        IndexSettings.MAX_SCRIPT_FIELDS_SETTING,
        IndexSettings.MAX_NGRAM_DIFF_SETTING,
        IndexSettings.MAX_SHINGLE_DIFF_SETTING,
        IndexSettings.MAX_RESCORE_WINDOW_SETTING,
        IndexSettings.MAX_ANALYZED_OFFSET_SETTING,
        IndexSettings.MAX_TERMS_COUNT_SETTING,
        IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING,
        IndexSettings.DEFAULT_FIELD_SETTING,
        IndexSettings.QUERY_STRING_LENIENT_SETTING,
        IndexSettings.ALLOW_UNMAPPED,
        IndexSettings.INDEX_CHECK_ON_STARTUP,
        IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD,
        IndexSettings.MAX_SLICES_PER_SCROLL,
        IndexSettings.MAX_REGEX_LENGTH_SETTING,
        ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING,
        IndexSettings.INDEX_GC_DELETES_SETTING,
        IndexSettings.INDEX_SOFT_DELETES_SETTING,
        IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING,
        IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING,
        IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING,
        UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING,
        EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING,
        EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING,
        IndexSettings.INDEX_FLUSH_AFTER_MERGE_THRESHOLD_SIZE_SETTING,
        IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING,
        IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING,
        IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING,
        IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING,
        IndexSettings.INDEX_SEARCH_IDLE_AFTER,
        IndexSettings.INDEX_SEARCH_THROTTLED,
        IndexFieldDataService.INDEX_FIELDDATA_CACHE_KEY,
        FieldMapper.IGNORE_MALFORMED_SETTING,
        FieldMapper.COERCE_SETTING,
        Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING,
        MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING,
        MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING,
        MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING,
        MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING,
        MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING,
        BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING,
        IndexModule.INDEX_STORE_TYPE_SETTING,
        IndexModule.INDEX_STORE_PRE_LOAD_SETTING,
        IndexModule.INDEX_RECOVERY_TYPE_SETTING,
        IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING,
        FsDirectoryFactory.INDEX_LOCK_FACTOR_SETTING,
        EngineConfig.INDEX_CODEC_SETTING,
        IndexMetadata.SETTING_WAIT_FOR_ACTIVE_SHARDS,
        IndexSettings.DEFAULT_PIPELINE,
        IndexSettings.FINAL_PIPELINE,
        MetadataIndexStateService.VERIFIED_BEFORE_CLOSE_SETTING,
        ExistingShardsAllocator.EXISTING_SHARDS_ALLOCATOR_SETTING,
        DiskThresholdDecider.SETTING_IGNORE_DISK_WATERMARKS,
        ShardLimitValidator.INDEX_SETTING_SHARD_LIMIT_GROUP,
        // validate that built-in similarities don't get redefined
        Setting.groupSetting(
            "index.similarity.",
            (s) -> {
                Map<String, Settings> groups = s.getAsGroups();
                for (String key : SimilarityService.BUILT_IN.keySet()) {
                    if (groups.containsKey(key)) {
                        throw new IllegalArgumentException("illegal value for [index.similarity." + key +
                            "] cannot redefine built-in similarity");
                    }
                }
            },
            Property.IndexScope), // this allows similarity settings to be passed
        Setting.groupSetting("index.analysis.", Property.IndexScope)); // this allows analysis settings to be passed
    /** Default instance backed only by the built-in index settings. */
    public static final IndexScopedSettings DEFAULT_SCOPED_SETTINGS = new IndexScopedSettings(Settings.EMPTY, BUILT_IN_INDEX_SETTINGS);
    /**
     * Creates index-scoped settings from the given node settings and the
     * set of registered index settings.
     */
    public IndexScopedSettings(Settings settings, Set<Setting<?>> settingsSet) {
        super(settings, settingsSet, Collections.emptySet(), Property.IndexScope);
    }
    // copy constructor used by copy(Settings, IndexMetadata) below; scopes
    // the logger to the index taken from the metadata
    private IndexScopedSettings(Settings settings, IndexScopedSettings other, IndexMetadata metadata) {
        super(settings, metadata.getSettings(), other, Loggers.getLogger(IndexScopedSettings.class, metadata.getIndex()));
    }
    /**
     * Returns a copy of this instance bound to the given settings and
     * index metadata.
     */
    public IndexScopedSettings copy(Settings settings, IndexMetadata metadata) {
        return new IndexScopedSettings(settings, this, metadata);
    }
    @Override
    protected void validateSettingKey(Setting<?> setting) {
        // index-scoped settings must live in the "index." namespace
        if (setting.getKey().startsWith("index.") == false) {
            throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "] must start with [index.]");
        }
        super.validateSettingKey(setting);
    }
    @Override
    public boolean isPrivateSetting(String key) {
        switch (key) {
            case IndexMetadata.SETTING_CREATION_DATE:
            case IndexMetadata.SETTING_INDEX_UUID:
            case IndexMetadata.SETTING_HISTORY_UUID:
            case IndexMetadata.SETTING_VERSION_UPGRADED:
            case IndexMetadata.SETTING_INDEX_PROVIDED_NAME:
            case MergePolicyConfig.INDEX_MERGE_ENABLED:
            // we keep the shrink settings for BWC - this can be removed in 8.0
            // we can't remove in 7 since this setting might be baked into an index coming in via a full cluster restart from 6.0
            case "index.shrink.source.uuid":
            case "index.shrink.source.name":
            case IndexMetadata.INDEX_RESIZE_SOURCE_UUID_KEY:
            case IndexMetadata.INDEX_RESIZE_SOURCE_NAME_KEY:
                return true;
            default:
                return IndexMetadata.INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING.getRawKey().match(key);
        }
    }
}
| |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.camel.component;
import java.io.ByteArrayInputStream;
import java.io.StringWriter;
import java.lang.reflect.Method;
import java.math.BigInteger;
import java.util.List;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.xml.bind.Marshaller;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.model.dataformat.JaxbDataFormat;
import org.drools.core.command.runtime.BatchExecutionCommandImpl;
import org.drools.core.command.runtime.rule.FireAllRulesCommand;
import org.drools.core.command.runtime.rule.InsertObjectCommand;
import org.drools.core.common.InternalRuleBase;
import org.drools.core.impl.KnowledgeBaseImpl;
import org.junit.Test;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.Message;
import org.kie.api.io.KieResources;
import org.kie.api.runtime.KieSession;
import org.kie.internal.builder.JaxbConfiguration;
import org.kie.internal.builder.KnowledgeBuilderFactory;
import org.kie.internal.utils.CompositeClassLoader;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.ExecutionResults;
import com.sun.tools.xjc.Language;
import com.sun.tools.xjc.Options;
/**
 * Verifies that JAXB model classes generated at knowledge-base build time from an
 * XSD model ({@code person.xsd}) can be marshalled, sent through a Camel route into
 * a KIE session, executed, and that the execution results unmarshal correctly.
 */
public class CamelEndpointWithJaxbXSDModelTest extends KieCamelTestSupport {
    // Root class loader of the built KieBase; it is the only loader that can
    // resolve the model classes generated from person.xsd.
    private ClassLoader classLoader;

    @Test
    public void testSessionInsert() throws Exception {
        // These two classes are defined by person.xsd, not as class files on the test
        // classpath, so they can only be reached reflectively via the KieBase class loader.
        Class< ? > personClass = classLoader.loadClass( "org.drools.model.Person" );
        assertNotNull( personClass.getPackage() );
        Class< ? > addressClass = classLoader.loadClass( "org.drools.model.AddressType" );
        assertNotNull( addressClass.getPackage() );

        // Build two Person instances (each with an address) purely via reflection.
        Object baunax = personClass.newInstance();
        Object lucaz = personClass.newInstance();
        Method setName = personClass.getMethod( "setName", String.class );
        setName.invoke( baunax, "baunax" );
        setName.invoke( lucaz, "lucaz" );
        Method setAddress = personClass.getMethod( "setAddress", addressClass );
        Method setStreet = addressClass.getMethod( "setStreet", String.class );
        Method setPostalCode = addressClass.getMethod( "setPostalCode", BigInteger.class );
        Object lucazAddress = addressClass.newInstance();
        setStreet.invoke( lucazAddress, "Unknow 342" );
        setPostalCode.invoke( lucazAddress, new BigInteger( "1234" ) );
        Object baunaxAddress = addressClass.newInstance();
        setStreet.invoke( baunaxAddress, "New Street 123" );
        setPostalCode.invoke( baunaxAddress, new BigInteger( "5678" ) );
        setAddress.invoke( lucaz, lucazAddress );
        setAddress.invoke( baunax, baunaxAddress );

        // Batch: insert both persons, then fire all rules.
        BatchExecutionCommandImpl cmd = new BatchExecutionCommandImpl();
        cmd.setLookup( "ksession1" );
        cmd.getCommands().add( new InsertObjectCommand( lucaz, "lucaz" ) );
        cmd.getCommands().add( new InsertObjectCommand( baunax, "baunax" ) );
        cmd.getCommands().add( new FireAllRulesCommand() );

        // Marshal the command batch to XML; this is the payload sent through the route.
        // The resulting document has the shape:
        //   <batch-execution lookup='ksession1'>
        //     <insert out-identifier='...'><object><Person .../></object></insert> (x2)
        //     <fire-all-rules/>
        //   </batch-execution>
        StringWriter xmlReq = new StringWriter();
        Marshaller marshaller = getJaxbContext().createMarshaller();
        marshaller.setProperty( "jaxb.formatted.output", true );
        marshaller.marshal( cmd, xmlReq );
        System.out.println( xmlReq.toString() );

        byte[] xmlResp = (byte[]) template.requestBody( "direct:test-with-session",
                                                        xmlReq.toString() );
        assertNotNull( xmlResp );
        System.out.println( new String( xmlResp ) );

        ExecutionResults resp = (ExecutionResults) getJaxbContext().createUnmarshaller().unmarshal( new ByteArrayInputStream( xmlResp ) );
        assertNotNull( resp );
        // Both inserted facts must come back, each with a value and a fact handle.
        assertEquals( 2, resp.getIdentifiers().size() );
        assertNotNull( resp.getValue( "lucaz" ) );
        assertNotNull( resp.getValue( "baunax" ) );
        assertNotNull( resp.getFactHandle( "lucaz" ) );
        assertNotNull( resp.getFactHandle( "baunax" ) );
    }

    /**
     * Route under test: XML in, unmarshal via JAXB, execute against ksession1,
     * marshal the results back to XML.
     */
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        routeBuilder = new RouteBuilder() {
            public void configure() throws Exception {
                JaxbDataFormat jaxbDataFormat = new JaxbDataFormat();
                jaxbDataFormat.setPrettyPrint(true);
                jaxbDataFormat.setContextPath("org.kie.pipeline.camel");
                from("direct:test-with-session").policy(new KiePolicy()).
                        unmarshal(jaxbDataFormat).to("kie:ksession1").marshal(jaxbDataFormat);
            }
        };
        return routeBuilder;
    }

    /**
     * Registers a session whose rules/queries reference the XSD-generated Person
     * type; "rule1" fires once per inserted Person.
     */
    @Override
    protected void configureDroolsContext(Context jndiContext) {
        String rule = "";
        rule += "package org.kie.pipeline.camel.test \n";
        rule += "import org.drools.model.Person \n";
        rule += "global java.util.List list \n";
        rule += "query persons \n";
        rule += " $p : Person(name != null) \n";
        rule += "end \n";
        rule += "query personWithName(String param)\n";
        rule += " $p : Person(name == param) \n";
        rule += "end \n";
        rule += "rule rule1 \n";
        rule += " when \n";
        rule += " $p : Person() \n";
        rule += " \n";
        rule += " then \n";
        rule += " System.out.println(\"executed\"); \n";
        rule += "end\n";
        registerKnowledgeRuntime( "ksession1", rule );
    }

    /**
     * Builds a KieBase containing person.xsd (compiled to JAXB classes via XJC) plus
     * the given DRL, creates a session, binds it into JNDI under {@code identifier},
     * and captures the KieBase root class loader for reflective model access.
     */
    @Override
    protected KieSession registerKnowledgeRuntime(String identifier, String rule) {
        KieServices ks = KieServices.Factory.get();
        KieFileSystem kfs = ks.newKieFileSystem();
        KieResources kieResources = ks.getResources();
        // Compile person.xsd into JAXB classes as part of the knowledge-base build.
        Options xjcOpts = new Options();
        xjcOpts.setSchemaLanguage( Language.XMLSCHEMA );
        JaxbConfiguration jaxbConfiguration = KnowledgeBuilderFactory.newJaxbConfiguration(xjcOpts, "xsd");
        kfs.write(kieResources.newClassPathResource("person.xsd", getClass())
                .setResourceType(ResourceType.XSD)
                .setConfiguration(jaxbConfiguration));
        if ( rule != null && rule.length() > 0 ) {
            kfs.write( "src/main/resources/rule.drl", rule );
        }
        KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
        List<Message> errors = kieBuilder.getResults().getMessages(Message.Level.ERROR);
        if (!errors.isEmpty()) {
            fail("" + errors);
        }
        KieSession ksession = ks.newKieContainer(ks.getRepository().getDefaultReleaseId()).newKieSession();
        // Keep the root class loader so tests can load the generated model classes.
        classLoader = ((InternalRuleBase) ((KnowledgeBaseImpl) ksession.getKieBase()).getRuleBase()).getRootClassLoader();
        try {
            jndiContext.bind( identifier, ksession );
        } catch (NamingException e) {
            throw new RuntimeException(e);
        }
        return ksession;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.distribution.agent.impl;
import java.util.Arrays;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ResourceResolverFactory;
import org.apache.sling.distribution.DistributionRequest;
import org.apache.sling.distribution.DistributionRequestState;
import org.apache.sling.distribution.DistributionRequestType;
import org.apache.sling.distribution.DistributionResponse;
import org.apache.sling.distribution.SimpleDistributionRequest;
import org.apache.sling.distribution.event.impl.DistributionEventFactory;
import org.apache.sling.distribution.log.impl.DefaultDistributionLog;
import org.apache.sling.distribution.serialization.DistributionPackage;
import org.apache.sling.distribution.packaging.DistributionPackageExporter;
import org.apache.sling.distribution.packaging.DistributionPackageImporter;
import org.apache.sling.distribution.serialization.DistributionPackageInfo;
import org.apache.sling.distribution.queue.DistributionQueue;
import org.apache.sling.distribution.queue.DistributionQueueItemState;
import org.apache.sling.distribution.queue.impl.DistributionQueueDispatchingStrategy;
import org.apache.sling.distribution.queue.DistributionQueueItemStatus;
import org.apache.sling.distribution.queue.DistributionQueueProvider;
import org.apache.sling.distribution.queue.impl.simple.SimpleDistributionQueue;
import org.apache.sling.jcr.api.SlingRepository;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Testcase for {@link SimpleDistributionAgent}
*/
public class SimpleDistributionAgentTest {

    /**
     * Builds an agent wired with the given collaborators. Every other constructor
     * argument is fixed to the value shared by all tests in this class
     * (non-passive agent, fresh repository/log mocks, no triggers, ttl 0).
     *
     * @param allowedRoots content roots the agent may distribute, or {@code null} for no restriction
     */
    private SimpleDistributionAgent createAgent(String name,
                                                String serviceName,
                                                DistributionPackageImporter packageImporter,
                                                DistributionPackageExporter packageExporter,
                                                DistributionRequestAuthorizationStrategy authorizationStrategy,
                                                DistributionQueueProvider queueProvider,
                                                DistributionQueueDispatchingStrategy dispatchingStrategy,
                                                DistributionEventFactory distributionEventFactory,
                                                ResourceResolverFactory resolverFactory,
                                                String[] allowedRoots) {
        return new SimpleDistributionAgent(name, false, null, serviceName, packageImporter,
                packageExporter, authorizationStrategy, queueProvider, dispatchingStrategy, null,
                distributionEventFactory, resolverFactory, mock(SlingRepository.class),
                mock(DefaultDistributionLog.class), null, allowedRoots, 0);
    }

    @Test
    public void testDistributionWithFailingDistributionStrategy() throws Exception {
        String name = "sample-agent";
        DistributionPackageImporter packageImporter = mock(DistributionPackageImporter.class);
        DistributionPackageExporter packageExporter = mock(DistributionPackageExporter.class);
        DistributionRequestAuthorizationStrategy packageExporterStrategy = mock(DistributionRequestAuthorizationStrategy.class);
        DistributionQueueProvider queueProvider = mock(DistributionQueueProvider.class);
        DistributionQueueDispatchingStrategy distributionHandler = mock(DistributionQueueDispatchingStrategy.class);
        // Dispatching reports ERROR for the queued item -> the request must be DROPPED.
        Iterable<DistributionQueueItemStatus> states = Arrays.asList(new DistributionQueueItemStatus(DistributionQueueItemState.ERROR, DistributionQueueDispatchingStrategy.DEFAULT_QUEUE_NAME));
        when(distributionHandler.add(any(DistributionPackage.class), any(DistributionQueueProvider.class))).thenReturn(states);
        DistributionEventFactory distributionEventFactory = mock(DistributionEventFactory.class);
        ResourceResolverFactory resolverFactory = mock(ResourceResolverFactory.class);
        SimpleDistributionAgent agent = createAgent(name, "serviceName", packageImporter,
                packageExporter, packageExporterStrategy, queueProvider, distributionHandler,
                distributionEventFactory, resolverFactory, null);
        DistributionRequest request = new SimpleDistributionRequest(DistributionRequestType.ADD, "/");
        DistributionPackage distributionPackage = mock(DistributionPackage.class);
        ResourceResolver resourceResolver = mock(ResourceResolver.class);
        when(distributionPackage.getInfo()).thenReturn(new DistributionPackageInfo("type"));
        when(packageExporter.exportPackages(any(ResourceResolver.class), any(DistributionRequest.class)))
                .thenReturn(Arrays.asList(distributionPackage));
        when(queueProvider.getQueue(DistributionQueueDispatchingStrategy.DEFAULT_QUEUE_NAME)).thenReturn(
                new SimpleDistributionQueue(name, "name"));
        DistributionResponse response = agent.execute(resourceResolver, request);
        assertNotNull(response);
        assertEquals("ERROR", response.getMessage());
        assertEquals(DistributionRequestState.DROPPED, response.getState());
    }

    @Test
    public void testDistributionWithWorkingDistributionStrategy() throws Exception {
        String name = "sample-agent";
        DistributionPackageImporter packageImporter = mock(DistributionPackageImporter.class);
        DistributionPackageExporter packageExporter = mock(DistributionPackageExporter.class);
        DistributionRequestAuthorizationStrategy packageExporterStrategy = mock(DistributionRequestAuthorizationStrategy.class);
        DistributionQueueProvider queueProvider = mock(DistributionQueueProvider.class);
        DistributionQueueDispatchingStrategy distributionHandler = mock(DistributionQueueDispatchingStrategy.class);
        DistributionEventFactory distributionEventFactory = mock(DistributionEventFactory.class);
        ResourceResolverFactory resolverFactory = mock(ResourceResolverFactory.class);
        SimpleDistributionAgent agent = createAgent(name, "subServiceName", packageImporter,
                packageExporter, packageExporterStrategy, queueProvider, distributionHandler,
                distributionEventFactory, resolverFactory, null);
        DistributionRequest request = new SimpleDistributionRequest(DistributionRequestType.ADD, "/");
        DistributionPackage distributionPackage = mock(DistributionPackage.class);
        ResourceResolver resourceResolver = mock(ResourceResolver.class);
        when(distributionPackage.getInfo()).thenReturn(new DistributionPackageInfo("type"));
        // Dispatching reports QUEUED -> the request must be ACCEPTED.
        Iterable<DistributionQueueItemStatus> states = Arrays.asList(new DistributionQueueItemStatus(DistributionQueueItemState.QUEUED,
                DistributionQueueDispatchingStrategy.DEFAULT_QUEUE_NAME));
        when(distributionHandler.add(any(DistributionPackage.class), any(DistributionQueueProvider.class))).thenReturn(states);
        when(packageExporter.exportPackages(any(ResourceResolver.class), any(DistributionRequest.class)))
                .thenReturn(Arrays.asList(distributionPackage));
        when(queueProvider.getQueue(DistributionQueueDispatchingStrategy.DEFAULT_QUEUE_NAME)).thenReturn(
                new SimpleDistributionQueue(name, "name"));
        DistributionResponse response = agent.execute(resourceResolver, request);
        assertNotNull(response);
        assertEquals("QUEUED", response.getMessage());
        assertEquals(DistributionRequestState.ACCEPTED, response.getState());
    }

    @Test
    public void testDistribution() throws Exception {
        String name = "sample-agent";
        DistributionPackageImporter packageImporter = mock(DistributionPackageImporter.class);
        DistributionPackageExporter packageExporter = mock(DistributionPackageExporter.class);
        DistributionRequestAuthorizationStrategy packageExporterStrategy = mock(DistributionRequestAuthorizationStrategy.class);
        DistributionQueueProvider queueProvider = mock(DistributionQueueProvider.class);
        DistributionQueueDispatchingStrategy distributionHandler = mock(DistributionQueueDispatchingStrategy.class);
        DistributionEventFactory distributionEventFactory = mock(DistributionEventFactory.class);
        ResourceResolverFactory resolverFactory = mock(ResourceResolverFactory.class);
        SimpleDistributionAgent agent = createAgent(name, "serviceName", packageImporter,
                packageExporter, packageExporterStrategy, queueProvider, distributionHandler,
                distributionEventFactory, resolverFactory, null);
        DistributionRequest request = new SimpleDistributionRequest(DistributionRequestType.ADD, "/");
        DistributionPackage distributionPackage = mock(DistributionPackage.class);
        DistributionPackageInfo packageInfo = new DistributionPackageInfo("type");
        when(distributionPackage.getInfo()).thenReturn(packageInfo);
        ResourceResolver resourceResolver = mock(ResourceResolver.class);
        when(packageExporter.exportPackages(resourceResolver, request)).thenReturn(Arrays.asList(distributionPackage));
        when(queueProvider.getQueue(DistributionQueueDispatchingStrategy.DEFAULT_QUEUE_NAME)).thenReturn(
                new SimpleDistributionQueue(name, "name"));
        // Smoke test: execution must complete without throwing.
        agent.execute(resourceResolver, request);
    }

    @Test
    public void testGetExistingNamedQueue() throws Exception {
        String name = "sample-agent";
        DistributionPackageImporter packageImporter = mock(DistributionPackageImporter.class);
        DistributionPackageExporter packageExporter = mock(DistributionPackageExporter.class);
        DistributionRequestAuthorizationStrategy authorizationStrategy = mock(DistributionRequestAuthorizationStrategy.class);
        DistributionQueueProvider queueProvider = mock(DistributionQueueProvider.class);
        DistributionQueueDispatchingStrategy dispatchingStrategy = mock(DistributionQueueDispatchingStrategy.class);
        when(dispatchingStrategy.getQueueNames()).thenReturn(Arrays.asList(new String[]{ "priority" }));
        DistributionEventFactory distributionEventFactory = mock(DistributionEventFactory.class);
        ResourceResolverFactory resolverFactory = mock(ResourceResolverFactory.class);
        SimpleDistributionAgent agent = createAgent(name, "serviceName", packageImporter,
                packageExporter, authorizationStrategy, queueProvider, dispatchingStrategy,
                distributionEventFactory, resolverFactory, null);
        DistributionQueue queue = mock(DistributionQueue.class);
        when(queueProvider.getQueue("priority")).thenReturn(queue);
        assertNotNull(agent.getQueue("priority"));
    }

    @Test
    public void testGetNonExistingNamedQueue() throws Exception {
        String name = "sample-agent";
        DistributionPackageImporter packageImporter = mock(DistributionPackageImporter.class);
        DistributionPackageExporter packageExporter = mock(DistributionPackageExporter.class);
        DistributionRequestAuthorizationStrategy packageExporterStrategy = mock(DistributionRequestAuthorizationStrategy.class);
        DistributionQueueProvider queueProvider = mock(DistributionQueueProvider.class);
        DistributionQueueDispatchingStrategy distributionHandler = mock(DistributionQueueDispatchingStrategy.class);
        DistributionEventFactory distributionEventFactory = mock(DistributionEventFactory.class);
        ResourceResolverFactory resolverFactory = mock(ResourceResolverFactory.class);
        SimpleDistributionAgent agent = createAgent(name, "serviceName", packageImporter,
                packageExporter, packageExporterStrategy, queueProvider, distributionHandler,
                distributionEventFactory, resolverFactory, null);
        DistributionQueue queue = mock(DistributionQueue.class);
        when(queueProvider.getQueue("priority")).thenReturn(queue);
        // Only "priority" is known; asking for an unknown queue yields null.
        assertNull(agent.getQueue("weird"));
    }

    @Test
    public void testDistributionWithAllowedRoot() throws Exception {
        String name = "sample-agent";
        DistributionPackageImporter packageImporter = mock(DistributionPackageImporter.class);
        DistributionPackageExporter packageExporter = mock(DistributionPackageExporter.class);
        DistributionRequestAuthorizationStrategy packageExporterStrategy = mock(DistributionRequestAuthorizationStrategy.class);
        DistributionQueueProvider queueProvider = mock(DistributionQueueProvider.class);
        DistributionQueueDispatchingStrategy queueDistributionStrategy = mock(DistributionQueueDispatchingStrategy.class);
        DistributionEventFactory distributionEventFactory = mock(DistributionEventFactory.class);
        ResourceResolverFactory resolverFactory = mock(ResourceResolverFactory.class);
        when(queueDistributionStrategy.add(any(DistributionPackage.class), any(DistributionQueueProvider.class))).thenReturn(Arrays.asList(
                new DistributionQueueItemStatus(DistributionQueueItemState.QUEUED, "default")
        ));
        // Agent restricted to /content; the request targets /content -> allowed.
        SimpleDistributionAgent agent = createAgent(name, "serviceName", packageImporter,
                packageExporter, packageExporterStrategy, queueProvider, queueDistributionStrategy,
                distributionEventFactory, resolverFactory, new String[] { "/content" });
        DistributionRequest request = new SimpleDistributionRequest(DistributionRequestType.ADD, "/content");
        DistributionPackage distributionPackage = mock(DistributionPackage.class);
        DistributionPackageInfo packageInfo = new DistributionPackageInfo("type");
        when(distributionPackage.getInfo()).thenReturn(packageInfo);
        ResourceResolver resourceResolver = mock(ResourceResolver.class);
        queueDistributionStrategy.add(distributionPackage, queueProvider);
        when(packageExporter.exportPackages(any(ResourceResolver.class), any(DistributionRequest.class))).thenReturn(Arrays.asList(distributionPackage));
        when(queueProvider.getQueue(DistributionQueueDispatchingStrategy.DEFAULT_QUEUE_NAME)).thenReturn(
                new SimpleDistributionQueue(name, "name"));
        DistributionResponse response = agent.execute(resourceResolver, request);
        assertTrue(response.isSuccessful());
    }

    @Test
    public void testDistributionWithDisallowedRoot() throws Exception {
        String name = "sample-agent";
        DistributionPackageImporter packageImporter = mock(DistributionPackageImporter.class);
        DistributionPackageExporter packageExporter = mock(DistributionPackageExporter.class);
        DistributionRequestAuthorizationStrategy packageExporterStrategy = mock(DistributionRequestAuthorizationStrategy.class);
        DistributionQueueProvider queueProvider = mock(DistributionQueueProvider.class);
        DistributionQueueDispatchingStrategy queueDistributionStrategy = mock(DistributionQueueDispatchingStrategy.class);
        DistributionEventFactory distributionEventFactory = mock(DistributionEventFactory.class);
        ResourceResolverFactory resolverFactory = mock(ResourceResolverFactory.class);
        when(queueDistributionStrategy.add(any(DistributionPackage.class), any(DistributionQueueProvider.class))).thenReturn(Arrays.asList(
                new DistributionQueueItemStatus(DistributionQueueItemState.QUEUED, "default")
        ));
        // Agent restricted to /content; the request targets /home -> rejected.
        SimpleDistributionAgent agent = createAgent(name, "serviceName", packageImporter,
                packageExporter, packageExporterStrategy, queueProvider, queueDistributionStrategy,
                distributionEventFactory, resolverFactory, new String[] { "/content" });
        DistributionRequest request = new SimpleDistributionRequest(DistributionRequestType.ADD, "/home");
        DistributionPackage distributionPackage = mock(DistributionPackage.class);
        DistributionPackageInfo packageInfo = new DistributionPackageInfo("type");
        when(distributionPackage.getInfo()).thenReturn(packageInfo);
        ResourceResolver resourceResolver = mock(ResourceResolver.class);
        queueDistributionStrategy.add(distributionPackage, queueProvider);
        when(packageExporter.exportPackages(any(ResourceResolver.class), any(DistributionRequest.class))).thenReturn(Arrays.asList(distributionPackage));
        when(queueProvider.getQueue(DistributionQueueDispatchingStrategy.DEFAULT_QUEUE_NAME)).thenReturn(
                new SimpleDistributionQueue(name, "name"));
        DistributionResponse response = agent.execute(resourceResolver, request);
        assertFalse(response.isSuccessful());
    }
}
| |
/*
* Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.monitor.impl;
import com.eclipsesource.json.JsonObject;
import com.hazelcast.monitor.LocalQueueStats;
import com.hazelcast.util.Clock;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import static com.hazelcast.util.JsonUtil.getInt;
import static com.hazelcast.util.JsonUtil.getLong;
import static java.util.concurrent.atomic.AtomicLongFieldUpdater.newUpdater;
/**
 * Default implementation of {@link LocalQueueStats}: collects local queue
 * statistics and converts them to/from JSON.
 *
 * <p>The operation counters are incremented through
 * {@link AtomicLongFieldUpdater}s, giving lock-free concurrent updates without
 * allocating one {@code AtomicLong} object per counter.
 */
public class LocalQueueStatsImpl implements LocalQueueStats {
    // Reflective updaters: the string argument MUST exactly match the name of
    // the corresponding volatile field below, or newUpdater() fails at class init.
    private static final AtomicLongFieldUpdater<LocalQueueStatsImpl> NUMBER_OF_OFFERS =
            newUpdater(LocalQueueStatsImpl.class, "numberOfOffers");
    private static final AtomicLongFieldUpdater<LocalQueueStatsImpl> NUMBER_OF_REJECTED_OFFERS =
            newUpdater(LocalQueueStatsImpl.class, "numberOfRejectedOffers");
    private static final AtomicLongFieldUpdater<LocalQueueStatsImpl> NUMBER_OF_POLLS =
            newUpdater(LocalQueueStatsImpl.class, "numberOfPolls");
    private static final AtomicLongFieldUpdater<LocalQueueStatsImpl> NUMBER_OF_EMPTY_POLLS =
            newUpdater(LocalQueueStatsImpl.class, "numberOfEmptyPolls");
    private static final AtomicLongFieldUpdater<LocalQueueStatsImpl> NUMBER_OF_OTHER_OPERATIONS =
            newUpdater(LocalQueueStatsImpl.class, "numberOfOtherOperations");
    private static final AtomicLongFieldUpdater<LocalQueueStatsImpl> NUMBER_OF_EVENTS =
            newUpdater(LocalQueueStatsImpl.class, "numberOfEvents");
    // Item counts and ages are written only via the setters below.
    private int ownedItemCount;
    private int backupItemCount;
    private long minAge;
    private long maxAge;
    private long aveAge;
    private long creationTime;
    // Written only through the field updaters above; reads (getters, total(),
    // toJson(), toString()) are plain volatile reads.
    private volatile long numberOfOffers;
    private volatile long numberOfRejectedOffers;
    private volatile long numberOfPolls;
    private volatile long numberOfEmptyPolls;
    private volatile long numberOfOtherOperations;
    private volatile long numberOfEvents;
    public LocalQueueStatsImpl() {
        // Creation time is captured once; fromJson() may later overwrite it.
        creationTime = Clock.currentTimeMillis();
    }
    @Override
    public long getMinAge() {
        return minAge;
    }
    public void setMinAge(long minAge) {
        this.minAge = minAge;
    }
    @Override
    public long getMaxAge() {
        return maxAge;
    }
    public void setMaxAge(long maxAge) {
        this.maxAge = maxAge;
    }
    @Override
    public long getAvgAge() {
        return aveAge;
    }
    // Note: field and setter are spelled "aveAge" while the interface getter is
    // getAvgAge(); kept as-is because the setter name is part of the public API.
    public void setAveAge(long aveAge) {
        this.aveAge = aveAge;
    }
    @Override
    public long getOwnedItemCount() {
        return ownedItemCount;
    }
    public void setOwnedItemCount(int ownedItemCount) {
        this.ownedItemCount = ownedItemCount;
    }
    @Override
    public long getBackupItemCount() {
        return backupItemCount;
    }
    public void setBackupItemCount(int backupItemCount) {
        this.backupItemCount = backupItemCount;
    }
    @Override
    public long getCreationTime() {
        return creationTime;
    }
    /**
     * Total number of operations: offers + polls + other operations.
     * Rejected offers, empty polls and events are deliberately not included.
     */
    public long total() {
        return numberOfOffers + numberOfPolls + numberOfOtherOperations;
    }
    @Override
    public long getOfferOperationCount() {
        return numberOfOffers;
    }
    @Override
    public long getRejectedOfferOperationCount() {
        return numberOfRejectedOffers;
    }
    @Override
    public long getPollOperationCount() {
        return numberOfPolls;
    }
    @Override
    public long getEmptyPollOperationCount() {
        return numberOfEmptyPolls;
    }
    @Override
    public long getOtherOperationsCount() {
        return numberOfOtherOperations;
    }
    public void incrementOtherOperations() {
        NUMBER_OF_OTHER_OPERATIONS.incrementAndGet(this);
    }
    public void incrementOffers() {
        NUMBER_OF_OFFERS.incrementAndGet(this);
    }
    public void incrementRejectedOffers() {
        NUMBER_OF_REJECTED_OFFERS.incrementAndGet(this);
    }
    public void incrementPolls() {
        NUMBER_OF_POLLS.incrementAndGet(this);
    }
    public void incrementEmptyPolls() {
        NUMBER_OF_EMPTY_POLLS.incrementAndGet(this);
    }
    public void incrementReceivedEvents() {
        NUMBER_OF_EVENTS.incrementAndGet(this);
    }
    @Override
    public long getEventOperationCount() {
        return numberOfEvents;
    }
    @Override
    public JsonObject toJson() {
        JsonObject root = new JsonObject();
        root.add("ownedItemCount", ownedItemCount);
        root.add("backupItemCount", backupItemCount);
        root.add("minAge", minAge);
        root.add("maxAge", maxAge);
        root.add("aveAge", aveAge);
        root.add("creationTime", creationTime);
        root.add("numberOfOffers", numberOfOffers);
        root.add("numberOfPolls", numberOfPolls);
        root.add("numberOfRejectedOffers", numberOfRejectedOffers);
        root.add("numberOfEmptyPolls", numberOfEmptyPolls);
        root.add("numberOfOtherOperations", numberOfOtherOperations);
        root.add("numberOfEvents", numberOfEvents);
        return root;
    }
    @Override
    public void fromJson(JsonObject json) {
        // Missing values default to -1 so an absent field is distinguishable
        // from a genuine zero count.
        ownedItemCount = getInt(json, "ownedItemCount", -1);
        backupItemCount = getInt(json, "backupItemCount", -1);
        minAge = getLong(json, "minAge", -1L);
        maxAge = getLong(json, "maxAge", -1L);
        aveAge = getLong(json, "aveAge", -1L);
        creationTime = getLong(json, "creationTime", -1L);
        NUMBER_OF_OFFERS.set(this, getLong(json, "numberOfOffers", -1L));
        NUMBER_OF_POLLS.set(this, getLong(json, "numberOfPolls", -1L));
        NUMBER_OF_REJECTED_OFFERS.set(this, getLong(json, "numberOfRejectedOffers", -1L));
        NUMBER_OF_EMPTY_POLLS.set(this, getLong(json, "numberOfEmptyPolls", -1L));
        NUMBER_OF_OTHER_OPERATIONS.set(this, getLong(json, "numberOfOtherOperations", -1L));
        NUMBER_OF_EVENTS.set(this, getLong(json, "numberOfEvents", -1L));
    }
    @Override
    public String toString() {
        return "LocalQueueStatsImpl{"
                + "ownedItemCount=" + ownedItemCount
                + ", backupItemCount=" + backupItemCount
                + ", minAge=" + minAge
                + ", maxAge=" + maxAge
                + ", aveAge=" + aveAge
                + ", creationTime=" + creationTime
                + ", numberOfOffers=" + numberOfOffers
                + ", numberOfRejectedOffers=" + numberOfRejectedOffers
                + ", numberOfPolls=" + numberOfPolls
                + ", numberOfEmptyPolls=" + numberOfEmptyPolls
                + ", numberOfOtherOperations=" + numberOfOtherOperations
                + ", numberOfEvents=" + numberOfEvents
                + '}';
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.testutils.migration;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.TypeSerializerSchemaCompatibility;
import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import javax.annotation.Nullable;
import java.util.Objects;
import java.util.function.Function;
/**
* The {@link SchemaCompatibilityTestingSerializer} is a mock serializer that can be used
* for schema compatibility and serializer upgrade related tests.
*
* <p>To test serializers compatibility we can start by either obtaining a {@link TypeSerializerSnapshot} and restoring
* a {@link TypeSerializer} or the other way around, starting with a serializer and calling {@link TypeSerializer#snapshotConfiguration()}
* to obtain a snapshot class.
*
* <p>To start from a snapshot, the class {@link SchemaCompatibilityTestingSnapshot} can be configured to return a predefined
* {@link TypeSerializerSchemaCompatibility} result when
* {@link TypeSerializerSnapshot#resolveSchemaCompatibility(TypeSerializer)} would be called, the following static factory
* methods return a pre-configured snapshot class:
* <ul>
* <li>{@code thatIsCompatibleWithNextSerializer}.
* <li>{@code thatIsCompatibleWithNextSerializerAfterReconfiguration}</li>
* <li>{@code thatIsCompatibleWithNextSerializerAfterMigration}</li>
* <li>{@code thatIsIncompatibleWithTheNextSerializer}</li>
* </ul>
*
* <p>Here is a simple test example.<pre>{@code
* @Test
* public void example() {
* TypeSerializerSnapshot<?> snapshot = SchemaCompatibilityTestingSnapshot.thatIsCompatibleWithNextSerializer();
*
* TypeSerializerSchemaCompatibility<?> result = snapshot.resolveSchemaCompatibility(new SchemaCompatibilityTestingSerializer());
*
* assertTrue(result.compatibleAsIs());
* }
* }</pre>
*
* <p>To start from a serializer, simply create a new instance of {@code SchemaCompatibilityTestingSerializer} and call
* {@link SchemaCompatibilityTestingSerializer#snapshotConfiguration()} to obtain a {@link SchemaCompatibilityTestingSnapshot}.
* To control the behaviour of the returned snapshot it is possible to pass a function from a {@code newSerializer} to a
* {@link TypeSerializerSchemaCompatibility}.
*
* <p>It is also possible to pass in a {@code String} identifier when constructing a snapshot or a serializer to tie a
* specific snapshot instance to a specific serializer instance, this might be useful when testing composite serializers.
*/
@SuppressWarnings({"WeakerAccess", "serial"})
public final class SchemaCompatibilityTestingSerializer extends TypeSerializer<Integer> {
private static final long serialVersionUID = 2588814752302505240L;
private final Function<TypeSerializer<Integer>, TypeSerializerSchemaCompatibility<Integer>> resolver;
@Nullable
private final String tokenForEqualityChecks;
	/** Creates an untagged serializer whose snapshot resolves as compatible for any new serializer. */
	public SchemaCompatibilityTestingSerializer() {
		this(null, ALWAYS_COMPATIBLE);
	}
	/**
	 * Creates a serializer tied to the given equality token (to pair it with a specific
	 * snapshot instance); its snapshot resolves as compatible for any new serializer.
	 */
	public SchemaCompatibilityTestingSerializer(String tokenForEqualityChecks) {
		this(tokenForEqualityChecks, ALWAYS_COMPATIBLE);
	}
	/**
	 * Creates a serializer with full control over the compatibility result.
	 *
	 * @param tokenForEqualityChecks token used by {@code equals}/{@code hashCode}, may be null
	 * @param resolver maps the new serializer to the compatibility result the snapshot should report
	 */
	public SchemaCompatibilityTestingSerializer(
		@Nullable String tokenForEqualityChecks,
		Function<TypeSerializer<Integer>, TypeSerializerSchemaCompatibility<Integer>> resolver) {
		this.resolver = resolver;
		this.tokenForEqualityChecks = tokenForEqualityChecks;
	}
@Override
public boolean equals(Object obj) {
return (obj instanceof SchemaCompatibilityTestingSerializer) &&
Objects.equals(tokenForEqualityChecks, ((SchemaCompatibilityTestingSerializer) obj).tokenForEqualityChecks);
}
@Override
public int hashCode() {
return Objects.hash(getClass(), tokenForEqualityChecks);
}
@Override
public String toString() {
return "SchemaCompatibilityTestingSerializer{" +
"tokenForEqualityChecks='" + tokenForEqualityChecks + '\'' +
'}';
}
@Override
public TypeSerializerSnapshot<Integer> snapshotConfiguration() {
return new SchemaCompatibilityTestingSnapshot(tokenForEqualityChecks, resolver);
}
// -----------------------------------------------------------------------------------------------------------
// Serialization related methods are not supported
// -----------------------------------------------------------------------------------------------------------
@Override
public boolean isImmutableType() {
throw new UnsupportedOperationException();
}
@Override
public TypeSerializer<Integer> duplicate() {
throw new UnsupportedOperationException();
}
@Override
public Integer createInstance() {
throw new UnsupportedOperationException();
}
@Override
public Integer copy(Integer from) {
throw new UnsupportedOperationException();
}
@Override
public Integer copy(Integer from, Integer reuse) {
throw new UnsupportedOperationException();
}
@Override
public int getLength() {
throw new UnsupportedOperationException();
}
@Override
public void serialize(Integer record, DataOutputView target) {
throw new UnsupportedOperationException();
}
@Override
public Integer deserialize(DataInputView source) {
throw new UnsupportedOperationException();
}
@Override
public Integer deserialize(Integer reuse, DataInputView source) {
throw new UnsupportedOperationException();
}
@Override
public void copy(DataInputView source, DataOutputView target) {
throw new UnsupportedOperationException();
}
// -----------------------------------------------------------------------------------------------------------
// Utils
// -----------------------------------------------------------------------------------------------------------
private static final Function<TypeSerializer<Integer>, TypeSerializerSchemaCompatibility<Integer>> ALWAYS_COMPATIBLE =
unused -> TypeSerializerSchemaCompatibility.compatibleAsIs();
// -----------------------------------------------------------------------------------------------------------
// Snapshot class
// -----------------------------------------------------------------------------------------------------------
/**
* A configurable {@link TypeSerializerSnapshot} for this serializer.
*/
@SuppressWarnings("WeakerAccess")
public static final class SchemaCompatibilityTestingSnapshot implements TypeSerializerSnapshot<Integer> {
public static SchemaCompatibilityTestingSnapshot thatIsCompatibleWithNextSerializer() {
return thatIsCompatibleWithNextSerializer(null);
}
public static SchemaCompatibilityTestingSnapshot thatIsCompatibleWithNextSerializer(String tokenForEqualityChecks) {
return new SchemaCompatibilityTestingSnapshot(tokenForEqualityChecks, unused -> TypeSerializerSchemaCompatibility.compatibleAsIs());
}
public static SchemaCompatibilityTestingSnapshot thatIsCompatibleWithNextSerializerAfterReconfiguration() {
return thatIsCompatibleWithNextSerializerAfterReconfiguration(null);
}
public static SchemaCompatibilityTestingSnapshot thatIsCompatibleWithNextSerializerAfterReconfiguration(String tokenForEqualityChecks) {
SchemaCompatibilityTestingSerializer reconfiguredSerializer = new SchemaCompatibilityTestingSerializer(tokenForEqualityChecks, ALWAYS_COMPATIBLE);
return new SchemaCompatibilityTestingSnapshot(tokenForEqualityChecks, unused -> TypeSerializerSchemaCompatibility.compatibleWithReconfiguredSerializer(reconfiguredSerializer));
}
public static SchemaCompatibilityTestingSnapshot thatIsCompatibleWithNextSerializerAfterMigration() {
return thatIsCompatibleWithNextSerializerAfterMigration(null);
}
public static SchemaCompatibilityTestingSnapshot thatIsCompatibleWithNextSerializerAfterMigration(String tokenForEqualityChecks) {
return new SchemaCompatibilityTestingSnapshot(tokenForEqualityChecks, unused -> TypeSerializerSchemaCompatibility.compatibleAfterMigration());
}
public static SchemaCompatibilityTestingSnapshot thatIsIncompatibleWithTheNextSerializer() {
return thatIsIncompatibleWithTheNextSerializer(null);
}
public static SchemaCompatibilityTestingSnapshot thatIsIncompatibleWithTheNextSerializer(String tokenForEqualityChecks) {
return new SchemaCompatibilityTestingSnapshot(tokenForEqualityChecks, unused -> TypeSerializerSchemaCompatibility.incompatible());
}
@Nullable
private final String tokenForEqualityChecks;
private final Function<TypeSerializer<Integer>, TypeSerializerSchemaCompatibility<Integer>> resolver;
SchemaCompatibilityTestingSnapshot(@Nullable String tokenForEqualityChecks, Function<TypeSerializer<Integer>, TypeSerializerSchemaCompatibility<Integer>> resolver) {
this.tokenForEqualityChecks = tokenForEqualityChecks;
this.resolver = resolver;
}
@Override
public TypeSerializerSchemaCompatibility<Integer> resolveSchemaCompatibility(TypeSerializer<Integer> newSerializer) {
if (!(newSerializer instanceof SchemaCompatibilityTestingSerializer)) {
return TypeSerializerSchemaCompatibility.incompatible();
}
SchemaCompatibilityTestingSerializer schemaCompatibilityTestingSerializer = (SchemaCompatibilityTestingSerializer) newSerializer;
if (!(Objects.equals(schemaCompatibilityTestingSerializer.tokenForEqualityChecks, tokenForEqualityChecks))) {
return TypeSerializerSchemaCompatibility.incompatible();
}
return resolver.apply(newSerializer);
}
@Override
public int getCurrentVersion() {
throw new UnsupportedOperationException();
}
@Override
public void writeSnapshot(DataOutputView out) {
throw new UnsupportedOperationException();
}
@Override
public void readSnapshot(int readVersion, DataInputView in, ClassLoader userCodeClassLoader) {
throw new UnsupportedOperationException();
}
@Override
public TypeSerializer<Integer> restoreSerializer() {
return new SchemaCompatibilityTestingSerializer(tokenForEqualityChecks, resolver);
}
}
}
| |
package org.uma.jmetal.algorithm.multiobjective.wasfga;
import org.uma.jmetal.algorithm.InteractiveAlgorithm;
import org.uma.jmetal.algorithm.multiobjective.mombi.AbstractMOMBI;
import org.uma.jmetal.algorithm.multiobjective.mombi.util.ASFWASFGA;
import org.uma.jmetal.algorithm.multiobjective.mombi.util.AbstractUtilityFunctionsSet;
import org.uma.jmetal.algorithm.multiobjective.wasfga.util.WASFGARanking;
import org.uma.jmetal.algorithm.multiobjective.wasfga.util.WeightVectors;
import org.uma.jmetal.operator.crossover.CrossoverOperator;
import org.uma.jmetal.operator.mutation.MutationOperator;
import org.uma.jmetal.operator.selection.SelectionOperator;
import org.uma.jmetal.problem.Problem;
import org.uma.jmetal.solution.Solution;
import org.uma.jmetal.util.JMetalException;
import org.uma.jmetal.util.SolutionListUtils;
import org.uma.jmetal.util.evaluator.SolutionListEvaluator;
import org.uma.jmetal.util.fileinput.VectorFileUtils;
import org.uma.jmetal.util.solutionattribute.Ranking;
import java.util.ArrayList;
import java.util.List;
/**
* Implementation of the preference based algorithm named WASF-GA on jMetal5.0
*
* @author Juanjo Durillo
*
* This algorithm is described in the paper: A.B. Ruiz, R. Saborido, M.
* Luque "A Preference-based Evolutionary Algorithm for Multiobjective
* Optimization: The Weighting Achievement Scalarizing Function Genetic
* Algorithm". Journal of Global Optimization. May 2015, Volume 62,
* Issue 1, pp 101-129
* DOI = {10.1007/s10898-014-0214-y}
*/
public class WASFGA<S extends Solution<?>> extends AbstractMOMBI<S> implements
    InteractiveAlgorithm<S, List<S>> {
  private static final long serialVersionUID = 1L;

  protected int maxEvaluations;
  protected int evaluations;
  /** Spacing used to generate the weight vectors for bi-objective problems. */
  protected double epsilon;
  /** Inverted weight vectors; exactly one per individual of the population. */
  protected double[][] weights;
  /** Achievement scalarizing function built from the weight vectors and the interest point. */
  private final AbstractUtilityFunctionsSet<S> achievementScalarizingFunction;
  /** Reference (aspiration) point supplied by the decision maker. */
  private List<Double> interestPoint = null;
  /** Optional file with weight vectors; "" means compute them or read the jMetal resources. */
  private String weightVectorsFileName = "";

  /**
   * Constructor.
   *
   * @param problem               Problem to solve
   * @param populationSize        population size (must equal the number of weight vectors)
   * @param maxIterations         stopping condition, in iterations
   * @param crossoverOperator     crossover operator
   * @param mutationOperator      mutation operator
   * @param selectionOperator     selection operator
   * @param evaluator             solution-list evaluator
   * @param epsilon               spacing for weight-vector generation (bi-objective problems)
   * @param referencePoint        the decision maker's reference point
   * @param weightVectorsFileName file to read weight vectors from; "" for the defaults
   */
  public WASFGA(Problem<S> problem,
                int populationSize,
                int maxIterations,
                CrossoverOperator<S> crossoverOperator,
                MutationOperator<S> mutationOperator,
                SelectionOperator<List<S>, S> selectionOperator,
                SolutionListEvaluator<S> evaluator,
                double epsilon,
                List<Double> referencePoint,
                String weightVectorsFileName) {
    super(problem, maxIterations, crossoverOperator, mutationOperator, selectionOperator, evaluator);
    this.weightVectorsFileName = weightVectorsFileName;
    setMaxPopulationSize(populationSize);
    this.interestPoint = referencePoint;
    // BUG FIX: epsilon must be assigned BEFORE createUtilityFunction() runs, because for
    // bi-objective problems the weight vectors are generated from this.epsilon. Previously
    // the field was assigned after the call, so weight generation always saw 0.0.
    this.epsilon = epsilon;
    this.achievementScalarizingFunction = createUtilityFunction();
  }

  /**
   * Constructor without a weight-vectors file: weights are computed (two objectives) or read
   * from the jMetal resources (more than two objectives).
   *
   * @param problem Problem to solve
   */
  public WASFGA(Problem<S> problem,
                int populationSize,
                int maxIterations,
                CrossoverOperator<S> crossoverOperator,
                MutationOperator<S> mutationOperator,
                SelectionOperator<List<S>, S> selectionOperator,
                SolutionListEvaluator<S> evaluator,
                double epsilon,
                List<Double> referencePoint) {
    this(problem,
        populationSize,
        maxIterations,
        crossoverOperator,
        mutationOperator,
        selectionOperator,
        evaluator,
        epsilon,
        referencePoint,
        "");
  }

  /**
   * Builds the utility-function set used to rank solutions.
   *
   * <p>Weight vectors are (in order of precedence) read from {@code weightVectorsFileName} if
   * given, computed uniformly for bi-objective problems, or read from the jMetal resources for
   * three or more objectives. They are then inverted and validated.
   *
   * @return the achievement scalarizing function set for this run
   * @throws JMetalException if the vectors have the wrong dimension or their count differs
   *                         from the population size
   */
  public AbstractUtilityFunctionsSet<S> createUtilityFunction() {
    // If a file with weight vectors is not given as parameter, weights are calculated or read
    // from the resources file of jMetal.
    if ("".equals(this.weightVectorsFileName)) {
      if (problem.getNumberOfObjectives() == 2) {
        // For bi-objective problems the weights are computed uniformly.
        weights = WeightVectors.initializeUniformlyInTwoDimensions(epsilon, getMaxPopulationSize());
      } else {
        // For more than two objectives, weights are read from the resources file of jMetal.
        String dataFileName = "W" + problem.getNumberOfObjectives() + "D_" + getMaxPopulationSize() + ".dat";
        weights = VectorFileUtils.readVectors(dataFileName);
      }
    } else {
      // A file with weight vectors was given as parameter: read the weights from it.
      weights = VectorFileUtils.readVectors(this.weightVectorsFileName);
    }
    weights = WeightVectors.invert(weights, true);

    // Each weight vector must have as many components as the problem has objectives.
    if (!WeightVectors.validate(weights, problem.getNumberOfObjectives())) {
      throw new JMetalException("Weight vectors are invalid. Check that weight vectors have as many components" +
          " as objectives the problem being solved has.");
    }
    // The algorithm currently uses exactly one weight vector per individual; a dedicated
    // parameter for the number of vectors may be added in the future.
    if (weights.length != maxPopulationSize) {
      throw new JMetalException("The number of weight vectors (" + weights.length + ") and the population size (" +
          maxPopulationSize + ") have different values. This behaviour will change in a future " +
          "version.");
    }
    return new ASFWASFGA<>(weights, interestPoint);
  }

  /** Replaces the decision maker's reference point (interactive use). */
  public void updatePointOfInterest(List<Double> newPointOfInterest) {
    ((ASFWASFGA<S>) this.achievementScalarizingFunction).updatePointOfInterest(newPointOfInterest);
  }

  public int getPopulationSize() {
    return getMaxPopulationSize();
  }

  @Override
  public void specificMOEAComputations() {
    updateNadirPoint(this.getPopulation());
    updateReferencePoint(this.getPopulation());
  }

  /** (mu + lambda) replacement: rank parents plus offspring and keep the best fronts. */
  @Override
  protected List<S> replacement(List<S> population, List<S> offspringPopulation) {
    List<S> jointPopulation = new ArrayList<>();
    jointPopulation.addAll(population);
    jointPopulation.addAll(offspringPopulation);
    Ranking<S> ranking = computeRanking(jointPopulation);
    return selectBest(ranking);
  }

  /** Ranks the solutions with the WASF-GA ranking based on the scalarizing function. */
  protected Ranking<S> computeRanking(List<S> solutionList) {
    Ranking<S> ranking = new WASFGARanking<>(this.achievementScalarizingFunction);
    ranking.computeRanking(solutionList);
    return ranking;
  }

  protected void addRankedSolutionsToPopulation(Ranking<S> ranking, int index, List<S> population) {
    population.addAll(ranking.getSubFront(index));
  }

  /** Adds only as many solutions from the given front as fit in the remaining slots. */
  protected void addLastRankedSolutionsToPopulation(Ranking<S> ranking, int index, List<S> population) {
    List<S> front = ranking.getSubFront(index);
    int remain = this.getPopulationSize() - population.size();
    population.addAll(front.subList(0, remain));
  }

  /** Fills the next population front by front, truncating the last front that fits. */
  protected List<S> selectBest(Ranking<S> ranking) {
    List<S> population = new ArrayList<>(this.getPopulationSize());
    int rankingIndex = 0;
    while (populationIsNotFull(population)) {
      if (subfrontFillsIntoThePopulation(ranking, rankingIndex, population)) {
        addRankedSolutionsToPopulation(ranking, rankingIndex, population);
        rankingIndex++;
      } else {
        addLastRankedSolutionsToPopulation(ranking, rankingIndex, population);
      }
    }
    return population;
  }

  private boolean subfrontFillsIntoThePopulation(Ranking<S> ranking, int index, List<S> population) {
    return (population.size() + ranking.getSubFront(index).size() < this.getPopulationSize());
  }

  @Override public List<S> getResult() {
    return getNonDominatedSolutions(getPopulation());
  }

  protected List<S> getNonDominatedSolutions(List<S> solutionList) {
    return SolutionListUtils.getNonDominatedSolutions(solutionList);
  }

  @Override public String getName() {
    return "WASFGA";
  }

  @Override public String getDescription() {
    return "Weighting Achievement Scalarizing Function Genetic Algorithm";
  }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
/**
*
*/
package com.gemstone.gemfire.internal.cache;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import com.gemstone.gemfire.DataSerializer;
import com.gemstone.gemfire.cache.CacheWriterException;
import com.gemstone.gemfire.cache.EntryEvent;
import com.gemstone.gemfire.cache.EntryNotFoundException;
import com.gemstone.gemfire.cache.StatisticsDisabledException;
import com.gemstone.gemfire.cache.TimeoutException;
import com.gemstone.gemfire.distributed.DistributedMember;
import com.gemstone.gemfire.distributed.internal.DM;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.Assert;
import com.gemstone.gemfire.internal.ByteArrayDataInput;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl.FactoryStatics;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl.StaticSystemCallbacks;
import com.gemstone.gemfire.internal.cache.locks.ExclusiveSharedLockObject;
import com.gemstone.gemfire.internal.cache.locks.LockMode;
import com.gemstone.gemfire.internal.cache.locks.LockingPolicy;
import com.gemstone.gemfire.internal.cache.versions.VersionSource;
import com.gemstone.gemfire.internal.cache.versions.VersionStamp;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.shared.Version;
public class NonLocalRegionEntry implements RegionEntry, VersionStamp {
// Detached snapshot of a region entry's state (key, value, version) taken so callers can
// keep using it after the owning bucket moves; it also serves as its own VersionStamp.
protected long lastModified;
protected boolean isRemoved;
protected Object key;
protected Object value;
private VersionTag<?> versionTag;
/**
 * Create one of these in the local case so that we have a snapshot of the
 * state and can allow the bucket to move out from under us.
 *
 * @param re the live entry whose state is copied
 * @param br the region that owns {@code re}
 * @param allowTombstones whether a TOMBSTONE token may be exposed as the value
 */
protected NonLocalRegionEntry(RegionEntry re, LocalRegion br,
boolean allowTombstones) {
this.key = re.getKeyCopy();
// client get() operations need to see tombstone values
if (allowTombstones && re.isTombstone()) {
this.value = Token.TOMBSTONE;
} else {
this.value = re.getValue(br); // OFFHEAP: copy into heap cd
}
Assert.assertTrue(this.value != Token.NOT_AVAILABLE,
"getEntry did not fault value in from disk");
this.lastModified = re.getLastModified();
this.isRemoved = re.isRemoved();
// Carry over the entry's version information (if versioning is enabled) so this
// snapshot can answer VersionStamp queries.
VersionStamp<?> stamp = re.getVersionStamp();
if (stamp != null) {
this.versionTag = stamp.asVersionTag();
}
}
/* If below is enabled then use the factory methods below to work correctly
* for GemFireXD
*
* Create one of these in the local case so that we have a snapshot of the state
* and can allow the bucket to move out from under us.
*
public NonLocalRegionEntry(LocalRegion br,Object key,Object value) {
this.key = key;
this.value = value;
Assert.assertTrue(this.value != Token.NOT_AVAILABLE, "getEntry did not fault value in from disk");
// this.lastModified = re.getLastModified();
// this.isRemoved = re.isRemoved();
}
*/
/**
 * Create one of these in the local case so that we have a snapshot of the state
 * and can allow the bucket to move out from under us.
 *
 * @param key the entry key (stored as-is)
 * @param value the entry value; a CachedDeserializable is defensively copied
 * @param br the owning region (unused here beyond the snapshot context)
 * @param versionTag version information to carry, may be null
 */
protected NonLocalRegionEntry(Object key, Object value, LocalRegion br,
VersionTag<?> versionTag) {
this.key = key;
this.value = value;
if (this.value instanceof CachedDeserializable) {
// We make a copy of the CachedDeserializable.
// That way the NonLocalRegionEntry will be disconnected
// from the CachedDeserializable that is in our cache and
// will not modify its state.
this.value = CachedDeserializableFactory.create((CachedDeserializable)this.value);
}
Assert.assertTrue(this.value != Token.NOT_AVAILABLE, "getEntry did not fault value in from disk");
this.lastModified = 0l;//re.getStatistics().getLastModifiedTime();
this.isRemoved = Token.isRemoved(value);
// TODO need to get version information from transaction entries
this.versionTag = versionTag;
}
/**
 * Debug representation of this snapshot entry.
 * Fix: the parenthesis opened after the class name was never closed, producing
 * unbalanced output such as "NonLocalRegionEntry(k; value=v; version=t".
 */
@Override
public String toString() {
  return "NonLocalRegionEntry(" + this.key + "; value=" + this.value
      + "; version=" + this.versionTag + ")";
}
/**
 * Factory for an empty entry (used by fromData); delegates to the GemFireXD system
 * callbacks when they are installed, otherwise creates a plain instance.
 */
public static NonLocalRegionEntry newEntry() {
  final StaticSystemCallbacks callbacks = FactoryStatics.systemCallbacks;
  return callbacks == null
      ? new NonLocalRegionEntry()
      : callbacks.newNonLocalRegionEntry();
}
/**
 * Factory for a snapshot of an existing entry; delegates to the GemFireXD system
 * callbacks when they are installed, otherwise constructs the base implementation.
 */
public static NonLocalRegionEntry newEntry(RegionEntry re,
    LocalRegion region, boolean allowTombstones) {
  final StaticSystemCallbacks callbacks = FactoryStatics.systemCallbacks;
  return callbacks == null
      ? new NonLocalRegionEntry(re, region, allowTombstones)
      : callbacks.newNonLocalRegionEntry(re, region, allowTombstones);
}
/**
 * Factory building a snapshot entry from raw key/value/version data; delegates to the
 * GemFireXD system callbacks when installed, otherwise uses the base implementation.
 */
public static NonLocalRegionEntry newEntry(Object key, Object value,
    LocalRegion region, VersionTag<?> versionTag) {
  final StaticSystemCallbacks callbacks = FactoryStatics.systemCallbacks;
  return callbacks == null
      ? new NonLocalRegionEntry(key, value, region, versionTag)
      : callbacks.newNonLocalRegionEntry(key, value, region, versionTag);
}
// Mutating operations are not supported on a detached snapshot entry.
public void makeTombstone(LocalRegion r, VersionTag isOperationRemote) {
throw new UnsupportedOperationException();
}
public boolean dispatchListenerEvents(EntryEventImpl event) {
throw new UnsupportedOperationException();
}
// This entry carries its own version data, so it acts as its own VersionStamp.
public VersionStamp getVersionStamp() {
return this;
}
public boolean hasValidVersion() {
return this.versionTag != null && this.versionTag.hasValidVersion();
}
public void setVersionTimeStamp(long time) {
throw new UnsupportedOperationException();
}
public void processVersionTag(EntryEvent ev) {
throw new UnsupportedOperationException();
}
// No-arg constructor reserved for deserialization; state is populated by fromData().
public NonLocalRegionEntry() {
// for fromData
}
// Serializes this entry's snapshot state. The field order here defines the wire format
// and must stay in sync with fromData() below.
public void toData(DataOutput out) throws IOException {
DataSerializer.writeObject(this.key, out);
DataSerializer.writeObject(this.value, out);
out.writeLong(this.lastModified);
out.writeBoolean(this.isRemoved);
DataSerializer.writeObject(this.versionTag, out);
}
// Reads the state written by toData(), in the same order.
public void fromData(DataInput in) throws IOException,
ClassNotFoundException {
this.key = DataSerializer.readObject(in);
this.value = DataSerializer.readObject(in);
this.lastModified = in.readLong();
this.isRemoved = in.readBoolean();
this.versionTag = (VersionTag)DataSerializer.readObject(in);
}
public long getLastModified() {
return this.lastModified;
}
// A snapshot entry is never in the middle of creation.
@Override
public boolean isLockedForCreate() {
return false;
}
public void _setLastModified(long lastModified) {
this.lastModified = lastModified;
}
public void setLastModified(long lastModified) {
this.lastModified = lastModified;
}
// Access/hit/miss statistics are not tracked for snapshot entries; -1 signals "unknown".
public long getLastAccessed() throws StatisticsDisabledException {
return -1;
}
public long getHitCount() throws StatisticsDisabledException {
return -1;
}
public long getMissCount() throws StatisticsDisabledException {
return -1;
}
// The removed flag was captured from the source entry at construction time.
public boolean isRemoved() {
return this.isRemoved;
}
public boolean isDestroyedOrRemoved() {
return this.isRemoved;
}
public boolean isRemovedPhase2() {
return this.isRemoved;
}
// The snapshot may hold Token.TOMBSTONE as its value, but never reports itself as a
// tombstone entry.
public boolean isTombstone() {
return false;
}
// GII / disk operations are meaningless on a detached snapshot.
public boolean fillInValue(LocalRegion r,
InitialImageOperation.Entry entry, ByteArrayDataInput in, DM mgr, Version targetVersion) {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
public boolean isOverflowedToDisk(LocalRegion r, DistributedRegion.DiskPosition dp) {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
public Object getKey() {
return this.key;
}
// The key is already detached from the live region (the constructor stores a copy),
// so all three key accessors can return it directly without another defensive copy.
public Object getKeyCopy() {
return this.key;
}
public Object getRawKey() {
return this.key;
}
public final Object getValue(RegionEntryContext context) {
return this.value;
}
/** update the value held in this non-local region entry */
void setCachedValue(Object newValue) {
this.value = newValue;
}
// now for the fun part
// Statistics updates, transactional callbacks and in-place mutation are all unsupported:
// this object is a read-only snapshot, not a live region entry.
public void updateStatsForPut(long lastModifiedTime) {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
public void setRecentlyUsed() {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
public void updateStatsForGet(boolean hit, long time)
throws StatisticsDisabledException {
// this method has been made a noop to fix bug 37436
}
public void txDidDestroy(long currTime) {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
public void resetCounts() throws StatisticsDisabledException {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
public void removePhase1(LocalRegion r, boolean isClear)
{
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
public void removePhase2()
{
throw new UnsupportedOperationException(
"Not appropriate for PartitionedRegion.NonLocalRegionEntry");
}
public void setValue(RegionEntryContext context, Object value) {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
@Override
public Object _getValue() {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
public void setOwner(LocalRegion owner) {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
@Override
public Token getValueAsToken() {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
@Override
public Object _getValueRetain(RegionEntryContext context, boolean decompress) {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
@Override
public Object getTransformedValue() {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
// The snapshot's value is always "in VM"; disk- and image-related operations are unsupported.
public final Object getValueInVM(RegionEntryContext context) {
return this.value;
}
public Object getValueOnDisk(LocalRegion r) throws EntryNotFoundException {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
public boolean initialImagePut(LocalRegion region, long lastModified1,
Object newValue, boolean wasRecovered, boolean versionTagAccepted) {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
public boolean initialImageInit(LocalRegion region, long lastModified1,
Object newValue, boolean create, boolean wasRecovered, boolean versionTagAccepted) {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
public boolean destroy(LocalRegion region,
EntryEventImpl event,
boolean inTokenMode,
boolean cacheWrite,
Object expectedOldValue,
boolean forceDestroy,
boolean removeRecoveredEntry)
throws CacheWriterException, EntryNotFoundException, TimeoutException {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
/*
* (non-Javadoc)
*
* @see com.gemstone.gemfire.internal.cache.RegionEntry#getValueOnDiskOrBuffer(com.gemstone.gemfire.internal.cache.LocalRegion)
*/
public Object getValueOnDiskOrBuffer(LocalRegion r)
throws EntryNotFoundException {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
/* (non-Javadoc)
* @see com.gemstone.gemfire.internal.cache.RegionEntry#getSerializedValueOnDisk(com.gemstone.gemfire.internal.cache.LocalRegion)
*/
public Object getSerializedValueOnDisk(LocalRegion localRegion) {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
public boolean hasStats() {
return false;
}
// No disk/off-heap fault-in can happen on a snapshot, so these return the cached value.
public final Object getValueInVMOrDiskWithoutFaultIn(LocalRegion owner) {
return this.value;
}
@Override
public Object getValueOffHeapOrDiskWithoutFaultIn(LocalRegion owner) {
return this.value;
}
// Container info (a GemFireXD concept) is not maintained for snapshot entries.
public Object getContainerInfo() {
return null;
}
public Object setContainerInfo(LocalRegion owner, Object val) {
return null;
}
public void setKey(Object key2) {
this.key = key2;
}
// The ExclusiveSharedLockObject contract cannot be honoured by a detached snapshot,
// so every locking operation below is unsupported.
/**
 * @see ExclusiveSharedLockObject#getOwnerId(Object)
 */
public Object getOwnerId(Object context) {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
/**
 * @see ExclusiveSharedLockObject#attemptLock
 */
public boolean attemptLock(LockMode mode, int flags,
LockingPolicy lockPolicy, long msecs, Object owner, Object context) {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
/**
 * @see ExclusiveSharedLockObject#releaseLock
 */
public void releaseLock(LockMode mode, boolean releaseAll, Object owner,
Object context) {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
/**
 * @see ExclusiveSharedLockObject#numSharedLocks()
 */
public int numSharedLocks() {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
/**
 * @see ExclusiveSharedLockObject#numReadOnlyLocks()
 */
public int numReadOnlyLocks() {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
/**
 * @see ExclusiveSharedLockObject#hasExclusiveLock(Object, Object)
 */
public boolean hasExclusiveLock(Object owner, Object context) {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
/**
 * @see ExclusiveSharedLockObject#hasExclusiveSharedLock(Object, Object)
 */
public boolean hasExclusiveSharedLock(Object owner, Object context) {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
/**
 * @see ExclusiveSharedLockObject#getState()
 */
@Override
public int getState() {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
@Override
public boolean hasAnyLock() {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
// VersionStamp methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/* (non-Javadoc)
* @see com.gemstone.gemfire.internal.cache.RegionEntry#generateVersionTag(com.gemstone.gemfire.distributed.DistributedMember, boolean)
*/
public VersionTag generateVersionTag(VersionSource member,
boolean isRemoteVersionSource, boolean withDelta, LocalRegion region,
EntryEventImpl event) {
throw new UnsupportedOperationException(); // no text needed - not a customer visible method
}
/* (non-Javadoc)
* @see com.gemstone.gemfire.internal.cache.RegionEntry#concurrencyCheck(com.gemstone.gemfire.internal.cache.LocalRegion, com.gemstone.gemfire.internal.cache.versions.VersionTag, com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember, com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember)
*/
public void processVersionTag(LocalRegion r, VersionTag tag,
InternalDistributedMember thisVM, InternalDistributedMember sender) {
throw new UnsupportedOperationException();
}
/* (non-Javadoc)
* @see com.gemstone.gemfire.internal.cache.versions.VersionStamp#getEntryVersion()
*/
public int getEntryVersion() {
if (this.versionTag != null) {
return this.versionTag.getEntryVersion();
}
return 0;
}
public long getRegionVersion() {
if (this.versionTag != null) {
return this.versionTag.getRegionVersion();
}
return 0;
}
/* (non-Javadoc)
* @see com.gemstone.gemfire.internal.cache.versions.VersionStamp#getMemberID()
*/
public VersionSource getMemberID() {
if (this.versionTag != null) {
return this.versionTag.getMemberID();
}
return null;
}
public int getDistributedSystemId() {
if (this.versionTag != null) {
return this.versionTag.getDistributedSystemId();
}
return -1;
}
/* (non-Javadoc)
* @see com.gemstone.gemfire.internal.cache.versions.VersionStamp#setEntryVersion(int)
*/
public void setVersions(VersionTag tag) {
throw new UnsupportedOperationException();
}
/* (non-Javadoc)
* @see com.gemstone.gemfire.internal.cache.versions.VersionStamp#setMemberID(com.gemstone.gemfire.distributed.DistributedMember)
*/
public void setMemberID(VersionSource memberID) {
throw new UnsupportedOperationException();
}
/* (non-Javadoc)
* @see com.gemstone.gemfire.internal.cache.versions.VersionStamp#setPreviousMemberID(com.gemstone.gemfire.distributed.DistributedMember)
*/
public void setPreviousMemberID(DistributedMember previousMemberID) {
throw new UnsupportedOperationException();
}
/* (non-Javadoc)
* @see com.gemstone.gemfire.internal.cache.versions.VersionStamp#asVersionTag()
*/
public VersionTag asVersionTag() {
return this.versionTag;
}
/* (non-Javadoc)
 * @see com.gemstone.gemfire.internal.cache.versions.VersionStamp#processVersionTag(com.gemstone.gemfire.internal.cache.LocalRegion, com.gemstone.gemfire.internal.cache.versions.VersionTag, boolean, com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember, com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember)
 */
// Version-tag conflict processing is not supported on this entry type; always throws.
public void processVersionTag(LocalRegion r, VersionTag tag,
boolean isTombstoneFromGII, boolean hasDelta,
VersionSource thisVM, InternalDistributedMember sender, boolean checkForConflicts) {
throw new UnsupportedOperationException();
}
/**
 * Returns the version timestamp from the stored version tag, or 0 when this
 * entry has no version tag.
 *
 * @see com.gemstone.gemfire.internal.cache.versions.VersionStamp#getVersionTimeStamp()
 */
@Override
public long getVersionTimeStamp() {
  if (this.versionTag == null) {
    return 0;
  }
  return this.versionTag.getVersionTimeStamp();
}
/** get rvv internal high byte. Used by region entries for transferring to storage */
public short getRegionVersionHighBytes() {
  if (this.versionTag == null) {
    return 0;
  }
  return this.versionTag.getRegionVersionHighBytes();
}
/** get rvv internal low bytes. Used by region entries for transferring to storage */
public int getRegionVersionLowBytes() {
  if (this.versionTag == null) {
    return 0;
  }
  return this.versionTag.getRegionVersionLowBytes();
}
// Update-in-progress and eviction-mark state is not maintained by this entry
// type; every accessor and mutator below fails fast with an
// UnsupportedOperationException carrying the standard "not appropriate" message.
@Override
public boolean isUpdateInProgress() {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
@Override
public void setUpdateInProgress(boolean underUpdate) {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
@Override
public boolean isMarkedForEviction() {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
@Override
public void setMarkedForEviction() {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
@Override
public void clearMarkedForEviction() {
throw new UnsupportedOperationException(LocalizedStrings
.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY
.toLocalizedString());
}
/**
 * Reports whether this entry currently holds no value token.
 */
@Override
public boolean isValueNull() {
  return getValueAsToken() == null;
}
// Invalid/destroyed/removed state queries and value clearing are likewise
// unsupported for this entry type; each method below fails fast.
@Override
public boolean isInvalid() {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
@Override
public boolean isDestroyed() {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
@Override
public void setValueToNull() {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
@Override
public boolean isInvalidOrRemoved() {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
@Override
public boolean isDestroyedOrRemovedButNotTombstone() {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
// Intentionally does nothing. NOTE(review): appears this entry type keeps no
// pooled state to release — confirm before removing the empty override.
@Override
public void returnToPool() {
// TODO Auto-generated method stub
}
// Writing a value (with or without tombstone checks) is not supported on this
// entry type; always throws.
@Override
public void setValueWithTombstoneCheck(Object value, EntryEvent event)
throws RegionClearedException {
throw new UnsupportedOperationException(LocalizedStrings.PartitionedRegion_NOT_APPROPRIATE_FOR_PARTITIONEDREGIONNONLOCALREGIONENTRY.toLocalizedString());
}
// Values are never prepared for caching through this entry type; reaching this
// method indicates a programming error, hence IllegalStateException rather
// than the UnsupportedOperationException used by the other stubs.
@Override
public Object prepareValueForCache(RegionEntryContext r, Object val, boolean isEntryUpdate, boolean valHasMetadataForGfxdOffHeapUpdate ) {
throw new IllegalStateException("Should never be called");
}
// NOTE(review): unlike the sibling unsupported operations above, this quietly
// reports "no listener invocation in progress" instead of throwing — confirm
// callers rely on the constant false before changing it.
@Override
public boolean isCacheListenerInvocationInProgress() {
// TODO Auto-generated method stub
return false;
}
// Intentionally does nothing: the listener-invocation flag is ignored by this
// entry type (see the constant-false getter above it pairs with).
@Override
public void setCacheListenerInvocationInProgress(boolean isListenerInvoked) {
// TODO Auto-generated method stub
}
}
| |
package by.triumgroup.recourse.service.impl;
import by.triumgroup.recourse.entity.model.Course;
import by.triumgroup.recourse.entity.model.Lesson;
import by.triumgroup.recourse.entity.model.User;
import by.triumgroup.recourse.repository.CourseRepository;
import by.triumgroup.recourse.repository.LessonRepository;
import by.triumgroup.recourse.repository.UserRepository;
import by.triumgroup.recourse.service.CrudService;
import by.triumgroup.recourse.service.CrudServiceTest;
import by.triumgroup.recourse.service.LessonService;
import by.triumgroup.recourse.service.exception.ServiceBadRequestException;
import by.triumgroup.recourse.service.exception.ServiceException;
import by.triumgroup.recourse.supplier.entity.model.EntitySupplier;
import by.triumgroup.recourse.supplier.entity.model.impl.CourseSupplier;
import by.triumgroup.recourse.supplier.entity.model.impl.LessonSupplier;
import by.triumgroup.recourse.supplier.entity.model.impl.UserSupplier;
import by.triumgroup.recourse.validation.validator.LessonTimeValidator;
import org.assertj.core.util.Lists;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Matchers;
import org.mockito.Mockito;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.data.repository.PagingAndSortingRepository;
import org.springframework.data.util.Pair;
import org.springframework.validation.Errors;
import java.util.List;
import java.util.Optional;
import static by.triumgroup.recourse.util.Util.allItemsPage;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
import static org.mockito.internal.verification.VerificationModeFactory.times;
/**
 * Unit tests for the lesson service, covering lookup by course/teacher and the
 * add/update paths inherited from {@code CrudServiceTest}. All collaborators
 * (repositories and the lesson-time validator) are Mockito mocks created in the
 * constructor, so every test runs against stubbed persistence.
 */
public class LessonServiceTest extends CrudServiceTest<Lesson, Integer> {
// Mocked collaborators plus the service under test and entity suppliers.
private LessonRepository lessonRepository;
private LessonService lessonService;
private LessonSupplier lessonSupplier;
private CourseSupplier courseSupplier;
private CourseRepository courseRepository;
private UserRepository userRepository;
private UserSupplier userSupplier;
private LessonTimeValidator lessonTimeValidator;
public LessonServiceTest() {
lessonRepository = Mockito.mock(LessonRepository.class);
courseRepository = Mockito.mock(CourseRepository.class);
userRepository = Mockito.mock(UserRepository.class);
lessonTimeValidator = Mockito.mock(LessonTimeValidator.class);
// supports() must behave like the real validator so the service applies it.
when(lessonTimeValidator.supports(any())).thenCallRealMethod();
lessonService = new LessonServiceImpl(lessonRepository, courseRepository, userRepository, lessonTimeValidator);
lessonSupplier = new LessonSupplier();
userSupplier = new UserSupplier();
courseSupplier = new CourseSupplier();
}
// Existing course: the repository lookup runs and an (empty) result is wrapped.
@Test
public void findByExistingCourseId() throws Exception {
when(courseRepository.exists(any())).thenReturn(true);
when(lessonRepository.findByCourseIdOrderByStartTimeAsc(any(), any())).thenReturn(Lists.emptyList());
Optional<List<Lesson>> result = lessonService.findByCourseId(lessonSupplier.getAnyId(), null);
verify(courseRepository, times(1)).exists(any());
verify(lessonRepository, times(1)).findByCourseIdOrderByStartTimeAsc(any(), any());
assertTrue(result.isPresent());
}
// Missing course: the lesson repository must never be queried.
@Test
public void findByNotExistingCourseId() throws Exception {
when(courseRepository.exists(any())).thenReturn(false);
Optional<List<Lesson>> result = lessonService.findByCourseId(lessonSupplier.getAnyId(), null);
verify(courseRepository, times(1)).exists(any());
verify(lessonRepository, times(0)).findByCourseIdOrderByStartTimeAsc(any(), any());
Assert.assertFalse(result.isPresent());
}
// A user with the TEACHER role yields a present (possibly empty) result.
@Test
public void findByExistingTeacherId() throws Exception {
User teacher = userSupplier.getValidEntityWithId();
teacher.setRole(User.Role.TEACHER);
when(lessonRepository.findByTeacherIdOrderByStartTimeAsc(any(), any())).thenReturn(Lists.emptyList());
when(userRepository.findOne(any())).thenReturn(teacher);
Optional<List<Lesson>> hometask = lessonService.findByTeacherId(userSupplier.getAnyId(), null);
verify(userRepository, times(1)).findOne(any());
verify(lessonRepository, times(1)).findByTeacherIdOrderByStartTimeAsc(any(), any());
Assert.assertTrue(hometask.isPresent());
}
// NOTE(review): misnamed — this checks that a user with the STUDENT role is
// rejected by findByTeacherId, not a missing lesson id.
@Test
public void findByNotExistingLessonId() throws Exception {
User student = userSupplier.getValidEntityWithId();
student.setRole(User.Role.STUDENT);
when(userRepository.findOne(any())).thenReturn(student);
Optional<List<Lesson>> hometask = lessonService.findByTeacherId(userSupplier.getAnyId(), null);
verify(userRepository, times(1)).findOne(any());
verify(lessonRepository, times(0)).findByTeacherIdOrderByStartTimeAsc(any(), any());
Assert.assertFalse(hometask.isPresent());
}
// Both teacher and course exist: the combined repository query runs once.
// NOTE(review): the second argument is a user-supplier id standing in for a
// course id — harmless against mocks, but courseSupplier.getAnyId() would be clearer.
@Test
public void findByTeacherAndCourseId() throws Exception {
User teacher = userSupplier.getValidEntityWithId();
teacher.setRole(User.Role.TEACHER);
when(courseRepository.exists(any())).thenReturn(true);
when(userRepository.findOne(any())).thenReturn(teacher);
when(lessonRepository.findByTeacherIdAndCourseIdOrderByStartTimeAsc(any(), any(), any())).thenReturn(Lists.emptyList());
Optional<List<Lesson>> result = lessonService.findByTeacherIdAndCourseId(userSupplier.getAnyId(), userSupplier.getAnyId(), null);
verify(courseRepository, times(1)).exists(any());
verify(userRepository, times(1)).findOne(any());
verify(lessonRepository, times(1)).findByTeacherIdAndCourseIdOrderByStartTimeAsc(any(), any(), any());
Assert.assertTrue(result.isPresent());
}
// Course exists but the user is not a teacher: combined query must be skipped.
@Test
public void findByNotExistingTeacherAndCourseId() throws Exception {
User student = userSupplier.getValidEntityWithId();
student.setRole(User.Role.STUDENT);
when(courseRepository.exists(any())).thenReturn(true);
when(userRepository.findOne(any())).thenReturn(student);
Optional<List<Lesson>> result = lessonService.findByTeacherIdAndCourseId(userSupplier.getAnyId(), userSupplier.getAnyId(), null);
verify(courseRepository, times(1)).exists(any());
verify(userRepository, times(1)).findOne(any());
verify(lessonRepository, times(0)).findByTeacherIdAndCourseIdOrderByStartTimeAsc(any(), any(), any());
Assert.assertFalse(result.isPresent());
}
// Valid teacher but missing course: combined query must be skipped.
@Test
public void findByTeacherAndNotExistingCourseId() throws Exception {
User teacher = userSupplier.getValidEntityWithId();
teacher.setRole(User.Role.TEACHER);
when(courseRepository.exists(any())).thenReturn(false);
when(userRepository.findOne(any())).thenReturn(teacher);
Optional<List<Lesson>> result = lessonService.findByTeacherIdAndCourseId(userSupplier.getAnyId(), userSupplier.getAnyId(), null);
verify(courseRepository, times(1)).exists(any());
verify(lessonRepository, times(0)).findByTeacherIdAndCourseIdOrderByStartTimeAsc(any(), any(), any());
Assert.assertFalse(result.isPresent());
}
// Neither teacher nor course is valid: combined query must be skipped.
@Test
public void findByNotExistingTeacherAndNotExistingCourseId() throws Exception {
User student = userSupplier.getValidEntityWithId();
student.setRole(User.Role.STUDENT);
when(courseRepository.exists(any())).thenReturn(false);
when(userRepository.findOne(any())).thenReturn(student);
Optional<List<Lesson>> result = lessonService.findByTeacherIdAndCourseId(userSupplier.getAnyId(), userSupplier.getAnyId(), null);
verify(lessonRepository, times(0)).findByTeacherIdAndCourseIdOrderByStartTimeAsc(any(), any(), any());
Assert.assertFalse(result.isPresent());
}
// The time validator reports an error during update -> ServiceBadRequestException.
// NOTE(review): the verify after the throwing update() call never executes
// (JUnit ExpectedException aborts the method at the throw).
@Test
public void updateLessonWithInvalidTimeTest() throws Exception {
Lesson lesson = lessonSupplier.getValidEntityWithoutId();
Integer parameterId = lessonSupplier.getAnyId();
// Make the mocked validator register a field error on the passed Errors object.
doAnswer(invocationOnMock -> {
Errors errors = (Errors)invocationOnMock.getArguments()[1];
errors.rejectValue("", "");
return null;
}).when(lessonTimeValidator).validate(any(), any());
when(lessonRepository.exists(parameterId)).thenReturn(true);
when(lessonRepository.findOne(parameterId)).thenReturn(lesson);
setupAllowedRoles(lesson);
thrown.expect(ServiceBadRequestException.class);
lessonService.update(lesson, parameterId, User.Role.ADMIN);
verify(lessonRepository, times(0)).save(lesson);
}
// Update with a body lacking an id: the saved entity must carry the path id.
@Override
public void updateEntityWithoutIdTest() throws Exception {
Lesson newEntity = getEntitySupplier().getValidEntityWithoutId();
Lesson databaseEntity = getEntitySupplier().getValidEntityWithoutId();
Integer parameterId = getEntitySupplier().getAnyId();
databaseEntity.setId(parameterId);
when(getCrudRepository().save(newEntity)).thenReturn(newEntity);
when(getCrudRepository().exists(parameterId)).thenReturn(true);
when(getCrudRepository().findOne(parameterId)).thenReturn(databaseEntity);
setupAllowedRoles(newEntity);
Optional<Lesson> actualResult = lessonService.update(newEntity, parameterId, User.Role.ADMIN);
verify(getCrudRepository()).save(captor.capture());
verify(getCrudRepository(), times(1)).save(Matchers.<Lesson>any());
Assert.assertEquals(newEntity, actualResult.orElse(null));
Assert.assertEquals(parameterId, captor.getValue().getId());
}
// Update where body id and path id differ: the path id wins on the saved entity.
@Override
public void updateEntityWithDifferentParameterIdTest() throws Exception {
Pair<Integer, Integer> ids = getEntitySupplier().getDifferentIds();
Integer entityId = ids.getFirst();
Integer parameterId = ids.getSecond();
Lesson newEntity = getEntitySupplier().getValidEntityWithoutId();
Lesson databaseEntity = getEntitySupplier().getValidEntityWithoutId();
databaseEntity.setId(parameterId);
newEntity.setId(entityId);
when(getCrudRepository().save(newEntity)).thenReturn(newEntity);
when(getCrudRepository().exists(parameterId)).thenReturn(true);
when(getCrudRepository().findOne(parameterId)).thenReturn(databaseEntity);
setupAllowedRoles(newEntity);
Optional<Lesson> actualResult = lessonService.update(newEntity, parameterId, User.Role.ADMIN);
verify(getCrudRepository()).save(captor.capture());
verify(getCrudRepository(), times(1)).save(Matchers.<Lesson>any());
Assert.assertEquals(newEntity, actualResult.orElse(null));
Assert.assertEquals(parameterId, captor.getValue().getId());
}
// Updating an id that does not exist: nothing saved, empty result.
@Override
public void updateNotExistingEntityTest() throws Exception {
Lesson entity = getEntitySupplier().getValidEntityWithoutId();
Integer parameterId = getEntitySupplier().getAnyId();
when(getCrudRepository().exists(parameterId)).thenReturn(false);
when(getCrudRepository().findOne(parameterId)).thenReturn(null);
Optional<Lesson> actualResult = lessonService.update(entity, parameterId, User.Role.ADMIN);
verify(getCrudRepository(), times(0)).save(entity);
Assert.assertFalse(actualResult.isPresent());
}
// A DataIntegrityViolationException from save is wrapped as ServiceException.
// NOTE(review): the trailing verify is unreachable after the expected throw.
@Override
public void updateEntityExceptionTest() throws Exception {
Lesson entity = getEntitySupplier().getValidEntityWithoutId();
Integer parameterId = getEntitySupplier().getAnyId();
when(getCrudRepository().save(Matchers.<Lesson>any())).thenThrow(new DataIntegrityViolationException(""));
when(getCrudRepository().exists(any())).thenReturn(true);
when(getCrudRepository().findOne(parameterId)).thenReturn(entity);
setupAllowedRoles(entity);
thrown.expect(ServiceException.class);
lessonService.update(entity, parameterId, User.Role.ADMIN);
verify(getCrudRepository(), times(1)).save(Matchers.<Lesson>any());
}
// The time validator reports an error during add -> ServiceBadRequestException.
// NOTE(review): the trailing verify is unreachable after the expected throw.
@Test
public void addLessonWithInvalidTimeTest() throws Exception {
Lesson lesson = lessonSupplier.getValidEntityWithId();
doAnswer(invocationOnMock -> {
Errors errors = (Errors)invocationOnMock.getArguments()[1];
errors.rejectValue("", "");
return null;
}).when(lessonTimeValidator).validate(any(), any());
setupAllowedRoles(lesson);
thrown.expect(ServiceBadRequestException.class);
lessonService.add(lesson);
verify(lessonRepository, times(0)).save(lesson);
}
// Wiring for the generic CrudServiceTest superclass.
@Override
protected CrudService<Lesson, Integer> getCrudService() {
return lessonService;
}
@Override
protected PagingAndSortingRepository<Lesson, Integer> getCrudRepository() {
return lessonRepository;
}
@Override
protected EntitySupplier<Lesson, Integer> getEntitySupplier() {
return lessonSupplier;
}
// NOTE(review): this test stubs the repository and then calls the repository
// directly, so it never exercises the service's find-all path — consider
// invoking the service instead.
@Override
public void findAllEntitiesTest() throws Exception {
when(lessonRepository.findByOrderByStartTimeAsc(allItemsPage())).thenReturn(Lists.newArrayList(getEntitySupplier().getValidEntityWithId()));
List<Lesson> list = Lists.newArrayList(lessonRepository.findByOrderByStartTimeAsc(allItemsPage()));
verify(lessonRepository, times(1)).findByOrderByStartTimeAsc(allItemsPage());
Assert.assertEquals(1, list.size());
}
// Adding a valid lesson whose course exists: saved once and returned.
@Override
public void addValidEntityTest() throws Exception {
Lesson expectedEntity = getEntitySupplier().getValidEntityWithoutId();
Course course = courseSupplier.getValidEntityWithoutId();
course.setId(expectedEntity.getCourseId());
when(getCrudRepository().save(expectedEntity)).thenReturn(expectedEntity);
when(courseRepository.findOne(expectedEntity.getCourseId())).thenReturn(course);
setupAllowedRoles(expectedEntity);
Optional<Lesson> actualResult = getCrudService().add(expectedEntity);
verify(getCrudRepository(), times(1)).save(expectedEntity);
Assert.assertEquals(expectedEntity, actualResult.orElse(null));
}
// Adding an entity that already carries an id: the id must be cleared before save.
@Override
public void addEntityWithExistingIdTest() throws Exception {
Lesson entity = getEntitySupplier().getValidEntityWithId();
Course course = courseSupplier.getValidEntityWithoutId();
course.setId(entity.getCourseId());
when(getCrudRepository().save(entity)).thenReturn(entity);
when(courseRepository.findOne(entity.getCourseId())).thenReturn(course);
setupAllowedRoles(entity);
getCrudService().add(entity);
verify(getCrudRepository()).save(captor.capture());
verify(getCrudRepository(), times(1)).save(entity);
Assert.assertNull(captor.getValue().getId());
}
// Stubs the user repository so the entity's teacher id resolves to a TEACHER,
// satisfying the service's role check.
@Override
protected void setupAllowedRoles(Lesson entity) {
Integer teacherId = entity.getTeacher().getId();
when(userRepository.findOne(teacherId)).thenReturn(userSupplier.getWithRole(User.Role.TEACHER));
}
}
| |
/**
*/
package kieker.model.analysismodel.statistics;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EEnum;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
/**
* <!-- begin-user-doc -->
* The <b>Package</b> for the model.
* It contains accessors for the meta objects to represent
* <ul>
* <li>each class,</li>
* <li>each feature of each class,</li>
* <li>each operation of each class,</li>
* <li>each enum,</li>
* <li>and each data type</li>
* </ul>
* <!-- end-user-doc -->
* @see kieker.model.analysismodel.statistics.StatisticsFactory
* @model kind="package"
* @generated
*/
public interface StatisticsPackage extends EPackage {
/**
* The package name.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
String eNAME = "statistics";
/**
* The package namespace URI.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
String eNS_URI = "platform:/resource/Kieker/model/analysismodel.ecore/statistics";
/**
* The package namespace name.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
String eNS_PREFIX = "statistics";
/**
* The singleton instance of the package.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
StatisticsPackage eINSTANCE = kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl.init();
/**
* The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.StatisticsImpl <em>Statistics</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see kieker.model.analysismodel.statistics.impl.StatisticsImpl
* @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getStatistics()
* @generated
*/
int STATISTICS = 0;
/**
* The feature id for the '<em><b>Statistics</b></em>' map.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int STATISTICS__STATISTICS = 0;
/**
* The number of structural features of the '<em>Statistics</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int STATISTICS_FEATURE_COUNT = 1;
/**
* The number of operations of the '<em>Statistics</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int STATISTICS_OPERATION_COUNT = 0;
/**
* The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.UnitsToStatisticsMapEntryImpl <em>Units To Statistics Map Entry</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see kieker.model.analysismodel.statistics.impl.UnitsToStatisticsMapEntryImpl
* @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getUnitsToStatisticsMapEntry()
* @generated
*/
int UNITS_TO_STATISTICS_MAP_ENTRY = 1;
/**
* The feature id for the '<em><b>Key</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int UNITS_TO_STATISTICS_MAP_ENTRY__KEY = 0;
/**
* The feature id for the '<em><b>Value</b></em>' containment reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int UNITS_TO_STATISTICS_MAP_ENTRY__VALUE = 1;
/**
* The number of structural features of the '<em>Units To Statistics Map Entry</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int UNITS_TO_STATISTICS_MAP_ENTRY_FEATURE_COUNT = 2;
/**
* The number of operations of the '<em>Units To Statistics Map Entry</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int UNITS_TO_STATISTICS_MAP_ENTRY_OPERATION_COUNT = 0;
/**
* The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.StatisticRecordImpl <em>Statistic Record</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see kieker.model.analysismodel.statistics.impl.StatisticRecordImpl
* @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getStatisticRecord()
* @generated
*/
int STATISTIC_RECORD = 2;
/**
* The feature id for the '<em><b>Properties</b></em>' map.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int STATISTIC_RECORD__PROPERTIES = 0;
/**
* The number of structural features of the '<em>Statistic Record</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int STATISTIC_RECORD_FEATURE_COUNT = 1;
/**
* The number of operations of the '<em>Statistic Record</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int STATISTIC_RECORD_OPERATION_COUNT = 0;
/**
* The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.EPropertyTypeToValueImpl <em>EProperty Type To Value</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see kieker.model.analysismodel.statistics.impl.EPropertyTypeToValueImpl
* @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getEPropertyTypeToValue()
* @generated
*/
int EPROPERTY_TYPE_TO_VALUE = 3;
/**
* The feature id for the '<em><b>Key</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int EPROPERTY_TYPE_TO_VALUE__KEY = 0;
/**
* The feature id for the '<em><b>Value</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int EPROPERTY_TYPE_TO_VALUE__VALUE = 1;
/**
* The number of structural features of the '<em>EProperty Type To Value</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int EPROPERTY_TYPE_TO_VALUE_FEATURE_COUNT = 2;
/**
* The number of operations of the '<em>EProperty Type To Value</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int EPROPERTY_TYPE_TO_VALUE_OPERATION_COUNT = 0;
/**
* The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.TimeSeriesImpl <em>Time Series</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see kieker.model.analysismodel.statistics.impl.TimeSeriesImpl
* @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getTimeSeries()
* @generated
*/
int TIME_SERIES = 4;
/**
* The feature id for the '<em><b>Name</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int TIME_SERIES__NAME = 0;
/**
* The feature id for the '<em><b>Unit</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int TIME_SERIES__UNIT = 1;
/**
* The feature id for the '<em><b>Values</b></em>' reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int TIME_SERIES__VALUES = 2;
/**
* The number of structural features of the '<em>Time Series</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int TIME_SERIES_FEATURE_COUNT = 3;
/**
* The number of operations of the '<em>Time Series</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int TIME_SERIES_OPERATION_COUNT = 0;
/**
* The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.ValueImpl <em>Value</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see kieker.model.analysismodel.statistics.impl.ValueImpl
* @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getValue()
* @generated
*/
int VALUE = 5;
/**
* The feature id for the '<em><b>Timestamp</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int VALUE__TIMESTAMP = 0;
/**
* The number of structural features of the '<em>Value</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int VALUE_FEATURE_COUNT = 1;
/**
* The number of operations of the '<em>Value</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int VALUE_OPERATION_COUNT = 0;
/**
* The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.IntValueImpl <em>Int Value</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see kieker.model.analysismodel.statistics.impl.IntValueImpl
* @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getIntValue()
* @generated
*/
int INT_VALUE = 6;
/**
* The feature id for the '<em><b>Timestamp</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int INT_VALUE__TIMESTAMP = VALUE__TIMESTAMP;
/**
* The feature id for the '<em><b>Measurement</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int INT_VALUE__MEASUREMENT = VALUE_FEATURE_COUNT + 0;
/**
* The number of structural features of the '<em>Int Value</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int INT_VALUE_FEATURE_COUNT = VALUE_FEATURE_COUNT + 1;
/**
* The number of operations of the '<em>Int Value</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int INT_VALUE_OPERATION_COUNT = VALUE_OPERATION_COUNT + 0;
/**
* The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.LongValueImpl <em>Long Value</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see kieker.model.analysismodel.statistics.impl.LongValueImpl
* @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getLongValue()
* @generated
*/
int LONG_VALUE = 7;
/**
* The feature id for the '<em><b>Timestamp</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int LONG_VALUE__TIMESTAMP = VALUE__TIMESTAMP;
/**
* The feature id for the '<em><b>Measurement</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int LONG_VALUE__MEASUREMENT = VALUE_FEATURE_COUNT + 0;
/**
* The number of structural features of the '<em>Long Value</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int LONG_VALUE_FEATURE_COUNT = VALUE_FEATURE_COUNT + 1;
/**
* The number of operations of the '<em>Long Value</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int LONG_VALUE_OPERATION_COUNT = VALUE_OPERATION_COUNT + 0;
/**
* The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.FloatValueImpl <em>Float Value</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see kieker.model.analysismodel.statistics.impl.FloatValueImpl
* @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getFloatValue()
* @generated
*/
int FLOAT_VALUE = 8;
/**
* The feature id for the '<em><b>Timestamp</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int FLOAT_VALUE__TIMESTAMP = VALUE__TIMESTAMP;
/**
* The feature id for the '<em><b>Measurement</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int FLOAT_VALUE__MEASUREMENT = VALUE_FEATURE_COUNT + 0;
/**
* The number of structural features of the '<em>Float Value</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int FLOAT_VALUE_FEATURE_COUNT = VALUE_FEATURE_COUNT + 1;
/**
* The number of operations of the '<em>Float Value</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int FLOAT_VALUE_OPERATION_COUNT = VALUE_OPERATION_COUNT + 0;
/**
* The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.DoubleValueImpl <em>Double Value</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see kieker.model.analysismodel.statistics.impl.DoubleValueImpl
* @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getDoubleValue()
* @generated
*/
int DOUBLE_VALUE = 9;
/**
* The feature id for the '<em><b>Timestamp</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int DOUBLE_VALUE__TIMESTAMP = VALUE__TIMESTAMP;
/**
* The feature id for the '<em><b>Measurement</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int DOUBLE_VALUE__MEASUREMENT = VALUE_FEATURE_COUNT + 0;
/**
* The number of structural features of the '<em>Double Value</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int DOUBLE_VALUE_FEATURE_COUNT = VALUE_FEATURE_COUNT + 1;
/**
* The number of operations of the '<em>Double Value</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int DOUBLE_VALUE_OPERATION_COUNT = VALUE_OPERATION_COUNT + 0;
/**
* The meta object id for the '{@link kieker.model.analysismodel.statistics.Unit <em>Unit</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see kieker.model.analysismodel.statistics.Unit
* @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getUnit()
* @generated
*/
int UNIT = 10;
// NOTE(review): EMF-generated meta-object ids and feature/operation counts for the statistics
// metamodel. Do not hand-edit values — regenerate from the Ecore model; ids must stay in sync
// with StatisticsPackageImpl.
/**
 * The number of structural features of the '<em>Unit</em>' class.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
int UNIT_FEATURE_COUNT = 0;
/**
 * The number of operations of the '<em>Unit</em>' class.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
int UNIT_OPERATION_COUNT = 0;
/**
 * The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.StatisticsModelImpl <em>Model</em>}' class.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see kieker.model.analysismodel.statistics.impl.StatisticsModelImpl
 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getStatisticsModel()
 * @generated
 */
int STATISTICS_MODEL = 11;
/**
 * The feature id for the '<em><b>Statistics</b></em>' map.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
int STATISTICS_MODEL__STATISTICS = 0;
/**
 * The number of structural features of the '<em>Model</em>' class.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
int STATISTICS_MODEL_FEATURE_COUNT = 1;
/**
 * The number of operations of the '<em>Model</em>' class.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
int STATISTICS_MODEL_OPERATION_COUNT = 0;
/**
 * The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.EObjectToStatisticsMapEntryImpl <em>EObject To Statistics Map Entry</em>}' class.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see kieker.model.analysismodel.statistics.impl.EObjectToStatisticsMapEntryImpl
 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getEObjectToStatisticsMapEntry()
 * @generated
 */
int EOBJECT_TO_STATISTICS_MAP_ENTRY = 12;
/**
 * The feature id for the '<em><b>Value</b></em>' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
int EOBJECT_TO_STATISTICS_MAP_ENTRY__VALUE = 0;
/**
 * The feature id for the '<em><b>Key</b></em>' reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
int EOBJECT_TO_STATISTICS_MAP_ENTRY__KEY = 1;
/**
 * The number of structural features of the '<em>EObject To Statistics Map Entry</em>' class.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
int EOBJECT_TO_STATISTICS_MAP_ENTRY_FEATURE_COUNT = 2;
/**
 * The number of operations of the '<em>EObject To Statistics Map Entry</em>' class.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
int EOBJECT_TO_STATISTICS_MAP_ENTRY_OPERATION_COUNT = 0;
/**
 * The meta object id for the '{@link kieker.model.analysismodel.statistics.impl.TimeSeriesStatisticsImpl <em>Time Series Statistics</em>}' class.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see kieker.model.analysismodel.statistics.impl.TimeSeriesStatisticsImpl
 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getTimeSeriesStatistics()
 * @generated
 */
int TIME_SERIES_STATISTICS = 13;
/**
 * The feature id for the '<em><b>Time Series</b></em>' reference list.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
int TIME_SERIES_STATISTICS__TIME_SERIES = 0;
/**
 * The number of structural features of the '<em>Time Series Statistics</em>' class.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
int TIME_SERIES_STATISTICS_FEATURE_COUNT = 1;
/**
 * The number of operations of the '<em>Time Series Statistics</em>' class.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
int TIME_SERIES_STATISTICS_OPERATION_COUNT = 0;
/**
 * The meta object id for the '{@link kieker.model.analysismodel.statistics.EPredefinedUnits <em>EPredefined Units</em>}' enum.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see kieker.model.analysismodel.statistics.EPredefinedUnits
 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getEPredefinedUnits()
 * @generated
 */
int EPREDEFINED_UNITS = 14;
/**
 * The meta object id for the '{@link kieker.model.analysismodel.statistics.EPropertyType <em>EProperty Type</em>}' enum.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see kieker.model.analysismodel.statistics.EPropertyType
 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getEPropertyType()
 * @generated
 */
int EPROPERTY_TYPE = 15;
// NOTE(review): EMF-generated accessor declarations for the package meta objects. Declaration-only
// (bodies live in StatisticsPackageImpl); regenerate from the Ecore model rather than hand-editing.
/**
 * Returns the meta object for class '{@link kieker.model.analysismodel.statistics.Statistics <em>Statistics</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>Statistics</em>'.
 * @see kieker.model.analysismodel.statistics.Statistics
 * @generated
 */
EClass getStatistics();
/**
 * Returns the meta object for the map '{@link kieker.model.analysismodel.statistics.Statistics#getStatistics <em>Statistics</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the map '<em>Statistics</em>'.
 * @see kieker.model.analysismodel.statistics.Statistics#getStatistics()
 * @see #getStatistics()
 * @generated
 */
EReference getStatistics_Statistics();
/**
 * Returns the meta object for class '{@link java.util.Map.Entry <em>Units To Statistics Map Entry</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>Units To Statistics Map Entry</em>'.
 * @see java.util.Map.Entry
 * @model keyDataType="kieker.model.analysismodel.statistics.EPredefinedUnits"
 *        valueType="kieker.model.analysismodel.statistics.StatisticRecord" valueContainment="true"
 * @generated
 */
EClass getUnitsToStatisticsMapEntry();
/**
 * Returns the meta object for the attribute '{@link java.util.Map.Entry <em>Key</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the attribute '<em>Key</em>'.
 * @see java.util.Map.Entry
 * @see #getUnitsToStatisticsMapEntry()
 * @generated
 */
EAttribute getUnitsToStatisticsMapEntry_Key();
/**
 * Returns the meta object for the containment reference '{@link java.util.Map.Entry <em>Value</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the containment reference '<em>Value</em>'.
 * @see java.util.Map.Entry
 * @see #getUnitsToStatisticsMapEntry()
 * @generated
 */
EReference getUnitsToStatisticsMapEntry_Value();
/**
 * Returns the meta object for class '{@link kieker.model.analysismodel.statistics.StatisticRecord <em>Statistic Record</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>Statistic Record</em>'.
 * @see kieker.model.analysismodel.statistics.StatisticRecord
 * @generated
 */
EClass getStatisticRecord();
/**
 * Returns the meta object for the map '{@link kieker.model.analysismodel.statistics.StatisticRecord#getProperties <em>Properties</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the map '<em>Properties</em>'.
 * @see kieker.model.analysismodel.statistics.StatisticRecord#getProperties()
 * @see #getStatisticRecord()
 * @generated
 */
EReference getStatisticRecord_Properties();
/**
 * Returns the meta object for class '{@link java.util.Map.Entry <em>EProperty Type To Value</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>EProperty Type To Value</em>'.
 * @see java.util.Map.Entry
 * @model keyDataType="kieker.model.analysismodel.statistics.EPropertyType"
 *        valueDataType="org.eclipse.emf.ecore.EJavaObject"
 * @generated
 */
EClass getEPropertyTypeToValue();
/**
 * Returns the meta object for the attribute '{@link java.util.Map.Entry <em>Key</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the attribute '<em>Key</em>'.
 * @see java.util.Map.Entry
 * @see #getEPropertyTypeToValue()
 * @generated
 */
EAttribute getEPropertyTypeToValue_Key();
/**
 * Returns the meta object for the attribute '{@link java.util.Map.Entry <em>Value</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the attribute '<em>Value</em>'.
 * @see java.util.Map.Entry
 * @see #getEPropertyTypeToValue()
 * @generated
 */
EAttribute getEPropertyTypeToValue_Value();
/**
 * Returns the meta object for class '{@link kieker.model.analysismodel.statistics.TimeSeries <em>Time Series</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>Time Series</em>'.
 * @see kieker.model.analysismodel.statistics.TimeSeries
 * @generated
 */
EClass getTimeSeries();
/**
 * Returns the meta object for the attribute '{@link kieker.model.analysismodel.statistics.TimeSeries#getName <em>Name</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the attribute '<em>Name</em>'.
 * @see kieker.model.analysismodel.statistics.TimeSeries#getName()
 * @see #getTimeSeries()
 * @generated
 */
EAttribute getTimeSeries_Name();
/**
 * Returns the meta object for the attribute '{@link kieker.model.analysismodel.statistics.TimeSeries#getUnit <em>Unit</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the attribute '<em>Unit</em>'.
 * @see kieker.model.analysismodel.statistics.TimeSeries#getUnit()
 * @see #getTimeSeries()
 * @generated
 */
EAttribute getTimeSeries_Unit();
/**
 * Returns the meta object for the reference list '{@link kieker.model.analysismodel.statistics.TimeSeries#getValues <em>Values</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the reference list '<em>Values</em>'.
 * @see kieker.model.analysismodel.statistics.TimeSeries#getValues()
 * @see #getTimeSeries()
 * @generated
 */
EReference getTimeSeries_Values();
/**
 * Returns the meta object for class '{@link kieker.model.analysismodel.statistics.Value <em>Value</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>Value</em>'.
 * @see kieker.model.analysismodel.statistics.Value
 * @generated
 */
EClass getValue();
/**
 * Returns the meta object for the attribute '{@link kieker.model.analysismodel.statistics.Value#getTimestamp <em>Timestamp</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the attribute '<em>Timestamp</em>'.
 * @see kieker.model.analysismodel.statistics.Value#getTimestamp()
 * @see #getValue()
 * @generated
 */
EAttribute getValue_Timestamp();
/**
 * Returns the meta object for class '{@link kieker.model.analysismodel.statistics.IntValue <em>Int Value</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>Int Value</em>'.
 * @see kieker.model.analysismodel.statistics.IntValue
 * @generated
 */
EClass getIntValue();
/**
 * Returns the meta object for the attribute '{@link kieker.model.analysismodel.statistics.IntValue#getMeasurement <em>Measurement</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the attribute '<em>Measurement</em>'.
 * @see kieker.model.analysismodel.statistics.IntValue#getMeasurement()
 * @see #getIntValue()
 * @generated
 */
EAttribute getIntValue_Measurement();
/**
 * Returns the meta object for class '{@link kieker.model.analysismodel.statistics.LongValue <em>Long Value</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>Long Value</em>'.
 * @see kieker.model.analysismodel.statistics.LongValue
 * @generated
 */
EClass getLongValue();
/**
 * Returns the meta object for the attribute '{@link kieker.model.analysismodel.statistics.LongValue#getMeasurement <em>Measurement</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the attribute '<em>Measurement</em>'.
 * @see kieker.model.analysismodel.statistics.LongValue#getMeasurement()
 * @see #getLongValue()
 * @generated
 */
EAttribute getLongValue_Measurement();
/**
 * Returns the meta object for class '{@link kieker.model.analysismodel.statistics.FloatValue <em>Float Value</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>Float Value</em>'.
 * @see kieker.model.analysismodel.statistics.FloatValue
 * @generated
 */
EClass getFloatValue();
/**
 * Returns the meta object for the attribute '{@link kieker.model.analysismodel.statistics.FloatValue#getMeasurement <em>Measurement</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the attribute '<em>Measurement</em>'.
 * @see kieker.model.analysismodel.statistics.FloatValue#getMeasurement()
 * @see #getFloatValue()
 * @generated
 */
EAttribute getFloatValue_Measurement();
/**
 * Returns the meta object for class '{@link kieker.model.analysismodel.statistics.DoubleValue <em>Double Value</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>Double Value</em>'.
 * @see kieker.model.analysismodel.statistics.DoubleValue
 * @generated
 */
EClass getDoubleValue();
/**
 * Returns the meta object for the attribute '{@link kieker.model.analysismodel.statistics.DoubleValue#getMeasurement <em>Measurement</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the attribute '<em>Measurement</em>'.
 * @see kieker.model.analysismodel.statistics.DoubleValue#getMeasurement()
 * @see #getDoubleValue()
 * @generated
 */
EAttribute getDoubleValue_Measurement();
/**
 * Returns the meta object for class '{@link kieker.model.analysismodel.statistics.Unit <em>Unit</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>Unit</em>'.
 * @see kieker.model.analysismodel.statistics.Unit
 * @generated
 */
EClass getUnit();
/**
 * Returns the meta object for class '{@link kieker.model.analysismodel.statistics.StatisticsModel <em>Model</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>Model</em>'.
 * @see kieker.model.analysismodel.statistics.StatisticsModel
 * @generated
 */
EClass getStatisticsModel();
/**
 * Returns the meta object for the map '{@link kieker.model.analysismodel.statistics.StatisticsModel#getStatistics <em>Statistics</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the map '<em>Statistics</em>'.
 * @see kieker.model.analysismodel.statistics.StatisticsModel#getStatistics()
 * @see #getStatisticsModel()
 * @generated
 */
EReference getStatisticsModel_Statistics();
/**
 * Returns the meta object for class '{@link java.util.Map.Entry <em>EObject To Statistics Map Entry</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>EObject To Statistics Map Entry</em>'.
 * @see java.util.Map.Entry
 * @model features="value key"
 *        valueType="kieker.model.analysismodel.statistics.Statistics" valueContainment="true"
 *        keyType="org.eclipse.emf.ecore.EObject"
 * @generated
 */
EClass getEObjectToStatisticsMapEntry();
/**
 * Returns the meta object for the containment reference '{@link java.util.Map.Entry <em>Value</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the containment reference '<em>Value</em>'.
 * @see java.util.Map.Entry
 * @see #getEObjectToStatisticsMapEntry()
 * @generated
 */
EReference getEObjectToStatisticsMapEntry_Value();
/**
 * Returns the meta object for the reference '{@link java.util.Map.Entry <em>Key</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the reference '<em>Key</em>'.
 * @see java.util.Map.Entry
 * @see #getEObjectToStatisticsMapEntry()
 * @generated
 */
EReference getEObjectToStatisticsMapEntry_Key();
/**
 * Returns the meta object for class '{@link kieker.model.analysismodel.statistics.TimeSeriesStatistics <em>Time Series Statistics</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for class '<em>Time Series Statistics</em>'.
 * @see kieker.model.analysismodel.statistics.TimeSeriesStatistics
 * @generated
 */
EClass getTimeSeriesStatistics();
/**
 * Returns the meta object for the reference list '{@link kieker.model.analysismodel.statistics.TimeSeriesStatistics#getTimeSeries <em>Time Series</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the reference list '<em>Time Series</em>'.
 * @see kieker.model.analysismodel.statistics.TimeSeriesStatistics#getTimeSeries()
 * @see #getTimeSeriesStatistics()
 * @generated
 */
EReference getTimeSeriesStatistics_TimeSeries();
/**
 * Returns the meta object for enum '{@link kieker.model.analysismodel.statistics.EPredefinedUnits <em>EPredefined Units</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for enum '<em>EPredefined Units</em>'.
 * @see kieker.model.analysismodel.statistics.EPredefinedUnits
 * @generated
 */
EEnum getEPredefinedUnits();
/**
 * Returns the meta object for enum '{@link kieker.model.analysismodel.statistics.EPropertyType <em>EProperty Type</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for enum '<em>EProperty Type</em>'.
 * @see kieker.model.analysismodel.statistics.EPropertyType
 * @generated
 */
EEnum getEPropertyType();
/**
 * Returns the factory that creates the instances of the model.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the factory that creates the instances of the model.
 * @generated
 */
StatisticsFactory getStatisticsFactory();
/**
 * <!-- begin-user-doc -->
 * Defines literals for the meta objects that represent
 * <ul>
 *   <li>each class,</li>
 *   <li>each feature of each class,</li>
 *   <li>each operation of each class,</li>
 *   <li>each enum,</li>
 *   <li>and each data type</li>
 * </ul>
 * <!-- end-user-doc -->
 * @generated
 */
// NOTE(review): EMF-generated literal holders. Each field eagerly resolves its meta object from
// eINSTANCE; interface-field initializer order is significant, so do not reorder — regenerate
// from the Ecore model instead of editing by hand.
interface Literals {
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.StatisticsImpl <em>Statistics</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getStatistics()
	 * @generated
	 */
	EClass STATISTICS = eINSTANCE.getStatistics();
	/**
	 * The meta object literal for the '<em><b>Statistics</b></em>' map feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EReference STATISTICS__STATISTICS = eINSTANCE.getStatistics_Statistics();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.UnitsToStatisticsMapEntryImpl <em>Units To Statistics Map Entry</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.UnitsToStatisticsMapEntryImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getUnitsToStatisticsMapEntry()
	 * @generated
	 */
	EClass UNITS_TO_STATISTICS_MAP_ENTRY = eINSTANCE.getUnitsToStatisticsMapEntry();
	/**
	 * The meta object literal for the '<em><b>Key</b></em>' attribute feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EAttribute UNITS_TO_STATISTICS_MAP_ENTRY__KEY = eINSTANCE.getUnitsToStatisticsMapEntry_Key();
	/**
	 * The meta object literal for the '<em><b>Value</b></em>' containment reference feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EReference UNITS_TO_STATISTICS_MAP_ENTRY__VALUE = eINSTANCE.getUnitsToStatisticsMapEntry_Value();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.StatisticRecordImpl <em>Statistic Record</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.StatisticRecordImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getStatisticRecord()
	 * @generated
	 */
	EClass STATISTIC_RECORD = eINSTANCE.getStatisticRecord();
	/**
	 * The meta object literal for the '<em><b>Properties</b></em>' map feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EReference STATISTIC_RECORD__PROPERTIES = eINSTANCE.getStatisticRecord_Properties();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.EPropertyTypeToValueImpl <em>EProperty Type To Value</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.EPropertyTypeToValueImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getEPropertyTypeToValue()
	 * @generated
	 */
	EClass EPROPERTY_TYPE_TO_VALUE = eINSTANCE.getEPropertyTypeToValue();
	/**
	 * The meta object literal for the '<em><b>Key</b></em>' attribute feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EAttribute EPROPERTY_TYPE_TO_VALUE__KEY = eINSTANCE.getEPropertyTypeToValue_Key();
	/**
	 * The meta object literal for the '<em><b>Value</b></em>' attribute feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EAttribute EPROPERTY_TYPE_TO_VALUE__VALUE = eINSTANCE.getEPropertyTypeToValue_Value();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.TimeSeriesImpl <em>Time Series</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.TimeSeriesImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getTimeSeries()
	 * @generated
	 */
	EClass TIME_SERIES = eINSTANCE.getTimeSeries();
	/**
	 * The meta object literal for the '<em><b>Name</b></em>' attribute feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EAttribute TIME_SERIES__NAME = eINSTANCE.getTimeSeries_Name();
	/**
	 * The meta object literal for the '<em><b>Unit</b></em>' attribute feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EAttribute TIME_SERIES__UNIT = eINSTANCE.getTimeSeries_Unit();
	/**
	 * The meta object literal for the '<em><b>Values</b></em>' reference list feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EReference TIME_SERIES__VALUES = eINSTANCE.getTimeSeries_Values();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.ValueImpl <em>Value</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.ValueImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getValue()
	 * @generated
	 */
	EClass VALUE = eINSTANCE.getValue();
	/**
	 * The meta object literal for the '<em><b>Timestamp</b></em>' attribute feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EAttribute VALUE__TIMESTAMP = eINSTANCE.getValue_Timestamp();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.IntValueImpl <em>Int Value</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.IntValueImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getIntValue()
	 * @generated
	 */
	EClass INT_VALUE = eINSTANCE.getIntValue();
	/**
	 * The meta object literal for the '<em><b>Measurement</b></em>' attribute feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EAttribute INT_VALUE__MEASUREMENT = eINSTANCE.getIntValue_Measurement();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.LongValueImpl <em>Long Value</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.LongValueImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getLongValue()
	 * @generated
	 */
	EClass LONG_VALUE = eINSTANCE.getLongValue();
	/**
	 * The meta object literal for the '<em><b>Measurement</b></em>' attribute feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EAttribute LONG_VALUE__MEASUREMENT = eINSTANCE.getLongValue_Measurement();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.FloatValueImpl <em>Float Value</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.FloatValueImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getFloatValue()
	 * @generated
	 */
	EClass FLOAT_VALUE = eINSTANCE.getFloatValue();
	/**
	 * The meta object literal for the '<em><b>Measurement</b></em>' attribute feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EAttribute FLOAT_VALUE__MEASUREMENT = eINSTANCE.getFloatValue_Measurement();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.DoubleValueImpl <em>Double Value</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.DoubleValueImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getDoubleValue()
	 * @generated
	 */
	EClass DOUBLE_VALUE = eINSTANCE.getDoubleValue();
	/**
	 * The meta object literal for the '<em><b>Measurement</b></em>' attribute feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EAttribute DOUBLE_VALUE__MEASUREMENT = eINSTANCE.getDoubleValue_Measurement();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.Unit <em>Unit</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.Unit
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getUnit()
	 * @generated
	 */
	EClass UNIT = eINSTANCE.getUnit();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.StatisticsModelImpl <em>Model</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsModelImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getStatisticsModel()
	 * @generated
	 */
	EClass STATISTICS_MODEL = eINSTANCE.getStatisticsModel();
	/**
	 * The meta object literal for the '<em><b>Statistics</b></em>' map feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EReference STATISTICS_MODEL__STATISTICS = eINSTANCE.getStatisticsModel_Statistics();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.EObjectToStatisticsMapEntryImpl <em>EObject To Statistics Map Entry</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.EObjectToStatisticsMapEntryImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getEObjectToStatisticsMapEntry()
	 * @generated
	 */
	EClass EOBJECT_TO_STATISTICS_MAP_ENTRY = eINSTANCE.getEObjectToStatisticsMapEntry();
	/**
	 * The meta object literal for the '<em><b>Value</b></em>' containment reference feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EReference EOBJECT_TO_STATISTICS_MAP_ENTRY__VALUE = eINSTANCE.getEObjectToStatisticsMapEntry_Value();
	/**
	 * The meta object literal for the '<em><b>Key</b></em>' reference feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EReference EOBJECT_TO_STATISTICS_MAP_ENTRY__KEY = eINSTANCE.getEObjectToStatisticsMapEntry_Key();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.impl.TimeSeriesStatisticsImpl <em>Time Series Statistics</em>}' class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.impl.TimeSeriesStatisticsImpl
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getTimeSeriesStatistics()
	 * @generated
	 */
	EClass TIME_SERIES_STATISTICS = eINSTANCE.getTimeSeriesStatistics();
	/**
	 * The meta object literal for the '<em><b>Time Series</b></em>' reference list feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	EReference TIME_SERIES_STATISTICS__TIME_SERIES = eINSTANCE.getTimeSeriesStatistics_TimeSeries();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.EPredefinedUnits <em>EPredefined Units</em>}' enum.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.EPredefinedUnits
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getEPredefinedUnits()
	 * @generated
	 */
	EEnum EPREDEFINED_UNITS = eINSTANCE.getEPredefinedUnits();
	/**
	 * The meta object literal for the '{@link kieker.model.analysismodel.statistics.EPropertyType <em>EProperty Type</em>}' enum.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see kieker.model.analysismodel.statistics.EPropertyType
	 * @see kieker.model.analysismodel.statistics.impl.StatisticsPackageImpl#getEPropertyType()
	 * @generated
	 */
	EEnum EPROPERTY_TYPE = eINSTANCE.getEPropertyType();
}
} //StatisticsPackage
| |
/*
* Copyright 2014-2017 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.archiver;
import io.aeron.archiver.codecs.RecordingDescriptorDecoder;
import io.aeron.archiver.codecs.RecordingDescriptorEncoder;
import io.aeron.logbuffer.FrameDescriptor;
import io.aeron.protocol.DataHeaderFlyweight;
import org.agrona.BufferUtil;
import org.agrona.CloseHelper;
import org.agrona.LangUtil;
import org.agrona.concurrent.UnsafeBuffer;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import static io.aeron.archiver.ArchiveUtil.*;
import static io.aeron.archiver.RecordingWriter.NULL_TIME;
import static io.aeron.archiver.RecordingWriter.initDescriptor;
import static java.nio.file.StandardOpenOption.*;
import static org.agrona.BitUtil.align;
import static org.agrona.BufferUtil.allocateDirectAligned;
/**
* Catalog for the archive keeps details of recorded images, past and present, and used for browsing.
* The format is simple, allocating a fixed 4KB record for each record descriptor. This allows offset
* based look up of a descriptor in the file.
*/
class Catalog implements AutoCloseable
{
// Name of the catalog file kept inside the archive directory.
private static final String CATALOG_FILE_NAME = "archive.cat";
// Fixed length of each descriptor slot, enabling offset = recordingId * RECORD_LENGTH lookup.
static final int RECORD_LENGTH = 4096;
// Bytes reserved at the front of each slot for framing; the encoded descriptor length is
// stored at offset 0 of the slot (see addNewRecording()).
static final int CATALOG_FRAME_LENGTH = DataHeaderFlyweight.HEADER_LENGTH;
// Sentinel id meaning "no recording".
static final int NULL_RECORD_ID = -1;
// Alignment for the direct buffer; equals RECORD_LENGTH so one record spans one page.
private static final int PAGE_SIZE = 4096;
private final RecordingDescriptorEncoder recordingDescriptorEncoder = new RecordingDescriptorEncoder();
// Shared single-record scratch buffer: byteBuffer for channel I/O, unsafeBuffer is a
// flyweight view over the same memory for the SBE encoder/decoder. Not thread-safe.
private final ByteBuffer byteBuffer = BufferUtil.allocateDirectAligned(RECORD_LENGTH, PAGE_SIZE);
private final UnsafeBuffer unsafeBuffer = new UnsafeBuffer(byteBuffer);
private final FileChannel catalogFileChannel;
private final File archiveDir;
// Id that will be assigned next; advanced by sanitizeCatalog() on load and by addNewRecording().
private long nextRecordingId = 0;
/**
 * Creates or opens the catalog file in {@code archiveDir} and scans any existing entries,
 * repairing incomplete ones and recovering {@code nextRecordingId} (see {@code sanitizeCatalog()}).
 *
 * @param archiveDir directory holding the catalog and recording files
 */
Catalog(final File archiveDir)
{
    this.archiveDir = archiveDir;
    // Position the encoder past the frame header; it is re-wrapped before each catalog update.
    recordingDescriptorEncoder.wrap(unsafeBuffer, CATALOG_FRAME_LENGTH);
    final File catalogFile = new File(archiveDir, CATALOG_FILE_NAME);
    FileChannel channel = null;
    try
    {
        // DSYNC so catalog updates reach the storage device before write() returns.
        channel = FileChannel.open(catalogFile.toPath(), CREATE, READ, WRITE, DSYNC);
    }
    catch (final IOException ex)
    {
        LangUtil.rethrowUnchecked(ex);
    }
    finally
    {
        // Assigned in finally so the final field is definitely assigned on every path; when
        // open fails, rethrowUnchecked throws and sanitizeCatalog() below never runs.
        catalogFileChannel = channel;
    }
    sanitizeCatalog();
}
/**
 * {@inheritDoc}
 * <p>
 * Closes the underlying catalog file channel (null-safe via {@code CloseHelper}).
 */
@Override
public void close()
{
    CloseHelper.close(catalogFileChannel);
}
/**
 * Appends a descriptor for a new recording to the catalog and assigns it the next recording id.
 * The full fixed-length slot is written at offset {@code recordingId * RECORD_LENGTH}, with the
 * encoded descriptor length stored as an int at offset 0 of the slot.
 *
 * @return the id assigned to the new recording
 * @throws IllegalStateException on a partial write of the catalog slot
 */
long addNewRecording(
    final int sessionId,
    final int streamId,
    final String channel,
    final String sourceIdentity,
    final int termBufferLength,
    final int mtuLength,
    final int imageInitialTermId,
    final long joinPosition,
    final int segmentFileLength)
{
    final long newRecordingId = nextRecordingId;

    recordingDescriptorEncoder.limit(CATALOG_FRAME_LENGTH + RecordingDescriptorEncoder.BLOCK_LENGTH);
    initDescriptor(
        recordingDescriptorEncoder,
        newRecordingId,
        termBufferLength,
        segmentFileLength,
        mtuLength,
        imageInitialTermId,
        joinPosition,
        sessionId,
        streamId,
        channel,
        sourceIdentity);

    // Record the encoded length in the frame header at the start of the slot.
    final int encodedLength = recordingDescriptorEncoder.encodedLength();
    unsafeBuffer.putInt(0, encodedLength);

    try
    {
        byteBuffer.clear();
        final int written = catalogFileChannel.write(byteBuffer, newRecordingId * RECORD_LENGTH);
        if (written != RECORD_LENGTH)
        {
            // Include the actual size for diagnostics, mirroring readDescriptor()'s message style.
            throw new IllegalStateException("Wrong write size:" + written);
        }
    }
    catch (final IOException ex)
    {
        // Narrowed from Exception: only the channel write is expected to fail here and the
        // IllegalStateException above should propagate directly.
        LangUtil.rethrowUnchecked(ex);
    }

    nextRecordingId++;

    return newRecordingId;
}
/**
 * Reads the fixed-length descriptor slot for {@code recordingId} into {@code buffer}.
 *
 * @param recordingId id used to derive the slot offset within the catalog file
 * @param buffer      destination with exactly {@code RECORD_LENGTH} bytes remaining
 * @return true when a full slot was read, false when the slot lies at or beyond end of file
 * @throws IOException              on a channel read failure
 * @throws IllegalArgumentException when the buffer has the wrong number of bytes remaining
 * @throws IllegalStateException    on a partial read
 */
boolean readDescriptor(final long recordingId, final ByteBuffer buffer)
    throws IOException
{
    if (RECORD_LENGTH != buffer.remaining())
    {
        throw new IllegalArgumentException("buffer must have exactly RECORD_LENGTH remaining to read into");
    }

    final long offset = recordingId * RECORD_LENGTH;
    final int bytesRead = catalogFileChannel.read(buffer, offset);

    if (bytesRead == 0 || bytesRead == -1)
    {
        return false;
    }

    if (bytesRead != RECORD_LENGTH)
    {
        throw new IllegalStateException("Wrong read size:" + bytesRead);
    }

    return true;
}
/**
 * Rewrites the end position and timestamps of an existing catalog entry in place.
 *
 * @throws IllegalArgumentException when no entry exists for {@code recordingId}
 */
void updateRecordingMetaDataInCatalog(
    final long recordingId,
    final long endPosition,
    final long joinTimestamp,
    final long endTimestamp) throws IOException
{
    // Load the existing slot into the shared scratch buffer; also validates the id exists.
    byteBuffer.clear();
    if (!readDescriptor(recordingId, byteBuffer))
    {
        throw new IllegalArgumentException("Invalid recording id : " + recordingId);
    }

    recordingDescriptorEncoder
        .wrap(unsafeBuffer, CATALOG_FRAME_LENGTH)
        .endPosition(endPosition)
        .joinTimestamp(joinTimestamp)
        .endTimestamp(endTimestamp);

    byteBuffer.clear();
    // NOTE(review): unlike addNewRecording(), the write size is not verified here — confirm a
    // short write is acceptable or add the same RECORD_LENGTH check.
    catalogFileChannel.write(byteBuffer, recordingId * RECORD_LENGTH);
}
/**
 * @return the id that will be assigned to the next recording added to the catalog.
 */
long nextRecordingId()
{
    return nextRecordingId;
}
/**
 * Scans the catalog file on construction, validating each fixed-length entry, repairing entries
 * left without an end timestamp by an unclean shutdown, and advancing {@code nextRecordingId}
 * past the last entry found. On an I/O failure the channel is closed before rethrowing.
 */
private void sanitizeCatalog()
{
    try
    {
        final RecordingDescriptorDecoder decoder = new RecordingDescriptorDecoder();
        while (catalogFileChannel.read(byteBuffer, nextRecordingId * RECORD_LENGTH) != -1)
        {
            byteBuffer.flip();
            if (byteBuffer.remaining() == 0)
            {
                // Zero bytes at the current offset: end of the catalog reached.
                break;
            }

            if (byteBuffer.remaining() != RECORD_LENGTH)
            {
                // A partial slot indicates a truncated/corrupt catalog file.
                throw new IllegalStateException();
            }
            byteBuffer.clear();

            // Decode via the flyweight view over the same memory just read from the file.
            decoder.wrap(
                unsafeBuffer,
                CATALOG_FRAME_LENGTH,
                RecordingDescriptorDecoder.BLOCK_LENGTH,
                RecordingDescriptorDecoder.SCHEMA_VERSION);
            sanitizeCatalogEntry(decoder, nextRecordingId);

            nextRecordingId++;
            byteBuffer.clear();
        }
    }
    catch (final IOException ex)
    {
        CloseHelper.quietClose(catalogFileChannel);
        LangUtil.rethrowUnchecked(ex);
    }
}
/**
 * Validates a single catalog entry and patches it up if the archive was not shut down cleanly.
 * An entry with a valid end timestamp is accepted as-is. An entry with a NULL_TIME end timestamp
 * is repaired from the recording's meta file, or — if the meta file is also unconcluded — from
 * the segment files via {@link #recoverIncompleteMetaData}.
 *
 * @param catalogRecordingDescriptor decoder positioned over the catalog entry.
 * @param recordingId                id the entry is expected to carry.
 * @throws IOException           on failure reading or writing recovery data.
 * @throws IllegalStateException if the entry's recording id does not match {@code recordingId}.
 */
private void sanitizeCatalogEntry(
    final RecordingDescriptorDecoder catalogRecordingDescriptor,
    final long recordingId) throws IOException
{
    if (recordingId != catalogRecordingDescriptor.recordingId())
    {
        throw new IllegalStateException("Expecting recordingId: " + recordingId +
            " but found: " + catalogRecordingDescriptor.recordingId());
    }

    if (catalogRecordingDescriptor.endTimestamp() != NULL_TIME)
    {
        // TODO: do we want to confirm all files for an entry exist? are valid?
        return;
    }

    // On load from a clean shutdown all catalog entries should have a valid end time. Reaching
    // here means the end timestamp is NULL_TIME, so the entry must be patched up or the index
    // declared unusable. (The original re-check of endTimestamp() == NULL_TIME was redundant
    // after the early return above and has been removed.)
    final File metaFile = new File(archiveDir, recordingMetaFileName(recordingId));
    final RecordingDescriptorDecoder fileRecordingDescriptor = loadRecordingDescriptor(metaFile);
    if (fileRecordingDescriptor.endTimestamp() != NULL_TIME)
    {
        // The meta file has an end time -> the recording concluded; only the catalog is stale.
        updateRecordingMetaDataInCatalog(
            recordingId,
            fileRecordingDescriptor.endPosition(),
            fileRecordingDescriptor.joinTimestamp(),
            fileRecordingDescriptor.endTimestamp());
        return;
    }

    // Neither the catalog nor the meta file concluded -> recover from the segment files.
    recoverIncompleteMetaData(recordingId, metaFile, fileRecordingDescriptor);
}
/**
 * Repairs a recording whose final metadata update never happened (e.g. crash mid-recording).
 * Derives the end position and timestamps from the segment files — or, when no data was ever
 * written, from file modification times — then writes the results to both the catalog and the
 * recording's meta file.
 *
 * @param recordingId             id of the recording being repaired.
 * @param metaFile                the recording's meta data file.
 * @param fileRecordingDescriptor decoder over the meta file contents.
 * @throws IOException on failure reading segments or writing the recovered metadata.
 */
private void recoverIncompleteMetaData(
    final long recordingId,
    final File metaFile,
    final RecordingDescriptorDecoder fileRecordingDescriptor) throws IOException
{
    // last metadata update failed -> look in the recording files for end position
    final int maxSegment = findRecordingLastSegment(recordingId);
    // there are no segments
    final long endPosition;
    final long joinTimestamp;
    final long endTimestamp;
    if (maxSegment == -1)
    {
        // no segments found, no data written, update catalog and meta file with the last modified TS
        endPosition = fileRecordingDescriptor.joinPosition();
        joinTimestamp = fileRecordingDescriptor.joinTimestamp();
        endTimestamp = metaFile.lastModified();
    }
    else
    {
        // Data was written, so a join timestamp must have been recorded before any segment
        // file could exist.
        if (fileRecordingDescriptor.joinTimestamp() == NULL_TIME)
        {
            throw new IllegalStateException("joinTimestamp is NULL_TIME, but 1 or more segment files found");
        }
        else
        {
            joinTimestamp = fileRecordingDescriptor.joinTimestamp();
        }

        final File segmentFile = new File(archiveDir, recordingDataFileName(recordingId, maxSegment));
        final ByteBuffer headerBuffer =
            allocateDirectAligned(DataHeaderFlyweight.HEADER_LENGTH, FrameDescriptor.FRAME_ALIGNMENT);
        final DataHeaderFlyweight headerFlyweight = new DataHeaderFlyweight(headerBuffer);
        try (FileChannel segmentFileChannel = FileChannel.open(segmentFile.toPath(), READ))
        {
            final long joinPosition = fileRecordingDescriptor.joinPosition();
            // validate initial padding frame in first file
            if (joinPosition != 0 && maxSegment == 0)
            {
                validateFirstWritePreamble(
                    recordingId,
                    headerBuffer,
                    headerFlyweight,
                    segmentFileChannel,
                    joinPosition);
            }
            // chase frames to END_OF_DATA/RECORDING
            endPosition = readToEndPosition(
                headerBuffer,
                headerFlyweight,
                segmentFileChannel
            );
            // correct recording final terminal marker if not found
            if (headerFlyweight.frameLength() != RecordingWriter.END_OF_RECORDING_INDICATOR)
            {
                headerFlyweight.frameLength(RecordingWriter.END_OF_RECORDING_INDICATOR);
                segmentFileChannel.write(headerBuffer, endPosition);
            }
        }
        // The segment file's modification time approximates when recording stopped.
        endTimestamp = segmentFile.lastModified();
    }
    updateRecordingMetaDataInCatalog(recordingId, endPosition, joinTimestamp, endTimestamp);
    updateRecordingMetaDataFile(metaFile, endPosition, joinTimestamp, endTimestamp);
}
/**
 * Overwrites the end-of-recording fields of a recording's meta data file, which holds a single
 * record in the same layout as a catalog entry.
 *
 * @param metaDataFile  the meta file to update.
 * @param endPosition   final stream position of the recording.
 * @param joinTimestamp timestamp at which the recording joined the stream.
 * @param endTimestamp  timestamp at which the recording ended.
 * @throws IOException           on a channel open/read/write failure.
 * @throws IllegalStateException if the record could not be written back in full.
 */
private void updateRecordingMetaDataFile(
    final File metaDataFile,
    final long endPosition,
    final long joinTimestamp,
    final long endTimestamp) throws IOException
{
    try (FileChannel metaDataFileChannel = FileChannel.open(metaDataFile.toPath(), WRITE, READ))
    {
        byteBuffer.clear();
        metaDataFileChannel.read(byteBuffer);

        // NOTE(review): unsafeBuffer is presumed to wrap byteBuffer, so this patches the bytes
        // just read - confirm against the field declarations.
        recordingDescriptorEncoder
            .wrap(unsafeBuffer, CATALOG_FRAME_LENGTH)
            .endPosition(endPosition)
            .joinTimestamp(joinTimestamp)
            .endTimestamp(endTimestamp);

        byteBuffer.clear();
        // Verify the full record was written, consistent with the catalog update path.
        final int written = metaDataFileChannel.write(byteBuffer, 0);
        if (written != RECORD_LENGTH)
        {
            throw new IllegalStateException("Wrong write size:" + written);
        }
    }
}
/**
 * Validates the padding frame expected at the start of segment 0 when a recording joined the
 * stream at a non-zero position: the frame must be of padding type and its length must equal
 * the join position.
 *
 * @param recordingId        id of the recording being validated (for error reporting).
 * @param headerBuffer       buffer backing {@code headerFlyweight}; filled from the channel.
 * @param headerFlyweight    flyweight used to decode the frame header.
 * @param segmentFileChannel channel over the first segment file.
 * @param joinPosition       position at which the recording joined the stream.
 * @throws IOException           on a channel read failure.
 * @throws IllegalStateException if the preamble frame is missing or malformed.
 */
private void validateFirstWritePreamble(
    final long recordingId,
    final ByteBuffer headerBuffer,
    final DataHeaderFlyweight headerFlyweight,
    final FileChannel segmentFileChannel,
    final long joinPosition) throws IOException
{
    segmentFileChannel.read(headerBuffer, 0);
    headerBuffer.clear();

    final int headerType = headerFlyweight.headerType();
    if (headerType != DataHeaderFlyweight.HDR_TYPE_PAD)
    {
        throw new IllegalStateException("Recording : " + recordingId + " segment 0 is corrupt" +
            ". Expected a padding frame as join position is non-zero, but type is:" +
            headerType);
    }

    final int frameLength = headerFlyweight.frameLength();
    if (frameLength != joinPosition)
    {
        // Fixed: the message previously printed headerType instead of the offending frameLength.
        throw new IllegalStateException("Recording : " + recordingId + " segment 0 is corrupt" +
            ". Expected a padding frame with a length equal to join position, but length is:" +
            frameLength);
    }
}
/**
 * Walks the frame headers of a segment file from position 0, advancing by each frame's aligned
 * length, until a non-positive frame length is found. On return the flyweight holds that
 * terminal header and the returned value is its file position.
 *
 * @param headerBuffer       buffer backing {@code headerFlyweight}; refilled on each step.
 * @param headerFlyweight    flyweight used to decode each frame header.
 * @param segmentFileChannel channel over the segment file being scanned.
 * @return the file position of the first non-positive-length frame header.
 * @throws IOException on a channel read failure.
 */
private long readToEndPosition(
    final ByteBuffer headerBuffer,
    final DataHeaderFlyweight headerFlyweight,
    final FileChannel segmentFileChannel) throws IOException
{
    long position = 0;
    while (true)
    {
        // Refresh the header at the current position before examining the flyweight.
        segmentFileChannel.read(headerBuffer, position);
        headerBuffer.clear();

        final int frameLength = headerFlyweight.frameLength();
        if (frameLength <= 0)
        {
            return position;
        }
        position += align(frameLength, FrameDescriptor.FRAME_ALIGNMENT);
    }
}
/**
 * Finds the highest segment index present on disk for a recording.
 *
 * @param recordingId id of the recording whose segment files are listed.
 * @return the largest segment index found, or -1 when the recording has no segment files.
 */
private int findRecordingLastSegment(final long recordingId)
{
    int lastSegmentIndex = -1;
    for (final String segmentFileName : ArchiveUtil.listRecordingSegments(archiveDir, recordingId))
    {
        final int segmentIndex = ArchiveUtil.segmentIndexFromFileName(segmentFileName);
        if (segmentIndex > lastSegmentIndex)
        {
            lastSegmentIndex = segmentIndex;
        }
    }

    return lastSegmentIndex;
}
}
| |
package com.jpixel.image;
import com.jpixel.math.MathUtils;
/**
* A container for 2D pixel data. The class includes some useful functions to
* write color/image data.
*
* @author Denis Zhidkikh
* @version 1.2
* @since 26.10.2013
*/
public class Bitmap {
    protected int width, height;

    /**
     * Raw pixel data, row-major: pixel (x, y) is at index {@code x + y * width}.
     */
    public int[] pixels;

    /**
     * Initializes an empty bitmap.
     *
     * @param width Width of the bitmap.
     * @param height Height of the bitmap.
     */
    public Bitmap(int width, int height) {
        this.width = width;
        this.height = height;
        pixels = new int[width * height];
    }

    /**
     * Fills the whole bitmap with a single color, thus clearing the bitmap.
     *
     * @param color Color to fill with.
     */
    public void clear(int color) {
        for (int i = 0; i < pixels.length; i++)
            pixels[i] = color;
    }

    /**
     * Fills an area of the bitmap with a single color. The area is clipped to the bitmap
     * bounds and covers exactly {@code w x h} pixels when fully inside.
     * (Fixed: the previous version used inclusive loop bounds, painting (w+1) x (h+1)
     * pixels, and computed the far edge from the clamped origin, shifting the area when
     * x or y was negative.)
     *
     * @param color Color to fill with.
     * @param x Position where to begin filling (X coordinate).
     * @param y Position where to begin filling (Y coordinate).
     * @param w Width of the area to fill.
     * @param h Height of the area to fill.
     */
    public void fill(int color, int x, int y, int w, int h) {
        int x0 = Math.max(x, 0);
        int y0 = Math.max(y, 0);
        int x1 = Math.min(x + w, width);
        int y1 = Math.min(y + h, height);
        for (int yp = y0; yp < y1; yp++) {
            int row = yp * width;
            for (int xp = x0; xp < x1; xp++)
                pixels[xp + row] = color;
        }
    }

    /**
     * Performs a Block Image Transfer (blit) operation. Copies all of the pixel data from given bitmap onto this one.
     *
     * @param b Bitmap to copy.
     * @param xStart X coordinate position on this bitmap to which begin copying. Can be negative.
     * @param yStart Y coordinate position on this bitmap to which begin copying. Can be negative.
     */
    public void blit(Bitmap b, int xStart, int yStart) {
        int x0 = xStart < 0 ? 0 : xStart;
        int x1 = xStart + b.width;
        int y0 = yStart < 0 ? 0 : yStart;
        int y1 = yStart + b.height;
        if (y1 > height) y1 = height;
        if (x1 > width) x1 = width;
        for (int yp = y0; yp < y1; yp++) {
            int tp = yp * width;
            // Offset into the source row such that source (xp - xStart) lines up with dest xp.
            int bp = (yp - yStart) * b.width - xStart;
            for (int xp = x0; xp < x1; xp++) {
                pixels[xp + tp] = b.pixels[xp + bp];
            }
        }
    }

    /**
     * Performs a Block Image Transfer (blit) operation. Copies part of some part of pixel data from the given bitmap onto this one.
     *
     * @param b Bitmap to copy.
     * @param xStart X coordinate position on this bitmap to which begin copying.
     * @param yStart Y coordinate position on this bitmap to which begin copying.
     * @param xb X coordinate position on the given bitmap from which begin copying.
     * @param yb Y coordinate position on the given bitmap from which begin copying.
     * @param w Width of the area to copy.
     * @param h Height of the area to copy.
     */
    public void blit(Bitmap b, int xStart, int yStart, int xb, int yb, int w, int h) {
        if (w < 0 || h < 0) return;
        if (w > b.width - xb) w = b.width - xb;
        if (h > b.height - yb) h = b.height - yb;
        // NOTE(review): comparing xb/yb against the already-clamped w/h looks unintended
        // (xb <= w?); behavior preserved pending confirmation of the intended guard.
        if (!(xb >= 0 && yb >= 0 && xb <= w && yb <= h)) return;
        int x0 = xStart < 0 ? 0 : xStart;
        int x1 = xStart + w;
        int y0 = yStart < 0 ? 0 : yStart;
        int y1 = yStart + h;
        if (y1 > height) y1 = height;
        if (x1 > width) x1 = width;
        for (int yp = y0; yp < y1; yp++) {
            int tp = yp * width;
            int sp = (yp - yStart + yb) * b.width - (xStart - xb);
            for (int xp = x0; xp < x1; xp++) {
                pixels[tp + xp] = b.pixels[xp + sp];
            }
        }
    }

    /**
     * Performs a Block Image Transfer (blit) operation. Copies all of the pixel data (except for the alpha colour 0x00000000).
     *
     * @param b Bitmap to copy.
     * @param xStart X coordinate position on this bitmap to which begin copying. Can be negative.
     * @param yStart Y coordinate position on this bitmap to which begin copying. Can be negative.
     */
    public void blitAlpha(Bitmap b, int xStart, int yStart) {
        int x0 = xStart < 0 ? 0 : xStart;
        int x1 = xStart + b.width;
        int y0 = yStart < 0 ? 0 : yStart;
        int y1 = yStart + b.height;
        if (y1 > height) y1 = height;
        if (x1 > width) x1 = width;
        int col;
        for (int yp = y0; yp < y1; yp++) {
            int tp = yp * width;
            int bp = (yp - yStart) * b.width - xStart;
            for (int xp = x0; xp < x1; xp++) {
                col = b.pixels[xp + bp];
                // Pixel value 0 is treated as fully transparent and skipped.
                if (col != 0)
                    pixels[xp + tp] = col;
            }
        }
    }

    /**
     * Performs a Block Image Transfer (blit) operation. Copies part of some part of pixel data (except for the alpha colour 0x00000000) from the given bitmap onto this one.
     *
     * @param b Bitmap to copy.
     * @param xStart X coordinate position on this bitmap to which begin copying.
     * @param yStart Y coordinate position on this bitmap to which begin copying.
     * @param xb X coordinate position on the given bitmap from which begin copying.
     * @param yb Y coordinate position on the given bitmap from which begin copying.
     * @param w Width of the area to copy.
     * @param h Height of the area to copy.
     */
    public void blitAlpha(Bitmap b, int xStart, int yStart, int xb, int yb, int w, int h) {
        if (w < 0 || h < 0) return;
        if (w > b.width - xb) w = b.width - xb;
        if (h > b.height - yb) h = b.height - yb;
        // NOTE(review): same questionable guard as the region blit above; behavior preserved.
        if (!(xb >= 0 && yb >= 0 && xb <= w && yb <= h)) return;
        int x0 = xStart < 0 ? 0 : xStart;
        int x1 = xStart + w;
        int y0 = yStart < 0 ? 0 : yStart;
        int y1 = yStart + h;
        if (y1 > height) y1 = height;
        if (x1 > width) x1 = width;
        int col;
        for (int yp = y0; yp < y1; yp++) {
            int tp = yp * width;
            int sp = (yp - yStart + yb) * b.width - (xStart - xb);
            for (int xp = x0; xp < x1; xp++) {
                col = b.pixels[xp + sp];
                if (col != 0)
                    pixels[tp + xp] = col;
            }
        }
    }

    /**
     * Gets the width of this bitmap.
     *
     * @return Width of this bitmap.
     */
    public int getWidth() {
        return width;
    }

    /**
     * Gets the height of this bitmap.
     *
     * @return Height of this bitmap.
     */
    public int getHeight() {
        return height;
    }

    /**
     * Creates a resized version of the given bitmap using "nearest neighbour" approach.
     *
     * @param b The bitmap to resize.
     * @param width The width of the resized bitmap.
     * @param height The height of the resized bitmap.
     * @return A new instance of {@link Bitmap} which is resized version of <b>b</b>.
     */
    public static Bitmap resize(Bitmap b, int width, int height) {
        Bitmap result = new Bitmap(width, height);
        double sw = (double) b.width / width;
        double sh = (double) b.height / height;
        for (int y = 0; y < height; y++) {
            int yy = (int) (y * sh);
            for (int x = 0; x < width; x++) {
                int xx = (int) (x * sw);
                result.pixels[x + y * width] = b.pixels[xx + yy * b.width];
            }
        }
        return result;
    }

    /**
     * Creates a copy of the given bitmap.
     *
     * @param b Bitmap to create a copy from.
     * @return Copy of the given bitmap.
     */
    public static Bitmap copy(Bitmap b) {
        Bitmap result = new Bitmap(b.width, b.height);
        result.blit(b, 0, 0);
        return result;
    }

    /**
     * Rotates the bitmap around its center and outputs the result as a new object.
     * The result is sized to the rotated bounding box; uncovered pixels stay 0.
     *
     * @param b Bitmap to rotate.
     * @param angle Angle of rotation in radians.
     * @return An object of {@link com.jpixel.image.Bitmap} containing rotated pixels.
     */
    public static Bitmap rotate(Bitmap b, double angle) {
        // Rotated basis vectors for stepping through the source in destination order.
        double vx_x = MathUtils.rotate_x(angle, 1.0, 0.0);
        double vx_y = MathUtils.rotate_y(angle, 1.0, 0.0);
        double vy_x = MathUtils.rotate_x(angle, 0.0, 1.0);
        double vy_y = MathUtils.rotate_y(angle, 0.0, 1.0);
        // Bounding box of the rotated image.
        double ww = Math.abs(vx_x * b.width) + Math.abs(vx_y * b.height);
        double hh = Math.abs(vx_y * b.width) + Math.abs(vy_y * b.height);
        int w = (int) Math.round(ww);
        int h = (int) Math.round(hh);
        int cx = (int) ((w - b.width) / 2.0);
        int cy = (int) ((h - b.height) / 2.0);
        // Source coordinate corresponding to destination (0, 0).
        double nx0 = MathUtils.rotate_x(angle, -b.width / 2 - cx, -b.height / 2 - cy) + b.width / 2;
        double ny0 = MathUtils.rotate_y(angle, -b.width / 2 - cx, -b.height / 2 - cy) + b.height / 2;
        if (nx0 >= b.width) nx0--;
        if (ny0 >= b.height) ny0--;
        Bitmap result = new Bitmap(w, h);
        for (int y = 0; y < h; y++) {
            double pos2_x = nx0 + (y * vy_x);
            double pos2_y = ny0 + (y * vy_y);
            for (int x = 0; x < w; x++) {
                int xx = (int) (pos2_x + (x * vx_x));
                int yy = (int) (pos2_y + (x * vx_y));
                // Skip destination pixels that map outside the source bitmap.
                if (xx < 0 || xx >= b.width || yy < 0 || yy >= b.height) continue;
                result.pixels[x + y * w] = b.pixels[xx + yy * b.width];
            }
        }
        return result;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("[BITMAP] ");
        sb.append(width).append("x").append(height);
        sb.append(" Hash: ").append(Integer.toHexString(hashCode()));
        return sb.toString();
    }
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.casemodule;
import java.io.File;
import java.nio.file.Path;
import javax.swing.JPanel;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.logging.Level;
import java.util.UUID;
import javax.swing.filechooser.FileFilter;
import org.openide.util.NbBundle;
import org.openide.util.lookup.ServiceProvider;
import org.openide.util.lookup.ServiceProviders;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessor;
import org.sleuthkit.autopsy.coreutils.DataSourceUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor;
import org.sleuthkit.autopsy.ingest.IngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.IngestStream;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.SleuthkitJNI;
import org.sleuthkit.datamodel.TskCoreException;
/**
* A image file data source processor that implements the DataSourceProcessor
* service provider interface to allow integration with the add data source
* wizard. It also provides a run method overload to allow it to be used
* independently of the wizard.
*/
@ServiceProviders(value = {
@ServiceProvider(service = DataSourceProcessor.class),
@ServiceProvider(service = AutoIngestDataSourceProcessor.class)}
)
public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSourceProcessor {
// Display name for this data source type, shown in the add-data-source wizard.
private final static String DATA_SOURCE_TYPE = NbBundle.getMessage(ImageDSProcessor.class, "ImageDSProcessor.dsType.text");
private final Logger logger = Logger.getLogger(ImageDSProcessor.class.getName());

// Extensions and file filters describing the image file types this processor can open;
// populated by the static initializer below.
private static final List<String> allExt = new ArrayList<>();
private static final GeneralFilter rawFilter = new GeneralFilter(GeneralFilter.RAW_IMAGE_EXTS, GeneralFilter.RAW_IMAGE_DESC);
private static final GeneralFilter encaseFilter = new GeneralFilter(GeneralFilter.ENCASE_IMAGE_EXTS, GeneralFilter.ENCASE_IMAGE_DESC);
private static final GeneralFilter virtualMachineFilter = new GeneralFilter(GeneralFilter.VIRTUAL_MACHINE_EXTS, GeneralFilter.VIRTUAL_MACHINE_DESC);
private static final String ALL_DESC = NbBundle.getMessage(ImageDSProcessor.class, "ImageDSProcessor.allDesc.text");
private static final GeneralFilter allFilter = new GeneralFilter(allExt, ALL_DESC);
private static final List<FileFilter> filtersList = new ArrayList<>();

// Panel used to gather the data source path and options from the user.
private final ImageFilePanel configPanel;
// Background task started by doAddImageProcess(); retained so cancel() can reach it.
private AddImageTask addImageTask;
// Destination for files found during ingest; a DefaultIngestStream when not streaming.
private IngestStream ingestStream = null;
// The image as added to the case database; must be set before doAddImageProcess() runs.
private Image image = null;

/*
 * TODO: Remove the setDataSourceOptionsCalled flag and the settings fields
 * when the deprecated method setDataSourceOptions is removed.
 */
// Settings for the current run, captured from the config panel or passed in directly.
private String deviceId;
private String imagePath;
private int sectorSize;              // 0 requests sector-size auto-detection
private String timeZone;
private boolean ignoreFatOrphanFiles;
private String md5;                  // null when no hash was supplied
private String sha1;                 // null when no hash was supplied
private String sha256;               // null when no hash was supplied
private Host host = null;

static {
    filtersList.add(allFilter);
    filtersList.add(rawFilter);
    filtersList.add(encaseFilter);
    allExt.addAll(GeneralFilter.RAW_IMAGE_EXTS);
    allExt.addAll(GeneralFilter.ENCASE_IMAGE_EXTS);
    // Virtual machine image formats are not offered on macOS.
    if (!System.getProperty("os.name").toLowerCase().contains("mac")) {
        filtersList.add(virtualMachineFilter);
        allExt.addAll(GeneralFilter.VIRTUAL_MACHINE_EXTS);
    }
}

/**
 * Constructs an image file data source processor that implements the
 * DataSourceProcessor service provider interface to allow integration with
 * the add data source wizard. It also provides a run method overload to
 * allow it to be used independently of the wizard.
 */
public ImageDSProcessor() {
    configPanel = ImageFilePanel.createInstance(ImageDSProcessor.class.getName(), filtersList);
}
/**
 * Get the list of file filters supported by this DSP.
 *
 * @return A new list containing all supported file filters. Modifying the
 *         returned list does not affect this processor's internal state.
 */
static List<FileFilter> getFileFiltersList() {
    // Defensive copy so callers cannot mutate the shared static filter list.
    return new ArrayList<>(filtersList);
}
/**
 * Gets a string that describes the type of data sources this processor is
 * able to add to the case database. The string is suitable for display in a
 * type selection UI component (e.g., a combo box).
 *
 * @return A data source type display string for this data source processor.
 */
public static String getType() {
    return DATA_SOURCE_TYPE;
}

/**
 * Gets a string that describes the type of data sources this processor is
 * able to add to the case database. The string is suitable for display in a
 * type selection UI component (e.g., a combo box).
 *
 * @return A data source type display string for this data source processor.
 */
@Override
public String getDataSourceType() {
    // Instance-level accessor required by the DataSourceProcessor interface;
    // delegates to the static getType().
    return getType();
}

/**
 * Gets the panel that allows a user to select a data source and do any
 * configuration required by the data source. The panel is less than 544
 * pixels wide and less than 173 pixels high.
 *
 * @return A selection and configuration panel for this data source
 *         processor.
 */
@Override
public JPanel getPanel() {
    configPanel.reset();
    // Restore any previously persisted panel settings before presenting it.
    configPanel.readSettings();
    configPanel.select();
    return configPanel;
}

/**
 * Indicates whether the settings in the selection and configuration panel
 * are valid and complete.
 *
 * @return True if the settings are valid and complete and the processor is
 *         ready to have its run method called, false otherwise.
 */
@Override
public boolean isPanelValid() {
    return configPanel.validatePanel();
}

/**
 * Adds a data source to the case database using a background task in a
 * separate thread and the settings provided by the selection and
 * configuration panel. Returns as soon as the background task is started.
 * The background task uses a callback object to signal task completion and
 * return results.
 *
 * This method should not be called unless isPanelValid returns true.
 *
 * @param progressMonitor Progress monitor that will be used by the
 *                        background task to report progress.
 * @param callback        Callback that will be used by the background task
 *                        to return results.
 */
@Override
public void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
    // No host specified; delegate to the host-aware overload.
    run(null, progressMonitor, callback);
}
/**
 * Adds a data source to the case database using a background task in a
 * separate thread and the settings provided by the selection and
 * configuration panel. Returns as soon as the background task is started.
 * The background task uses a callback object to signal task completion and
 * return results.
 *
 * This method should not be called unless isPanelValid returns true.
 *
 * @param host            Host for this data source.
 * @param progressMonitor Progress monitor that will be used by the
 *                        background task to report progress.
 * @param callback        Callback that will be used by the background task
 *                        to return results.
 */
@Override
public void run(Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
    // Non-streaming path: a no-op stream satisfies doAddImageProcess's null check.
    ingestStream = new DefaultIngestStream();
    readConfigSettings();
    this.host = host;

    // Add the image to the case database up front so the background task only has
    // to enumerate its contents.
    try {
        image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
                new String[]{imagePath}, sectorSize, timeZone, md5, sha1, sha256, deviceId, this.host);
    } catch (TskCoreException ex) {
        logger.log(Level.SEVERE, "Error adding data source with path " + imagePath + " to database", ex);
        final List<String> errors = new ArrayList<>();
        errors.add(ex.getMessage());
        // Failures are reported through the callback rather than thrown.
        callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
        return;
    }

    doAddImageProcess(deviceId, imagePath, sectorSize, timeZone, ignoreFatOrphanFiles, md5, sha1, sha256, progressMonitor, callback);
}

/**
 * Adds a data source to the case database using a background task in a
 * separate thread and the settings provided by the selection and
 * configuration panel. Files found during ingest will be sent directly to
 * the IngestStream provided. Returns as soon as the background task is
 * started. The background task uses a callback object to signal task
 * completion and return results.
 *
 * This method should not be called unless isPanelValid returns true, and
 * should only be called for DSPs that support ingest streams.
 *
 * @param settings The ingest job settings.
 * @param progress Progress monitor that will be used by the background task
 *                 to report progress.
 * @param callBack Callback that will be used by the background task to
 *                 return results.
 */
@Override
public void runWithIngestStream(IngestJobSettings settings, DataSourceProcessorProgressMonitor progress,
        DataSourceProcessorCallback callBack) {
    // No host specified; delegate to the host-aware overload.
    runWithIngestStream(null, settings, progress, callBack);
}

/**
 * Adds a data source to the case database using a background task in a
 * separate thread and the settings provided by the selection and
 * configuration panel. Files found during ingest will be sent directly to
 * the IngestStream provided. Returns as soon as the background task is
 * started. The background task uses a callback object to signal task
 * completion and return results.
 *
 * This method should not be called unless isPanelValid returns true, and
 * should only be called for DSPs that support ingest streams.
 *
 * @param host     The host for this data source.
 * @param settings The ingest job settings.
 * @param progress Progress monitor that will be used by the background task
 *                 to report progress.
 * @param callBack Callback that will be used by the background task to
 *                 return results.
 */
@Override
public void runWithIngestStream(Host host, IngestJobSettings settings, DataSourceProcessorProgressMonitor progress,
        DataSourceProcessorCallback callBack) {
    // Read the settings from the wizard
    readConfigSettings();
    this.host = host;

    // Set up the data source before creating the ingest stream
    try {
        image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
                new String[]{imagePath}, sectorSize, timeZone, md5, sha1, sha256, deviceId, this.host);
    } catch (TskCoreException ex) {
        logger.log(Level.SEVERE, "Error adding data source with path " + imagePath + " to database", ex);
        final List<String> errors = new ArrayList<>();
        errors.add(ex.getMessage());
        callBack.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
        return;
    }

    // Now initialize the ingest stream
    try {
        ingestStream = IngestManager.getInstance().openIngestStream(image, settings);
    } catch (TskCoreException ex) {
        logger.log(Level.SEVERE, "Error starting ingest modules", ex);
        // There was an error with ingest, but the data source has already been added
        // so proceed with the defaultIngestStream. Code in openIngestStream
        // should have caused a dialog to popup with the errors.
        ingestStream = new DefaultIngestStream();
    }

    doAddImageProcess(deviceId, imagePath, sectorSize, timeZone, ignoreFatOrphanFiles, md5, sha1, sha256, progress, callBack);
}
/**
 * Store the options from the config panel into this processor's settings
 * fields, generating a fresh device id for the run. Empty hash strings are
 * normalized to null.
 */
private void readConfigSettings() {
    configPanel.storeSettings();
    deviceId = UUID.randomUUID().toString();
    imagePath = configPanel.getContentPaths();
    sectorSize = configPanel.getSectorSize();
    timeZone = configPanel.getTimeZone();
    ignoreFatOrphanFiles = configPanel.getNoFatOrphans();
    md5 = emptyToNull(configPanel.getMd5());
    sha1 = emptyToNull(configPanel.getSha1());
    sha256 = emptyToNull(configPanel.getSha256());
}

/**
 * Normalizes an empty string to null; the database layer expects null for
 * hashes that were not supplied.
 *
 * @param value The string to normalize; must not be null.
 * @return The string itself, or null if it was empty.
 */
private static String emptyToNull(String value) {
    return value.isEmpty() ? null : value;
}
/**
 * Check if this DSP supports ingest streams.
 *
 * @return True if this DSP supports an ingest stream, false otherwise.
 */
@Override
public boolean supportsIngestStream() {
    // Streaming is always available for disk images; see runWithIngestStream().
    return true;
}
/**
 * Adds a data source to the case database using a background task in a
 * separate thread and the given settings instead of those provided by the
 * selection and configuration panel. Returns as soon as the background task
 * is started and uses the callback object to signal task completion and
 * return results.
 *
 * @param deviceId             An ASCII-printable identifier for the device
 *                             associated with the data source that is
 *                             intended to be unique across multiple cases
 *                             (e.g., a UUID).
 * @param imagePath            Path to the image file.
 * @param timeZone             The time zone to use when processing dates
 *                             and times for the image, obtained from
 *                             java.util.TimeZone.getID.
 * @param ignoreFatOrphanFiles Whether to parse orphans if the image has a
 *                             FAT filesystem.
 * @param progressMonitor      Progress monitor for reporting progress
 *                             during processing.
 * @param callback             Callback to call when processing is done.
 */
public void run(String deviceId, String imagePath, String timeZone, boolean ignoreFatOrphanFiles, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
    ingestStream = new DefaultIngestStream();
    try {
        // Sector size 0 requests auto-detection. (Fixed: this previously passed the
        // sectorSize field, which could hold a stale value from an earlier panel-driven
        // run, while 0 was passed to doAddImageProcess below.)
        image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
                new String[]{imagePath}, 0, timeZone, "", "", "", deviceId);
    } catch (TskCoreException ex) {
        logger.log(Level.SEVERE, "Error adding data source with path " + imagePath + " to database", ex);
        final List<String> errors = new ArrayList<>();
        errors.add(ex.getMessage());
        callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
        return;
    }

    doAddImageProcess(deviceId, imagePath, 0, timeZone, ignoreFatOrphanFiles, null, null, null, progressMonitor, callback);
}
/**
 * Adds a data source to the case database using a background task in a
 * separate thread and the given settings instead of those provided by the
 * selection and configuration panel. Returns as soon as the background task
 * is started and uses the callback object to signal task completion and
 * return results.
 *
 * The image should be loaded in the database and stored in "image" before
 * calling this method. Additionally, an ingest stream should be initialized
 * and stored in "ingestStream".
 *
 * @param deviceId             An ASCII-printable identifier for the device
 *                             associated with the data source that is
 *                             intended to be unique across multiple cases
 *                             (e.g., a UUID).
 * @param imagePath            Path to the image file.
 * @param sectorSize           The sector size (use '0' for autodetect).
 * @param timeZone             The time zone to use when processing dates
 *                             and times for the image, obtained from
 *                             java.util.TimeZone.getID.
 * @param ignoreFatOrphanFiles Whether to parse orphans if the image has a
 *                             FAT filesystem.
 * @param md5                  The MD5 hash of the image, may be null.
 * @param sha1                 The SHA-1 hash of the image, may be null.
 * @param sha256               The SHA-256 hash of the image, may be null.
 * @param progressMonitor      Progress monitor for reporting progress
 *                             during processing.
 * @param callback             Callback to call when processing is done.
 */
private void doAddImageProcess(String deviceId, String imagePath, int sectorSize, String timeZone, boolean ignoreFatOrphanFiles, String md5, String sha1, String sha256, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
    // If the data source or ingest stream haven't been initialized, stop processing
    if (ingestStream == null) {
        reportPreconditionFailure("Ingest stream was not initialized before running the add image process on " + imagePath, callback);
        return;
    }
    if (image == null) {
        reportPreconditionFailure("Image was not added to database before running the add image process on " + imagePath, callback);
        return;
    }

    AddImageTask.ImageDetails imageDetails = new AddImageTask.ImageDetails(deviceId, image, sectorSize, timeZone, ignoreFatOrphanFiles, md5, sha1, sha256, null);
    addImageTask = new AddImageTask(imageDetails,
            progressMonitor,
            new StreamingAddDataSourceCallbacks(ingestStream),
            new StreamingAddImageTaskCallback(ingestStream, callback));
    new Thread(addImageTask).start();
}

/**
 * Logs a precondition failure and reports it through the callback as a
 * critical error with no new data sources.
 *
 * @param message  Description of the failed precondition.
 * @param callback Callback used to deliver the error to the caller.
 */
private void reportPreconditionFailure(String message, DataSourceProcessorCallback callback) {
    logger.log(Level.SEVERE, message);
    final List<String> errors = new ArrayList<>();
    errors.add(message);
    callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
}
/**
 * Requests a best-effort cancellation of the in-progress add-data-source task
 * started by a run method, and stops the ingest stream. The case database may
 * still have been modified; if cancellation succeeds, the background task
 * reports an empty list of new data sources.
 */
@Override
public void cancel() {
    if (addImageTask != null) {
        addImageTask.cancelTask();
    }
    if (ingestStream != null) {
        ingestStream.stop();
    }
}
/**
 * Resets the selection and configuration panel for this data source
 * processor and clears all per-run settings fields. (Fixed: sectorSize, md5,
 * sha1, and sha256 were previously left holding values from the prior run.)
 */
@Override
public void reset() {
    deviceId = null;
    imagePath = null;
    sectorSize = 0;
    timeZone = null;
    ignoreFatOrphanFiles = false;
    md5 = null;
    sha1 = null;
    sha256 = null;
    host = null;
    configPanel.reset();
}
/**
 * Tests whether a file is accepted by at least one of the given file filters.
 *
 * @param file    The file to test.
 * @param filters Filters to check the file against.
 * @return True if any filter accepts the file, false otherwise.
 */
private static boolean isAcceptedByFiler(File file, List<FileFilter> filters) {
    return filters.stream().anyMatch(filter -> filter.accept(file));
}
/**
 * Scores this processor's ability to handle the given data source for auto
 * ingest: 0 when the extension is unsupported or no TSK-readable file system
 * is found, 100 otherwise.
 *
 * @param dataSourcePath Path to the candidate data source.
 * @return 0 if the data source cannot be processed, 100 if it can.
 * @throws AutoIngestDataSourceProcessorException if inspecting the image fails.
 */
@Override
public int canProcess(Path dataSourcePath) throws AutoIngestDataSourceProcessorException {
    // check file extension for supported types
    if (!isAcceptedByFiler(dataSourcePath.toFile(), filtersList)) {
        return 0;
    }

    try {
        // verify that the image has a file system that TSK can process
        if (!DataSourceUtils.imageHasFileSystem(dataSourcePath)) {
            // image does not have a file system that TSK can process
            return 0;
        }
    } catch (Exception ex) {
        // Broad catch at this service boundary; wrap anything unexpected.
        throw new AutoIngestDataSourceProcessorException("Exception inside canProcess() method", ex);
    }

    // able to process the data source
    return 100;
}
/**
 * Adds a data source to the case database with no host specified; simply
 * delegates to the host-aware overload with a null host.
 *
 * @param deviceId        identifier for the device the image came from
 * @param dataSourcePath  path to the data source
 * @param progressMonitor progress monitor for the background task
 * @param callBack        callback invoked on completion or critical error
 */
@Override
public void process(String deviceId, Path dataSourcePath, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callBack) {
    process(deviceId, dataSourcePath, null, progressMonitor, callBack);
}
/**
 * Adds the image at the given path to the case database and starts the
 * add-image background task. Bypasses the configuration panel: uses sector
 * size 0 (presumably "auto-detect" to SleuthkitJNI -- confirm), the local
 * time zone, and does not ignore FAT orphan files. Hashes are passed as
 * empty strings on this path.
 *
 * @param deviceId        identifier for the device the image came from
 * @param dataSourcePath  path to the image
 * @param host            host for the data source; may be null
 * @param progressMonitor progress monitor for the background task
 * @param callBack        callback invoked on completion or critical error
 */
@Override
public void process(String deviceId, Path dataSourcePath, Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callBack) {
    // this method does not use the config panel
    this.deviceId = deviceId;
    this.imagePath = dataSourcePath.toString();
    this.sectorSize = 0;
    this.timeZone = Calendar.getInstance().getTimeZone().getID();
    this.host = host;
    this.ignoreFatOrphanFiles = false;
    ingestStream = new DefaultIngestStream();
    try {
        // Register the image in the case database before launching the task.
        image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
        new String[]{imagePath}, sectorSize, timeZone, "", "", "", deviceId, host);
    } catch (TskCoreException ex) {
        logger.log(Level.SEVERE, "Error adding data source with path " + imagePath + " to database", ex);
        final List<String> errors = new ArrayList<>();
        errors.add(ex.getMessage());
        // Report a critical error with an empty list of new data sources.
        callBack.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
        return;
    }
    // Hand off to the shared add-image background task workflow.
    doAddImageProcess(deviceId, dataSourcePath.toString(), sectorSize, timeZone, ignoreFatOrphanFiles, null, null, null, progressMonitor, callBack);
}
/**
 * Adds a data source and opens an ingest stream for it with no host
 * specified; delegates to the host-aware overload with a null host.
 *
 * @param deviceId        identifier for the device the image came from
 * @param dataSourcePath  path to the data source
 * @param settings        ingest job settings used to open the ingest stream
 * @param progressMonitor progress monitor for the background task
 * @param callBack        callback invoked on completion or critical error
 * @return the opened ingest stream, or null if a critical error occurred
 */
@Override
public IngestStream processWithIngestStream(String deviceId, Path dataSourcePath, IngestJobSettings settings, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callBack) {
    return processWithIngestStream(deviceId, dataSourcePath, null, settings, progressMonitor, callBack);
}
/**
 * Adds the image at the given path to the case database, opens an ingest
 * stream over it, and starts the add-image background task. Bypasses the
 * configuration panel: uses sector size 0, the local time zone, and does
 * not ignore FAT orphan files.
 *
 * @param deviceId        identifier for the device the image came from
 * @param dataSourcePath  path to the image
 * @param host            host for the data source; may be null
 * @param settings        ingest job settings used to open the ingest stream
 * @param progressMonitor progress monitor for the background task
 * @param callBack        callback invoked on completion or critical error
 * @return the opened ingest stream, or null if a critical error occurred
 */
@Override
public IngestStream processWithIngestStream(String deviceId, Path dataSourcePath, Host host, IngestJobSettings settings, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callBack) {
    // this method does not use the config panel
    this.deviceId = deviceId;
    this.imagePath = dataSourcePath.toString();
    this.sectorSize = 0;
    this.timeZone = Calendar.getInstance().getTimeZone().getID();
    this.host = host;
    this.ignoreFatOrphanFiles = false;
    // Set up the data source before creating the ingest stream
    try {
        // NOTE(review): md5/sha1/sha256 are never assigned in this method, so
        // they may carry stale values from a previous (panel-driven) run; the
        // sibling process() overload passes "" instead -- confirm intent.
        image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
        new String[]{imagePath}, sectorSize, timeZone, md5, sha1, sha256, deviceId, host);
    } catch (TskCoreException ex) {
        logger.log(Level.SEVERE, "Error adding data source with path " + imagePath + " to database", ex);
        final List<String> errors = new ArrayList<>();
        errors.add(ex.getMessage());
        callBack.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
        return null;
    }
    // Now initialize the ingest stream
    try {
        ingestStream = IngestManager.getInstance().openIngestStream(image, settings);
    } catch (TskCoreException ex) {
        logger.log(Level.SEVERE, "Error starting ingest modules", ex);
        final List<String> errors = new ArrayList<>();
        errors.add(ex.getMessage());
        callBack.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
        return null;
    }
    // Hand off to the shared add-image background task workflow.
    doAddImageProcess(deviceId, dataSourcePath.toString(), sectorSize, timeZone, ignoreFatOrphanFiles, null, null, null, progressMonitor, callBack);
    return ingestStream;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package groovy.json;
import org.apache.groovy.json.internal.JsonFastParser;
import org.apache.groovy.json.internal.JsonParserCharArray;
import org.apache.groovy.json.internal.JsonParserLax;
import org.apache.groovy.json.internal.JsonParserUsingCharacterSource;
import org.codehaus.groovy.runtime.DefaultGroovyMethodsSupport;
import org.codehaus.groovy.runtime.ResourceGroovyMethods;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
/**
* This has the same interface as the original JsonSlurper written for version 1.8.0, but its
* implementation has completely changed. It is now up to 20x faster than before, and its speed
* competes and often substantially exceeds popular common JSON parsers circa Jan, 2014.
* <p />
* JSON slurper parses text or reader content into a data structure of lists and maps.
* <p>
* Example usage:
* <code><pre class="groovyTestCase">
* def slurper = new groovy.json.JsonSlurper()
* def result = slurper.parseText('{"person":{"name":"Guillaume","age":33,"pets":["dog","cat"]}}')
*
* assert result.person.name == "Guillaume"
* assert result.person.age == 33
* assert result.person.pets.size() == 2
* assert result.person.pets[0] == "dog"
* assert result.person.pets[1] == "cat"
* </pre></code>
*
* JsonSlurper can use several types of JSON parsers. Please read the documentation for
* JsonParserType. There are relaxed mode parsers, large file parser, and index overlay parsers.
* Don't worry, it is all groovy. JsonSlurper will just work, but understanding the different parser
* types may allow you to drastically improve the performance of your JSON parsing.
* <p />
*
* Index overlay parsers (INDEX_OVERLAY and LAX) are the fastest JSON parsers.
* However they are not the default for a good reason.
 * Index overlay parsers have pointers (indexes really) to the original char buffer.
 * Care must be used if putting parsed maps into a long-term cache, as members of the map
 * may be index overlay objects pointing to the original buffer.
* You can mitigate these risks by using chop and lazy chop properties.
* <p />
* Chop eagerly dices up the buffer so each Value element points to a small copy of the original buffer.
* <p />
 * Lazy Chop dices up the buffer only when a list get or map get is called, for example
 * when a GPath expression or the like is applied.
* <p />
* You do not need chop or lazy chop if you are NOT putting the map into a long term cache.
* You do not need chop or lazy chop if you are doing object de-serialization.
* Recommendation is to use INDEX_OVERLAY for JSON buffers under 2MB.
* The maxSizeForInMemory is set to 2MB and any file over 2MB will use a parser designed for
* large files, which is slower than the INDEX_OVERLAY, LAX, and CHAR_BUFFER parsers, but
* faster than most commonly used JSON parsers on the JVM for most use cases circa January 2014.
* <p />
* To enable the INDEX_OVERLAY parser do this:
*
* <code><pre>
* parser = new JsonSlurper().setType(JsonParserType.INDEX_OVERLAY);
* </pre></code>
*
* @see groovy.json.JsonParserType
* @since 1.8.0
*/
public class JsonSlurper {

    // Threshold (bytes): files at or above this size are parsed with the
    // windowing character-source parser instead of fully in memory.
    private int maxSizeForInMemory = 2000000;
    // Eager buffer chopping for index overlay parsers (see class Javadoc).
    private boolean chop = false;
    // Lazy buffer chopping for index overlay parsers (see class Javadoc).
    private boolean lazyChop = true;
    // Whether index overlay parsers auto-detect date strings.
    private boolean checkDates = true;
    // Selected parser implementation; CHAR_BUFFER is the default.
    private JsonParserType type = JsonParserType.CHAR_BUFFER;

    /**
     * Max size before Slurper starts to use windowing buffer parser.
     *
     * @return size of file/buffer
     * @since 2.3
     */
    public int getMaxSizeForInMemory() {
        return maxSizeForInMemory;
    }

    /**
     * Max size before Slurper starts to use windowing buffer parser.
     *
     * @param maxSizeForInMemory threshold in bytes
     * @return this JsonSlurper (for call chaining)
     * @since 2.3
     */
    public JsonSlurper setMaxSizeForInMemory(int maxSizeForInMemory) {
        this.maxSizeForInMemory = maxSizeForInMemory;
        return this;
    }

    /**
     * Parser type.
     *
     * @return type
     * @see groovy.json.JsonParserType
     * @since 2.3
     */
    public JsonParserType getType() {
        return type;
    }

    /**
     * Parser type.
     *
     * @param type parser implementation to use
     * @return this JsonSlurper (for call chaining)
     * @see groovy.json.JsonParserType
     * @since 2.3
     */
    public JsonSlurper setType(JsonParserType type) {
        this.type = type;
        return this;
    }

    /**
     * Turns on buffer chopping for index overlay.
     *
     * @return chop on or off
     * @see groovy.json.JsonParserType
     * @since 2.3
     */
    public boolean isChop() {
        return chop;
    }

    /**
     * Turns on buffer chopping for index overlay.
     *
     * @param chop whether to chop eagerly
     * @return this JsonSlurper (for call chaining)
     * @see groovy.json.JsonParserType
     * @since 2.3
     */
    public JsonSlurper setChop(boolean chop) {
        this.chop = chop;
        return this;
    }

    /**
     * Turns on buffer lazy chopping for index overlay.
     *
     * @return on or off
     * @see groovy.json.JsonParserType
     * @since 2.3
     */
    public boolean isLazyChop() {
        return lazyChop;
    }

    /**
     * Turns on buffer lazy chopping for index overlay.
     *
     * @param lazyChop whether to chop lazily
     * @return this JsonSlurper (for call chaining)
     * @see groovy.json.JsonParserType
     * @since 2.3
     */
    public JsonSlurper setLazyChop(boolean lazyChop) {
        this.lazyChop = lazyChop;
        return this;
    }

    /**
     * Determine if slurper will automatically parse strings it recognizes as dates. Index overlay only.
     *
     * @return on or off
     * @since 2.3
     */
    public boolean isCheckDates() {
        return checkDates;
    }

    /**
     * Determine if slurper will automatically parse strings it recognizes as dates. Index overlay only.
     *
     * @param checkDates whether to auto-detect date strings
     * @return this JsonSlurper (for call chaining)
     * @since 2.3
     */
    public JsonSlurper setCheckDates(boolean checkDates) {
        this.checkDates = checkDates;
        return this;
    }

    /**
     * Parse a text representation of a JSON data structure
     *
     * @param text JSON text to parse
     * @return a data structure of lists and maps
     * @throws IllegalArgumentException if text is null or empty
     */
    public Object parseText(String text) {
        if (text == null || text.isEmpty()) {
            throw new IllegalArgumentException("Text must not be null or empty");
        }
        return createParser().parse(text);
    }

    /**
     * Parse a JSON data structure from content from a reader
     *
     * @param reader reader over a JSON content
     * @return a data structure of lists and maps
     * @throws IllegalArgumentException if reader is null
     */
    public Object parse(Reader reader) {
        if (reader == null) {
            throw new IllegalArgumentException("Reader must not be null");
        }
        Object content;
        JsonParser parser = createParser();
        content = parser.parse(reader);
        return content;
    }

    /**
     * Parse a JSON data structure from content from an inputStream
     *
     * @param inputStream stream over a JSON content
     * @return a data structure of lists and maps
     * @throws IllegalArgumentException if inputStream is null
     * @since 2.3
     */
    public Object parse(InputStream inputStream) {
        if (inputStream == null) {
            throw new IllegalArgumentException("inputStream must not be null");
        }
        Object content;
        JsonParser parser = createParser();
        content = parser.parse(inputStream);
        return content;
    }

    /**
     * Parse a JSON data structure from content from an inputStream
     *
     * @param inputStream stream over a JSON content
     * @param charset charset
     * @return a data structure of lists and maps
     * @throws IllegalArgumentException if inputStream or charset is null
     * @since 2.3
     */
    public Object parse(InputStream inputStream, String charset) {
        if (inputStream == null) {
            throw new IllegalArgumentException("inputStream must not be null");
        }
        if (charset == null) {
            throw new IllegalArgumentException("charset must not be null");
        }
        Object content;
        content = createParser().parse(inputStream, charset);
        return content;
    }

    /**
     * Parse a JSON data structure from content from a byte array.
     *
     * @param bytes buffer of JSON content
     * @param charset charset
     * @return a data structure of lists and maps
     * @throws IllegalArgumentException if bytes or charset is null
     * @since 2.3
     */
    public Object parse(byte [] bytes, String charset) {
        if (bytes == null) {
            throw new IllegalArgumentException("bytes must not be null");
        }
        if (charset == null) {
            throw new IllegalArgumentException("charset must not be null");
        }
        Object content;
        content = createParser().parse(bytes, charset);
        return content;
    }

    /**
     * Parse a JSON data structure from content from a byte array.
     *
     * @param bytes buffer of JSON content
     * @return a data structure of lists and maps
     * @throws IllegalArgumentException if bytes is null
     * @since 2.3
     */
    public Object parse(byte [] bytes) {
        if (bytes == null) {
            throw new IllegalArgumentException("bytes must not be null");
        }
        Object content;
        content = createParser().parse(bytes);
        return content;
    }

    /**
     * Parse a JSON data structure from content from a char array.
     *
     * @param chars buffer of JSON content
     * @return a data structure of lists and maps
     * @throws IllegalArgumentException if chars is null
     * @since 2.3
     */
    public Object parse(char [] chars) {
        if (chars == null) {
            throw new IllegalArgumentException("chars must not be null");
        }
        Object content;
        content = createParser().parse(chars);
        return content;
    }

    // Maps the configured JsonParserType to a parser instance; unknown types
    // fall back to the character-array parser (same as CHAR_BUFFER).
    private JsonParser createParser() {
        switch (type) {
            case LAX:
                return new JsonParserLax(false, chop, lazyChop, checkDates);
            case CHAR_BUFFER:
                return new JsonParserCharArray();
            case CHARACTER_SOURCE:
                return new JsonParserUsingCharacterSource();
            case INDEX_OVERLAY:
                return new JsonFastParser(false, chop, lazyChop, checkDates);
            default:
                return new JsonParserCharArray();
        }
    }

    /**
     * Parse a JSON data structure from content within a given Path.
     *
     * @param path {@link Path} containing JSON content
     * @return a data structure of lists and maps
     * @throws IOException if the path cannot be opened
     */
    public Object parse(Path path) throws IOException {
        // NOTE(review): the stream opened here is handed to the parser and is
        // presumably closed by it once parsing completes -- confirm; otherwise
        // this leaks a file handle.
        return parse(Files.newInputStream(path));
    }

    /**
     * Parse a JSON data structure from content within a given Path.
     *
     * @param path {@link Path} containing JSON content
     * @param charset the charset for this File
     * @return a data structure of lists and maps
     * @throws IOException if the path cannot be opened
     */
    public Object parse(Path path, String charset) throws IOException {
        return parse(Files.newInputStream(path), charset);
    }

    /**
     * Parse a JSON data structure from content within a given File.
     *
     * @param file File containing JSON content
     * @return a data structure of lists and maps
     * @since 2.2.0
     */
    public Object parse(File file) {
        return parseFile(file, null);
    }

    /**
     * Parse a JSON data structure from content within a given File.
     *
     * @param file File containing JSON content
     * @param charset the charset for this File
     * @return a data structure of lists and maps
     * @since 2.2.0
     */
    public Object parse(File file, String charset) {
        return parseFile(file, charset);
    }

    // Chooses between the configured in-memory parser and the windowing
    // character-source parser based on the maxSizeForInMemory threshold.
    private Object parseFile(File file, String charset) {
        if (file.length() < maxSizeForInMemory) {
            return createParser().parse(file, charset);
        } else {
            return new JsonParserUsingCharacterSource().parse(file, charset);
        }
    }

    /**
     * Parse a JSON data structure from content at a given URL.
     *
     * @param url URL containing JSON content
     * @return a data structure of lists and maps
     * @since 2.2.0
     */
    public Object parse(URL url) {
        return parseURL(url, null);
    }

    /**
     * Parse a JSON data structure from content at a given URL.
     *
     * @param url URL containing JSON content
     * @param params connection parameters
     * @return a data structure of lists and maps
     * @since 2.2.0
     */
    public Object parse(URL url, Map params) {
        return parseURL(url, params);
    }

    /**
     * Parse a JSON data structure from content at a given URL. Convenience variant when using Groovy named parameters for the connection params.
     *
     * @param params connection parameters
     * @param url URL containing JSON content
     * @return a data structure of lists and maps
     * @since 2.2.0
     */
    public Object parse(Map params, URL url) {
        return parseURL(url, params);
    }

    // Opens a reader over the URL (honouring optional connection params),
    // parses it, and always closes the reader afterwards.
    private Object parseURL(URL url, Map params) {
        Reader reader = null;
        try {
            if (params == null || params.isEmpty()) {
                reader = ResourceGroovyMethods.newReader(url);
            } else {
                reader = ResourceGroovyMethods.newReader(url, params);
            }
            return createParser().parse(reader);
        } catch (IOException ioe) {
            throw new JsonException("Unable to process url: " + url.toString(), ioe);
        } finally {
            if (reader != null) {
                DefaultGroovyMethodsSupport.closeWithWarning(reader);
            }
        }
    }

    /**
     * Parse a JSON data structure from content at a given URL.
     *
     * @param url URL containing JSON content
     * @param charset the charset for this File
     * @return a data structure of lists and maps
     * @since 2.2.0
     */
    public Object parse(URL url, String charset) {
        return parseURL(url, null, charset);
    }

    /**
     * Parse a JSON data structure from content at a given URL.
     *
     * @param url URL containing JSON content
     * @param params connection parameters
     * @param charset the charset for this File
     * @return a data structure of lists and maps
     * @since 2.2.0
     */
    public Object parse(URL url, Map params, String charset) {
        return parseURL(url, params, charset);
    }

    /**
     * Parse a JSON data structure from content at a given URL. Convenience variant when using Groovy named parameters for the connection params.
     *
     * @param params connection parameters
     * @param url URL containing JSON content
     * @param charset the charset for this File
     * @return a data structure of lists and maps
     * @since 2.2.0
     */
    public Object parse(Map params, URL url, String charset) {
        return parseURL(url, params, charset);
    }

    // Charset-aware variant of parseURL(URL, Map); routes the reader through
    // parse(Reader), which adds a null check before delegating to the parser.
    private Object parseURL(URL url, Map params, String charset) {
        Reader reader = null;
        try {
            if (params == null || params.isEmpty()) {
                reader = ResourceGroovyMethods.newReader(url, charset);
            } else {
                reader = ResourceGroovyMethods.newReader(url, params, charset);
            }
            return parse(reader);
        } catch (IOException ioe) {
            throw new JsonException("Unable to process url: " + url.toString(), ioe);
        } finally {
            if (reader != null) {
                DefaultGroovyMethodsSupport.closeWithWarning(reader);
            }
        }
    }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.file.impl;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.impl.scopes.LibraryRuntimeClasspathScope;
import com.intellij.openapi.progress.ProgressIndicatorProvider;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.impl.LibraryScopeCache;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiManagerImpl;
import com.intellij.psi.impl.ResolveScopeManager;
import com.intellij.psi.impl.source.resolve.FileContextUtil;
import com.intellij.psi.search.DelegatingGlobalSearchScope;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.util.containers.ConcurrentFactoryMap;
import consulo.annotation.access.RequiredReadAction;
import consulo.roots.OrderEntryWithTracking;
import javax.annotation.Nonnull;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@Singleton
public class ResolveScopeManagerImpl extends ResolveScopeManager {
    private final Project myProject;
    private final ProjectRootManager myProjectRootManager;
    private final PsiManager myManager;
    // Per-file default resolve scopes, computed lazily and cleared wholesale
    // whenever the PSI changes (registered in the constructor).
    private final Map<VirtualFile, GlobalSearchScope> myDefaultResolveScopesCache;

    @Inject
    public ResolveScopeManagerImpl(Project project, ProjectRootManager projectRootManager, PsiManager psiManager) {
        myProject = project;
        myProjectRootManager = projectRootManager;
        myManager = psiManager;
        // Scope computation: the first ResolveScopeProvider that returns a
        // scope wins; otherwise fall back to the inherent (module/library)
        // scope; finally each ResolveScopeEnlarger may union in extra scope.
        myDefaultResolveScopesCache = ConcurrentFactoryMap.createMap((key) -> {
            GlobalSearchScope scope = null;
            for (ResolveScopeProvider resolveScopeProvider : ResolveScopeProvider.EP_NAME.getExtensions()) {
                scope = resolveScopeProvider.getResolveScope(key, myProject);
                if (scope != null) break;
            }
            if (scope == null) scope = getInherentResolveScope(key);
            for (ResolveScopeEnlarger enlarger : ResolveScopeEnlarger.EP_NAME.getExtensions()) {
                final SearchScope extra = enlarger.getAdditionalResolveScope(key, myProject);
                if (extra != null) {
                    scope = scope.union(extra);
                }
            }
            return scope;
        });
        // Drop all cached scopes on any PSI change.
        ((PsiManagerImpl)psiManager).registerRunnableToRunOnChange(myDefaultResolveScopesCache::clear);
    }

    // Scope implied by the file's location: module content gets the
    // module-with-dependencies-and-libraries scope; library content gets the
    // combined scope of all modules that reference the library.
    @Nonnull
    private GlobalSearchScope getInherentResolveScope(VirtualFile vFile) {
        ProjectFileIndex projectFileIndex = myProjectRootManager.getFileIndex();
        Module module = projectFileIndex.getModuleForFile(vFile);
        if (module != null) {
            boolean includeTests = TestSourcesFilter.isTestSources(vFile, myProject);
            return GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(module, includeTests);
        }
        else {
            // resolve references in libraries in context of all modules which contain it
            List<Module> modulesLibraryUsedIn = new ArrayList<>();
            List<OrderEntry> orderEntries = projectFileIndex.getOrderEntriesForFile(vFile);
            // NOTE(review): when several LibraryOrderEntry instances match, the
            // last one wins as the "preferred" library -- confirm intentional.
            LibraryOrderEntry lib = null;
            for (OrderEntry entry : orderEntries) {
                if (entry instanceof ModuleExtensionWithSdkOrderEntry) {
                    modulesLibraryUsedIn.add(entry.getOwnerModule());
                }
                else if (entry instanceof LibraryOrderEntry) {
                    lib = (LibraryOrderEntry)entry;
                    modulesLibraryUsedIn.add(entry.getOwnerModule());
                }
                else if (entry instanceof ModuleOrderEntry) {
                    modulesLibraryUsedIn.add(entry.getOwnerModule());
                }
                else if (entry instanceof OrderEntryWithTracking) {
                    modulesLibraryUsedIn.add(entry.getOwnerModule());
                }
            }
            GlobalSearchScope allCandidates = LibraryScopeCache.getInstance(myProject).getScopeForLibraryUsedIn(modulesLibraryUsedIn);
            if (lib != null) {
                final LibraryRuntimeClasspathScope preferred = new LibraryRuntimeClasspathScope(myProject, lib);
                // prefer current library
                return new DelegatingGlobalSearchScope(allCandidates, preferred) {
                    @Override
                    public int compare(@Nonnull VirtualFile file1, @Nonnull VirtualFile file2) {
                        // Files from the preferred library rank above all others.
                        boolean c1 = preferred.contains(file1);
                        boolean c2 = preferred.contains(file2);
                        if (c1 && !c2) return 1;
                        if (c2 && !c1) return -1;
                        return super.compare(file1, file2);
                    }
                };
            }
            return allCandidates;
        }
    }

    @Override
    @Nonnull
    public GlobalSearchScope getResolveScope(@Nonnull PsiElement element) {
        ProgressIndicatorProvider.checkCanceled();
        VirtualFile vFile;
        final PsiFile contextFile;
        if (element instanceof PsiDirectory) {
            vFile = ((PsiDirectory)element).getVirtualFile();
            contextFile = null;
        }
        else {
            final PsiFile containingFile = element.getContainingFile();
            if (containingFile instanceof PsiCodeFragment) {
                // Code fragments may force their own scope, otherwise inherit
                // from their context element; with no context, resolve everywhere.
                final GlobalSearchScope forcedScope = ((PsiCodeFragment)containingFile).getForcedResolveScope();
                if (forcedScope != null) {
                    return forcedScope;
                }
                final PsiElement context = containingFile.getContext();
                if (context == null) {
                    return GlobalSearchScope.allScope(myProject);
                }
                return getResolveScope(context);
            }
            contextFile = containingFile != null ? FileContextUtil.getContextFile(containingFile) : null;
            if (contextFile == null) {
                return GlobalSearchScope.allScope(myProject);
            }
            else if (contextFile instanceof FileResolveScopeProvider) {
                return ((FileResolveScopeProvider)contextFile).getFileResolveScope();
            }
            vFile = contextFile.getOriginalFile().getVirtualFile();
        }
        // NOTE(review): for PsiDirectory, contextFile is always null, so the
        // directory branch always returns allScope here and never consults the
        // cache -- confirm whether that is intended.
        if (vFile == null || contextFile == null) {
            return GlobalSearchScope.allScope(myProject);
        }
        return myDefaultResolveScopesCache.get(vFile);
    }

    @Nonnull
    @RequiredReadAction
    @Override
    public GlobalSearchScope getDefaultResolveScope(final VirtualFile vFile) {
        final PsiFile psiFile = myManager.findFile(vFile);
        // The PSI file is only looked up to assert the file is known to PSI;
        // the cache itself is keyed by the VirtualFile.
        assert psiFile != null;
        return myDefaultResolveScopesCache.get(vFile);
    }

    @Override
    @Nonnull
    public GlobalSearchScope getUseScope(@Nonnull PsiElement element) {
        Pair<GlobalSearchScope, VirtualFile> pair = getDefaultResultScopeInfo(element);
        // No virtual file means no enlargers can apply; return the base scope.
        if(pair.getSecond() == null) {
            return pair.getFirst();
        }
        GlobalSearchScope targetScope = pair.getFirst();
        // Let every enlarger widen the use scope for this file.
        for (ResolveScopeEnlarger scopeEnlarger : ResolveScopeEnlarger.EP_NAME.getExtensions()) {
            SearchScope scope = scopeEnlarger.getAdditionalUseScope(pair.getSecond(), element.getProject());
            if(scope != null) {
                targetScope = targetScope.union(scope);
            }
        }
        return targetScope;
    }

    // Base use scope plus the virtual file it was derived from (null file when
    // the element has no containing/virtual file).
    @Nonnull
    private Pair<GlobalSearchScope, VirtualFile> getDefaultResultScopeInfo(@Nonnull PsiElement element) {
        VirtualFile vFile;
        final GlobalSearchScope allScope = GlobalSearchScope.allScope(myManager.getProject());
        if (element instanceof PsiDirectory) {
            vFile = ((PsiDirectory)element).getVirtualFile();
        }
        else {
            final PsiFile containingFile = element.getContainingFile();
            if (containingFile == null) return Pair.create(allScope, null);
            vFile = containingFile.getVirtualFile();
        }
        if (vFile == null) return Pair.create(allScope, null);
        ProjectFileIndex projectFileIndex = myProjectRootManager.getFileIndex();
        Module module = projectFileIndex.getModuleForFile(vFile);
        if (module != null) {
            boolean isTest = TestSourcesFilter.isTestSources(vFile, element.getProject());
            GlobalSearchScope scope = isTest ? GlobalSearchScope.moduleTestsWithDependentsScope(module) : GlobalSearchScope.moduleWithDependentsScope(module);
            return Pair.create(scope, vFile);
        }
        else {
            // Files outside any module: project-wide scope, plus the file
            // itself when allScope does not already contain it.
            final PsiFile f = element.getContainingFile();
            final VirtualFile vf = f == null ? null : f.getVirtualFile();
            GlobalSearchScope scope =
            f == null || vf == null || vf.isDirectory() || allScope.contains(vf) ? allScope : GlobalSearchScope.fileScope(f).uniteWith(allScope);
            return Pair.create(scope, vf);
        }
    }
}
| |
package com.usst.app.order.sale.saleOrder.model;
import java.util.Date;
import java.util.List;
import com.usst.app.good.good.model.Good;
import com.usst.app.order.sale.saleWare.model.SaleWare;
import com.usst.code.bean.Base;
public class SaleOrder extends Base {
private static final long serialVersionUID = -8738255760779509559L;
// --- Order classification and related parties ---
private String type;
private String bankAccountId;
private String bankAccountName;
private String warehouseId;
private String warehouseName;
private String customerId;
private String customerName;
// --- Money ---
private Double priceDiscount;
private Double orderMoney;
// --- Contact details for the order ---
private String linkman;
private String mobile;
private String address;
private String telephone;
private String email;
private String zipCode;
// --- Payment method ---
private String paymentId;
private String paymentCode;
private String paymentName;
// --- Delivery method and cost ---
private String deliveryId;
private String deliveryCode;
private String deliveryName;
private String deliveryDate;
private Double deliveryCost;
// --- State flags (order / delivery / payment) ---
private String orderState;
private String deliveryState;
private String paymentState;
private String remark;
// --- Handler and department ---
private String handlerId;
private String handlerName;
private String deptId;
private String deptName;
// --- Invoice details ---
private String invoiceType;
private String invoicePayable;
private String invoiceContent;
private double totalCredits;
// Date-window helpers -- presumably used for one-month queries; confirm.
private Date oneMonthAgo;
private Date oneMonthAfter;
// NOTE(review): presumably the Alipay ("zhifubao") trade number -- confirm.
private String zfbTradeNo;
private int orderStateNum;
private String iscancel;
// --- Timestamps for state transitions ---
private Date orderstateTime;
private Date deliveryTime;
private Date paymentTime;
private Date cancelTime;
private String dateTime = "0";
private String employcouponId;
// NOTE(review): "srachcode" looks like a typo of "searchcode"; renaming would
// break callers and reflection-based mappers, so it is left as-is.
private String srachcode;
// --- Order lines ---
private List<Good> goodlist;
private List<SaleWare> warelist;
// --- Query/search helper fields ---
private String data;
private String search;
private String begintime;
private String endtime;
private String bankCode;
private String searchOrder;
// Accessors for search/query helper fields and order line collections.
public String getSearchOrder() {
    return searchOrder;
}
public void setSearchOrder(final String searchOrder) {
    this.searchOrder = searchOrder;
}
public String getBegintime() {
    return begintime;
}
public void setBegintime(final String begintime) {
    this.begintime = begintime;
}
public String getEndtime() {
    return endtime;
}
public void setEndtime(final String endtime) {
    this.endtime = endtime;
}
public String getData() {
    return data;
}
public void setData(final String data) {
    this.data = data;
}
public String getSearch() {
    return search;
}
public void setSearch(final String search) {
    this.search = search;
}
public List<SaleWare> getWarelist() {
    return warelist;
}
public void setWarelist(final List<SaleWare> warelist) {
    this.warelist = warelist;
}
public List<Good> getGoodlist() {
    return goodlist;
}
public void setGoodlist(final List<Good> goodlist) {
    this.goodlist = goodlist;
}
// Plain getters for the order header properties.
public String getType() {
    return type;
}
public String getBankAccountId() {
    return bankAccountId;
}
public String getBankAccountName() {
    return bankAccountName;
}
public String getCustomerId() {
    return customerId;
}
public String getCustomerName() {
    return customerName;
}
public Double getPriceDiscount() {
    return priceDiscount;
}
public Double getOrderMoney() {
    return orderMoney;
}
public String getLinkman() {
    return linkman;
}
public String getMobile() {
    return mobile;
}
public String getPaymentId() {
    return paymentId;
}
public String getPaymentCode() {
    return paymentCode;
}
public String getPaymentName() {
    return paymentName;
}
public String getDeliveryId() {
    return deliveryId;
}
public String getDeliveryCode() {
    return deliveryCode;
}
public String getDeliveryName() {
    return deliveryName;
}
public String getDeliveryDate() {
    return deliveryDate;
}
public Double getDeliveryCost() {
    return deliveryCost;
}
public String getOrderState() {
    return orderState;
}
public String getDeliveryState() {
    return deliveryState;
}
public String getPaymentState() {
    return paymentState;
}
public String getRemark() {
    return remark;
}
public String getHandlerId() {
    return handlerId;
}
public String getHandlerName() {
    return handlerName;
}
public String getDeptId() {
    return deptId;
}
public String getDeptName() {
    return deptName;
}
// Plain setters for the order header properties.
public void setType(final String type) {
    this.type = type;
}
public void setBankAccountId(final String bankAccountId) {
    this.bankAccountId = bankAccountId;
}
public void setBankAccountName(final String bankAccountName) {
    this.bankAccountName = bankAccountName;
}
public void setCustomerId(final String customerId) {
    this.customerId = customerId;
}
public void setCustomerName(final String customerName) {
    this.customerName = customerName;
}
public void setPriceDiscount(final Double priceDiscount) {
    this.priceDiscount = priceDiscount;
}
public void setOrderMoney(final Double orderMoney) {
    this.orderMoney = orderMoney;
}
public void setLinkman(final String linkman) {
    this.linkman = linkman;
}
public void setMobile(final String mobile) {
    this.mobile = mobile;
}
public void setPaymentId(final String paymentId) {
    this.paymentId = paymentId;
}
public void setPaymentCode(final String paymentCode) {
    this.paymentCode = paymentCode;
}
public void setPaymentName(final String paymentName) {
    this.paymentName = paymentName;
}
public void setDeliveryId(final String deliveryId) {
    this.deliveryId = deliveryId;
}
public void setDeliveryCode(final String deliveryCode) {
    this.deliveryCode = deliveryCode;
}
public void setDeliveryName(final String deliveryName) {
    this.deliveryName = deliveryName;
}
public void setDeliveryDate(final String deliveryDate) {
    this.deliveryDate = deliveryDate;
}
public void setDeliveryCost(final Double deliveryCost) {
    this.deliveryCost = deliveryCost;
}
public void setOrderState(final String orderState) {
    this.orderState = orderState;
}
public void setDeliveryState(final String deliveryState) {
    this.deliveryState = deliveryState;
}
public void setPaymentState(final String paymentState) {
    this.paymentState = paymentState;
}
public void setRemark(final String remark) {
    this.remark = remark;
}
public void setHandlerId(final String handlerId) {
    this.handlerId = handlerId;
}
public void setHandlerName(final String handlerName) {
    this.handlerName = handlerName;
}
public void setDeptId(final String deptId) {
    this.deptId = deptId;
}
public void setDeptName(final String deptName) {
    this.deptName = deptName;
}
public String getWarehouseId() {
return this.warehouseId;
}
public String getWarehouseName() {
return this.warehouseName;
}
public void setWarehouseId(String warehouseId) {
this.warehouseId = warehouseId;
}
public void setWarehouseName(String warehouseName) {
this.warehouseName = warehouseName;
}
public String getAddress() {
return this.address;
}
public void setAddress(String address) {
this.address = address;
}
public String getTelephone() {
return this.telephone;
}
public void setTelephone(String telephone) {
this.telephone = telephone;
}
public String getEmail() {
return this.email;
}
public void setEmail(String email) {
this.email = email;
}
public String getZipCode() {
return this.zipCode;
}
public void setZipCode(String zipCode) {
this.zipCode = zipCode;
}
public String getDateTime() {
return this.dateTime;
}
public void setDateTime(String dateTime) {
this.dateTime = dateTime;
}
public String getInvoiceType() {
return this.invoiceType;
}
public void setInvoiceType(String invoiceType) {
this.invoiceType = invoiceType;
}
public String getInvoicePayable() {
return this.invoicePayable;
}
public void setInvoicePayable(String invoicePayable) {
this.invoicePayable = invoicePayable;
}
public String getInvoiceContent() {
return this.invoiceContent;
}
public void setInvoiceContent(String invoiceContent) {
this.invoiceContent = invoiceContent;
}
// --- Credits / search / payment accessors. ---
// NOTE(review): credits held as primitive double — fine for display, but lossy if
// used for monetary arithmetic; consider BigDecimal if that applies.
public double getTotalCredits() {
return this.totalCredits;
}
public void setTotalCredits(double totalCredits) {
this.totalCredits = totalCredits;
}
// NOTE(review): the Date getters/setters below share references with callers
// (java.util.Date is mutable) — external mutation will be visible here. This is
// the usual JPA-style bean pattern; flagged only, not changed.
public Date getOneMonthAgo() {
return this.oneMonthAgo;
}
public void setOneMonthAgo(Date oneMonthAgo) {
this.oneMonthAgo = oneMonthAgo;
}
public Date getOneMonthAfter() {
return this.oneMonthAfter;
}
public void setOneMonthAfter(Date oneMonthAfter) {
this.oneMonthAfter = oneMonthAfter;
}
public String getEmploycouponId() {
return this.employcouponId;
}
public void setEmploycouponId(String employcouponId) {
this.employcouponId = employcouponId;
}
// Alipay ("zfb") trade number, judging by the prefix — TODO confirm.
public String getZfbTradeNo() {
return this.zfbTradeNo;
}
public void setZfbTradeNo(String zfbTradeNo) {
this.zfbTradeNo = zfbTradeNo;
}
public int getOrderStateNum() {
return this.orderStateNum;
}
public void setOrderStateNum(int orderStateNum) {
this.orderStateNum = orderStateNum;
}
// NOTE(review): "srachcode" looks like a typo for "searchcode"; renaming would
// break bean mapping, so it is left as-is.
public String getSrachcode() {
return this.srachcode;
}
public void setSrachcode(String srachcode) {
this.srachcode = srachcode;
}
public String getIscancel() {
return this.iscancel;
}
public void setIscancel(String iscancel) {
this.iscancel = iscancel;
}
public String getBankCode() {
return this.bankCode;
}
public void setBankCode(String bankCode) {
this.bankCode = bankCode;
}
// --- Lifecycle timestamps (order state change, delivery, payment, cancel). ---
public Date getOrderstateTime() {
return this.orderstateTime;
}
public void setOrderstateTime(Date orderstateTime) {
this.orderstateTime = orderstateTime;
}
public Date getDeliveryTime() {
return this.deliveryTime;
}
public void setDeliveryTime(Date deliveryTime) {
this.deliveryTime = deliveryTime;
}
public Date getPaymentTime() {
return this.paymentTime;
}
public void setPaymentTime(Date paymentTime) {
this.paymentTime = paymentTime;
}
public Date getCancelTime() {
return this.cancelTime;
}
public void setCancelTime(Date cancelTime) {
this.cancelTime = cancelTime;
}
}
| |
/*
* Copyright (c) 2006-2017 DMDirc Developers
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
* OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.dmdirc.ui.core.dialogs.sslcertificate;
import com.dmdirc.tls.CertificateAction;
import com.dmdirc.tls.CertificateDoesntMatchHostException;
import com.dmdirc.tls.CertificateHostChecker;
import com.dmdirc.tls.CertificateManager;
import com.dmdirc.tls.CertificateNotTrustedException;
import java.security.cert.CertificateException;
import java.security.cert.CertificateExpiredException;
import java.security.cert.CertificateNotYetValidException;
import java.security.cert.CertificateParsingException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
* Model for SSL certificate dialogs.
*
* @since 0.6.3m1
*/
public class SSLCertificateDialogModel {

    /** The text to use if a field isn't present on the certificate. */
    private static final String NOTPRESENT = "(not present on certificate)";

    /** The certificate chain that we're displaying information about. */
    private final List<X509Certificate> chain;

    /** The certificate manager for the connection attempt. */
    private final CertificateManager manager;

    /** The list of problems found with the certs, if any. */
    private final Collection<CertificateException> problems;

    /** Checker to use for hostnames. */
    private final CertificateHostChecker hostChecker;

    /**
     * Creates a new SSLCertificateDialogModel for the specified chain.
     *
     * @param chain The chain of certificates to display info on
     * @param problems A list of problems with the certificates, if any
     * @param manager The certificate manager responsible for the certs
     */
    public SSLCertificateDialogModel(final List<X509Certificate> chain,
            final Collection<CertificateException> problems,
            final CertificateManager manager) {
        this.chain = chain;
        this.problems = problems;
        this.manager = manager;
        this.hostChecker = new CertificateHostChecker();
    }

    /**
     * Retrieves the certificate chain that's under question.
     *
     * @return A list of {@link CertificateChainEntry}s corresponding to the certificate chain
     * being questioned.
     */
    public List<CertificateChainEntry> getCertificateChain() {
        final List<CertificateChainEntry> res = new ArrayList<>();
        boolean first = true;
        for (X509Certificate cert : chain) {
            // Only the first (leaf) certificate must match the host we connected to.
            boolean invalid = first && !hostChecker.isValidFor(cert, manager.getServerName());
            first = false;
            try {
                cert.checkValidity();
            } catch (CertificateException ex) {
                // Expired or not-yet-valid certificates are also flagged as invalid.
                // (Was `invalid |= true`, which is just an obfuscated assignment.)
                invalid = true;
            }
            res.add(new CertificateChainEntry(CertificateManager
                    .getDNFieldsFromCert(cert).get("CN"),
                    manager.isTrusted(cert).isTrusted(), invalid));
        }
        return res;
    }

    /**
     * Retrieves displayable information about the certificate with the specified index in the
     * chain.
     *
     * @param index The index of the certificate to request information on
     *
     * @return A list of lists of {@link CertificateInformationEntry}s.
     */
    public List<List<CertificateInformationEntry>> getCertificateInfo(final int index) {
        final List<List<CertificateInformationEntry>> res = new ArrayList<>();
        final X509Certificate cert = chain.get(index);
        List<CertificateInformationEntry> group;

        boolean tooOld = false;
        boolean tooNew = false;
        try {
            cert.checkValidity();
        } catch (CertificateExpiredException ex) {
            tooOld = true;
        } catch (CertificateNotYetValidException ex) {
            tooNew = true;
        }

        // Group 1: validity period.
        group = new ArrayList<>();
        group.add(new CertificateInformationEntry("Valid from",
                cert.getNotBefore().toString(), tooNew, false));
        group.add(new CertificateInformationEntry("Valid to",
                cert.getNotAfter().toString(), tooOld, false));
        res.add(group);

        // Only the leaf certificate (index 0) is checked against the server name.
        final boolean wrongName = index == 0 && !hostChecker.isValidFor(cert, manager.getServerName());
        final String names = getAlternateNames(cert);
        final Map<String, String> fields = CertificateManager.getDNFieldsFromCert(cert);

        // Group 2: subject fields.
        group = new ArrayList<>();
        addCertField(fields, group, "Common name", "CN", wrongName);
        group.add(new CertificateInformationEntry("Alternate names",
                names == null ? NOTPRESENT : names, wrongName, names == null));
        addCertField(fields, group, "Organisation", "O", false);
        addCertField(fields, group, "Unit", "OU", false);
        addCertField(fields, group, "Locality", "L", false);
        addCertField(fields, group, "State", "ST", false);
        addCertField(fields, group, "Country", "C", false);
        res.add(group);

        // Group 3: technical details.
        group = new ArrayList<>();
        group.add(new CertificateInformationEntry("Serial number",
                cert.getSerialNumber().toString(), false, false));
        group.add(new CertificateInformationEntry("Algorithm",
                cert.getSigAlgName(), false, false));
        group.add(new CertificateInformationEntry("SSL version",
                String.valueOf(cert.getVersion()), false, false));
        res.add(group);

        return res;
    }

    /**
     * Retrieves a list of all the alternate names of the specified certificates as a
     * comma-separated string.
     *
     * @param cert The certificate to retrieve alternate names for
     *
     * @return A comma-separated list of alternate names, or {@code null} if the certificate
     * carries no subjectAltName extension. NOTE(review): if the extension is present but
     * contains no DNS/IP entries this returns an empty string, not {@code null} — confirm
     * that is the intended display behaviour.
     */
    private String getAlternateNames(final X509Certificate cert) {
        final StringBuilder res = new StringBuilder();
        try {
            if (cert.getSubjectAlternativeNames() == null) {
                return null;
            }
            for (List<?> entry : cert.getSubjectAlternativeNames()) {
                final int type = (Integer) entry.get(0);
                // Type 2 = dNSName, type 7 = iPAddress (RFC 5280 GeneralName tags).
                if (type == 2 || type == 7) {
                    if (res.length() > 0) {
                        res.append(", ");
                    }
                    res.append(entry.get(1));
                }
            }
        } catch (CertificateParsingException ex) {
            // Unparseable extension: fall through and report whatever was collected.
        }
        return res.toString();
    }

    /**
     * Adds a field to the specified group.
     *
     * @param fields The fields extracted from the certificate
     * @param group The group to add an entry to
     * @param title The user-friendly title of the field
     * @param field The name of the field to look for
     * @param invalid Whether or not the field is a cause for concern
     */
    private void addCertField(
            final Map<String, String> fields,
            final List<CertificateInformationEntry> group,
            final String title,
            final String field,
            final boolean invalid) {
        group.add(new CertificateInformationEntry(title, fields.getOrDefault(field, NOTPRESENT), invalid,
                !fields.containsKey(field)));
    }

    /**
     * Retrieves a list of summary elements to describe the overall status of the certificate
     * chain.
     *
     * @return A list of summary entries
     */
    public List<CertificateSummaryEntry> getSummary() {
        final List<CertificateSummaryEntry> res = new ArrayList<>();

        boolean outOfDate = false;
        boolean wrongHost = false;
        boolean notTrusted = false;
        for (CertificateException ex : problems) {
            if (ex instanceof CertificateExpiredException
                    || ex instanceof CertificateNotYetValidException) {
                outOfDate = true;
            } else if (ex instanceof CertificateDoesntMatchHostException) {
                wrongHost = true;
            } else if (ex instanceof CertificateNotTrustedException) {
                notTrusted = true;
            }
        }

        // One summary line per category, positive or negative.
        if (outOfDate) {
            res.add(new CertificateSummaryEntry("One or more certificates are "
                    + "not within their validity period", false));
        } else {
            res.add(new CertificateSummaryEntry("All certificates are "
                    + "within their validity period", true));
        }

        if (notTrusted) {
            res.add(new CertificateSummaryEntry("The certificate is not issued "
                    + "by a trusted authority", false));
        } else {
            res.add(new CertificateSummaryEntry("The certificate chain is "
                    + "trusted", true));
        }

        if (wrongHost) {
            res.add(new CertificateSummaryEntry("The certificate is not issued "
                    + "to the host you are connecting to", false));
        } else {
            res.add(new CertificateSummaryEntry("The certificate is issued "
                    + "to the host you are connecting to", true));
        }

        return res;
    }

    /**
     * Determines whether or not a response is required from the user about this certificate
     * chain.
     *
     * @return True if a response is required, false otherwise
     */
    public boolean needsResponse() {
        return !problems.isEmpty();
    }

    /**
     * Retrieves the name of the server to which the user is trying to connect.
     *
     * @return The name of the server that the user is trying to connect to
     */
    public String getServerName() {
        return manager.getServerName();
    }

    /**
     * Performs the specified action on the certificate chain/connection. Should only be called
     * once per instance, and only if {@link #needsResponse()} returns true.
     *
     * @param action The action to be performed
     * @throws IllegalStateException if no response is needed
     */
    public void performAction(final CertificateAction action) {
        if (!needsResponse()) {
            throw new IllegalStateException("Can't perform action when "
                    + "no action is needed");
        }
        manager.setAction(action);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.*;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.audit.AuditLogContext;
import org.apache.cassandra.audit.AuditLogEntryType;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.db.guardrails.Guardrails;
import org.apache.cassandra.schema.ColumnMetadata;
import org.apache.cassandra.schema.Schema;
import org.apache.cassandra.schema.TableMetadata;
import org.apache.cassandra.schema.TableMetadataRef;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.cql3.functions.Function;
import org.apache.cassandra.cql3.restrictions.StatementRestrictions;
import org.apache.cassandra.cql3.selection.RawSelector;
import org.apache.cassandra.cql3.selection.ResultSetBuilder;
import org.apache.cassandra.cql3.selection.Selectable;
import org.apache.cassandra.cql3.selection.Selection;
import org.apache.cassandra.cql3.selection.Selection.Selectors;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.aggregation.AggregationSpecification;
import org.apache.cassandra.db.aggregation.GroupMaker;
import org.apache.cassandra.db.filter.*;
import org.apache.cassandra.db.marshal.CollectionType;
import org.apache.cassandra.db.marshal.CompositeType;
import org.apache.cassandra.db.marshal.Int32Type;
import org.apache.cassandra.db.marshal.UserType;
import org.apache.cassandra.db.partitions.PartitionIterator;
import org.apache.cassandra.db.rows.ComplexColumnData;
import org.apache.cassandra.db.rows.Row;
import org.apache.cassandra.db.rows.RowIterator;
import org.apache.cassandra.db.view.View;
import org.apache.cassandra.dht.AbstractBounds;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.index.IndexRegistry;
import org.apache.cassandra.serializers.MarshalException;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.ClientWarn;
import org.apache.cassandra.service.QueryState;
import org.apache.cassandra.service.StorageProxy;
import org.apache.cassandra.service.pager.AggregationQueryPager;
import org.apache.cassandra.service.pager.PagingState;
import org.apache.cassandra.service.pager.QueryPager;
import org.apache.cassandra.transport.ProtocolVersion;
import org.apache.cassandra.transport.messages.ResultMessage;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import static org.apache.cassandra.cql3.statements.RequestValidations.checkFalse;
import static org.apache.cassandra.cql3.statements.RequestValidations.checkNotNull;
import static org.apache.cassandra.cql3.statements.RequestValidations.checkNull;
import static org.apache.cassandra.cql3.statements.RequestValidations.checkTrue;
import static org.apache.cassandra.utils.ByteBufferUtil.UNSET_BYTE_BUFFER;
import static org.apache.cassandra.utils.Clock.Global.nanoTime;
/**
* Encapsulates a completely parsed SELECT query, including the target
* column family, expression, result count, and ordering clause.
*
* A number of public methods here are only used internally. However,
* many of these are made accessible for the benefit of custom
* QueryHandler implementations, so before reducing their accessibility
* due consideration should be given.
*/
public class SelectStatement implements CQLStatement.SingleKeyspaceCqlStatement
{
private static final Logger logger = LoggerFactory.getLogger(SelectStatement.class);
// Default page size; presumably applied when a query supplies none — TODO confirm at call sites.
public static final int DEFAULT_PAGE_SIZE = 10000;
// Bind-marker specifications for this prepared statement.
public final VariableSpecifications bindVariables;
// The table (column family) this SELECT targets.
public final TableMetadata table;
// Parsed SELECT parameters (DISTINCT, JSON, ordering, etc. — see Parameters).
public final Parameters parameters;
// The selection clause (which columns/functions are returned).
private final Selection selection;
// User-supplied LIMIT term, or null if absent.
private final Term limit;
// User-supplied PER PARTITION LIMIT term, or null if absent.
private final Term perPartitionLimit;
// WHERE-clause restrictions.
private final StatementRestrictions restrictions;
// Whether results are read in reverse clustering order.
private final boolean isReversed;
/**
* The <code>AggregationSpecification</code> used to make the aggregates.
*/
private final AggregationSpecification aggregationSpec;
/**
* The comparator used to orders results when multiple keys are selected (using IN).
*/
private final Comparator<List<ByteBuffer>> orderingComparator;
// Used by forSelection below
private static final Parameters defaultParameters = new Parameters(Collections.emptyMap(),
Collections.emptyList(),
false,
false,
false);
/**
* Creates a fully parsed SELECT statement. All collaborators are supplied by the
* prepare phase; this constructor only stores them.
*/
public SelectStatement(TableMetadata table,
VariableSpecifications bindVariables,
Parameters parameters,
Selection selection,
StatementRestrictions restrictions,
boolean isReversed,
AggregationSpecification aggregationSpec,
Comparator<List<ByteBuffer>> orderingComparator,
Term limit,
Term perPartitionLimit)
{
this.table = table;
this.bindVariables = bindVariables;
this.selection = selection;
this.restrictions = restrictions;
this.isReversed = isReversed;
this.aggregationSpec = aggregationSpec;
this.orderingComparator = orderingComparator;
this.parameters = parameters;
this.limit = limit;
this.perPartitionLimit = perPartitionLimit;
}
// Specifications of the statement's bind markers.
@Override
public List<ColumnSpecification> getBindVariables()
{
return bindVariables.getBindVariables();
}
// Indexes of bind markers that form the partition key, used for routing.
@Override
public short[] getPartitionKeyBindVariableIndexes()
{
return bindVariables.getPartitionKeyBindVariableIndexes(table);
}
// All native/UDF functions used anywhere in the statement (selection, restrictions, limits).
@Override
public Iterable<Function> getFunctions()
{
List<Function> functions = new ArrayList<>();
addFunctionsTo(functions);
return functions;
}
private void addFunctionsTo(List<Function> functions)
{
selection.addFunctionsTo(functions);
restrictions.addFunctionsTo(functions);
if (limit != null)
limit.addFunctionsTo(functions);
if (perPartitionLimit != null)
perPartitionLimit.addFunctionsTo(functions);
}
/**
* The columns to fetch internally for this SELECT statement (which can be more than the one selected by the
* user as it also include any restricted column in particular).
*/
public ColumnFilter queriedColumns()
{
return selection.newSelectors(QueryOptions.DEFAULT).getColumnFilter();
}
// Creates a simple select based on the given selection.
// Note that the results select statement should not be used for actual queries, but only for processing already
// queried data through processColumnFamily.
static SelectStatement forSelection(TableMetadata table, Selection selection)
{
return new SelectStatement(table,
VariableSpecifications.empty(),
defaultParameters,
selection,
StatementRestrictions.empty(StatementType.SELECT, table),
false,
null,
null,
null,
null);
}
// Metadata describing the shape of the result set returned to clients.
public ResultSet.ResultMetadata getResultMetadata()
{
return selection.getResultMetadata();
}
/**
* Checks the client has SELECT permission on the table (or, for a materialized view,
* on its base table) and EXECUTE permission on any functions used.
*/
public void authorize(ClientState state) throws InvalidRequestException, UnauthorizedException
{
if (table.isView())
{
TableMetadataRef baseTable = View.findBaseTable(keyspace(), table());
if (baseTable != null)
state.ensureTablePermission(baseTable, Permission.SELECT);
}
else
{
state.ensureTablePermission(table, Permission.SELECT);
}
for (Function function : getFunctions())
state.ensurePermission(Permission.EXECUTE, function);
}
public void validate(ClientState state) throws InvalidRequestException
{
// Nothing to do, all validation has been done by RawStatement.prepare()
}
/**
* Coordinator-side execution of this SELECT. Validates the consistency level,
* builds the read query, and either executes it directly (no paging needed) or
* drives it through a distributed pager.
*/
public ResultMessage.Rows execute(QueryState state, QueryOptions options, long queryStartNanoTime)
{
ConsistencyLevel cl = options.getConsistency();
checkNotNull(cl, "Invalid empty consistency level");
cl.validateForRead();
int nowInSec = options.getNowInSeconds(state);
int userLimit = getLimit(options);
int userPerPartitionLimit = getPerPartitionLimit(options);
int pageSize = options.getPageSize();
Selectors selectors = selection.newSelectors(options);
ReadQuery query = getQuery(options, state.getClientState(), selectors.getColumnFilter(),
nowInSec, userLimit, userPerPartitionLimit, pageSize);
if (options.isTrackWarningsEnabled())
query.trackWarnings();
// No paging needed: no aggregation and the whole result fits in one page.
if (aggregationSpec == null && (pageSize <= 0 || (query.limits().count() <= pageSize)))
return execute(query, options, state, selectors, nowInSec, userLimit, queryStartNanoTime);
QueryPager pager = getPager(query, options);
return execute(state,
Pager.forDistributedQuery(pager, cl, state.getClientState()),
options,
selectors,
pageSize,
nowInSec,
userLimit,
queryStartNanoTime);
}
// Convenience overload for internal callers: derives all limits/filters from options.
public ReadQuery getQuery(QueryOptions options, int nowInSec) throws RequestValidationException
{
Selectors selectors = selection.newSelectors(options);
return getQuery(options,
ClientState.forInternalCalls(),
selectors.getColumnFilter(),
nowInSec,
getLimit(options),
getPerPartitionLimit(options),
options.getPageSize());
}
/**
* Builds the ReadQuery for this statement: a range command when the statement is a
* key-range or secondary-index query, otherwise per-partition slice commands.
*/
public ReadQuery getQuery(QueryOptions options,
ClientState state,
ColumnFilter columnFilter,
int nowInSec,
int userLimit,
int perPartitionLimit,
int pageSize)
{
boolean isPartitionRangeQuery = restrictions.isKeyRange() || restrictions.usesSecondaryIndexing();
DataLimits limit = getDataLimits(userLimit, perPartitionLimit, pageSize);
if (isPartitionRangeQuery)
return getRangeCommand(options, columnFilter, limit, nowInSec);
return getSliceCommands(options, state, columnFilter, limit, nowInSec);
}
// Unpaged execution path: runs the query and converts the partitions to a Rows message.
private ResultMessage.Rows execute(ReadQuery query,
QueryOptions options,
QueryState state,
Selectors selectors,
int nowInSec,
int userLimit, long queryStartNanoTime) throws RequestValidationException, RequestExecutionException
{
try (PartitionIterator data = query.execute(options.getConsistency(), state.getClientState(), queryStartNanoTime))
{
return processResults(data, options, selectors, nowInSec, userLimit);
}
}
// Audit-log descriptor for this statement (type, keyspace, table).
@Override
public AuditLogContext getAuditLogContext()
{
return new AuditLogContext(AuditLogEntryType.SELECT, keyspace(), table.name);
}
// Simple wrapper class to avoid some code duplication
// Abstracts over the two ways a page can be fetched: through the distributed read
// path (NormalPager) or directly against local data (InternalPager).
private static abstract class Pager
{
protected QueryPager pager;
protected Pager(QueryPager pager)
{
this.pager = pager;
}
public static Pager forInternalQuery(QueryPager pager, ReadExecutionController executionController)
{
return new InternalPager(pager, executionController);
}
public static Pager forDistributedQuery(QueryPager pager, ConsistencyLevel consistency, ClientState clientState)
{
return new NormalPager(pager, consistency, clientState);
}
public boolean isExhausted()
{
return pager.isExhausted();
}
public PagingState state()
{
return pager.state();
}
public abstract PartitionIterator fetchPage(int pageSize, long queryStartNanoTime);
// Distributed pager: each page is a coordinated read at the given consistency level.
public static class NormalPager extends Pager
{
private final ConsistencyLevel consistency;
private final ClientState clientState;
private NormalPager(QueryPager pager, ConsistencyLevel consistency, ClientState clientState)
{
super(pager);
this.consistency = consistency;
this.clientState = clientState;
}
public PartitionIterator fetchPage(int pageSize, long queryStartNanoTime)
{
return pager.fetchPage(pageSize, consistency, clientState, queryStartNanoTime);
}
}
// Local pager: pages are fetched within an already-open read execution controller.
public static class InternalPager extends Pager
{
private final ReadExecutionController executionController;
private InternalPager(QueryPager pager, ReadExecutionController executionController)
{
super(pager);
this.executionController = executionController;
}
public PartitionIterator fetchPage(int pageSize, long queryStartNanoTime)
{
return pager.fetchPageInternal(pageSize, executionController);
}
}
}
/**
* Paged execution: guards the page size, warns on dubious aggregation queries,
* fetches one page and attaches the paging state to the result when more pages remain.
*/
private ResultMessage.Rows execute(QueryState state,
Pager pager,
QueryOptions options,
Selectors selectors,
int pageSize,
int nowInSec,
int userLimit,
long queryStartNanoTime) throws RequestValidationException, RequestExecutionException
{
Guardrails.pageSize.guard(pageSize, table(), state.getClientState());
if (aggregationSpec != null)
{
if (!restrictions.hasPartitionKeyRestrictions())
{
warn("Aggregation query used without partition key");
}
else if (restrictions.keyIsInRelation())
{
warn("Aggregation query used on multiple partition keys (IN restriction)");
}
}
// We can't properly do post-query ordering if we page (see #6722)
// For GROUP BY or aggregation queries we always page internally even if the user has turned paging off
checkFalse(pageSize > 0 && needsPostQueryOrdering(),
"Cannot page queries with both ORDER BY and a IN restriction on the partition key;"
+ " you must either remove the ORDER BY or the IN and sort client side, or disable paging for this query");
ResultMessage.Rows msg;
try (PartitionIterator page = pager.fetchPage(pageSize, queryStartNanoTime))
{
msg = processResults(page, options, selectors, nowInSec, userLimit);
}
// Please note that the isExhausted state of the pager only gets updated when we've closed the page, so this
// shouldn't be moved inside the 'try' above.
if (!pager.isExhausted())
msg.result.metadata.setHasMorePages(pager.state());
return msg;
}
// Emits the message both to the server log and as a client warning.
private void warn(String msg)
{
logger.warn(msg);
ClientWarn.instance.warn(msg);
}
// Post-processes raw partition data and wraps the resulting ResultSet into a
// protocol-level Rows message.
private ResultMessage.Rows processResults(PartitionIterator partitions,
QueryOptions options,
Selectors selectors,
int nowInSec,
int userLimit) throws RequestValidationException
{
return new ResultMessage.Rows(process(partitions, options, selectors, nowInSec, userLimit));
}
// Local (non-distributed) execution entry point; delegates to executeInternal with "now".
public ResultMessage.Rows executeLocally(QueryState state, QueryOptions options) throws RequestExecutionException, RequestValidationException
{
return executeInternal(state, options, options.getNowInSeconds(state), nanoTime());
}
/**
* Executes this SELECT against local data only, mirroring the distributed path:
* direct execution when no paging is needed, otherwise an internal pager bound to
* the read execution controller.
*/
public ResultMessage.Rows executeInternal(QueryState state, QueryOptions options, int nowInSec, long queryStartNanoTime) throws RequestExecutionException, RequestValidationException
{
int userLimit = getLimit(options);
int userPerPartitionLimit = getPerPartitionLimit(options);
int pageSize = options.getPageSize();
Selectors selectors = selection.newSelectors(options);
ReadQuery query = getQuery(options, state.getClientState(), selectors.getColumnFilter(), nowInSec, userLimit,
userPerPartitionLimit, pageSize);
try (ReadExecutionController executionController = query.executionController())
{
if (aggregationSpec == null && (pageSize <= 0 || (query.limits().count() <= pageSize)))
{
try (PartitionIterator data = query.executeInternal(executionController))
{
return processResults(data, options, selectors, nowInSec, userLimit);
}
}
QueryPager pager = getPager(query, options);
return execute(state,
Pager.forInternalQuery(pager, executionController),
options,
selectors,
pageSize,
nowInSec,
userLimit,
queryStartNanoTime);
}
}
// Wraps the pager for GROUP BY/aggregation so groups are not split across pages.
private QueryPager getPager(ReadQuery query, QueryOptions options)
{
QueryPager pager = query.getPager(options.getPagingState(), options.getProtocolVersion());
if (aggregationSpec == null || query.isEmpty())
return pager;
return new AggregationQueryPager(pager, query.limits());
}
// Processes already-fetched partitions using default options (used by internal callers).
public ResultSet process(PartitionIterator partitions, int nowInSec) throws InvalidRequestException
{
QueryOptions options = QueryOptions.DEFAULT;
Selectors selectors = selection.newSelectors(options);
return process(partitions, options, selectors, nowInSec, getLimit(options));
}
@Override
public String keyspace()
{
return table.keyspace;
}
public String table()
{
return table.name;
}
/**
* May be used by custom QueryHandler implementations
*/
public Selection getSelection()
{
return selection;
}
/**
* May be used by custom QueryHandler implementations
*/
public StatementRestrictions getRestrictions()
{
return restrictions;
}
/**
* Builds a group of single-partition reads, one per restricted partition key.
* Returns an empty query when no keys are selected or the clustering filter
* selects nothing.
*/
private ReadQuery getSliceCommands(QueryOptions options, ClientState state, ColumnFilter columnFilter,
DataLimits limit, int nowInSec)
{
Collection<ByteBuffer> keys = restrictions.getPartitionKeys(options);
if (keys.isEmpty())
return ReadQuery.empty(table);
if (restrictions.keyIsInRelation())
{
// Guard against overly large IN lists on the partition key.
Guardrails.partitionKeysInSelect.guard(keys.size(), table.name, state);
}
ClusteringIndexFilter filter = makeClusteringIndexFilter(options, columnFilter);
if (filter == null || filter.isEmpty(table.comparator))
return ReadQuery.empty(table);
RowFilter rowFilter = getRowFilter(options);
List<DecoratedKey> decoratedKeys = new ArrayList<>(keys.size());
for (ByteBuffer key : keys)
{
QueryProcessor.validateKey(key);
// Keys are cloned because the originals may be backed by reused buffers.
decoratedKeys.add(table.partitioner.decorateKey(ByteBufferUtil.clone(key)));
}
return SinglePartitionReadQuery.createGroup(table, nowInSec, columnFilter, rowFilter, limit, decoratedKeys, filter);
}
/**
* Returns the slices fetched by this SELECT, assuming an internal call (no bound values in particular).
* <p>
* Note that if the SELECT intrinsically selects rows by names, we convert them into equivalent slices for
* the purpose of this method. This is used for MVs to restrict what needs to be read when we want to read
* everything that could be affected by a given view (and so, if the view SELECT statement has restrictions
* on the clustering columns, we can restrict what we read).
*/
public Slices clusteringIndexFilterAsSlices()
{
QueryOptions options = QueryOptions.forInternalCalls(Collections.emptyList());
ColumnFilter columnFilter = selection.newSelectors(options).getColumnFilter();
ClusteringIndexFilter filter = makeClusteringIndexFilter(options, columnFilter);
if (filter instanceof ClusteringIndexSliceFilter)
return ((ClusteringIndexSliceFilter)filter).requestedSlices();
// Names filter: convert each requested row into a single-clustering slice.
Slices.Builder builder = new Slices.Builder(table.comparator);
for (Clustering<?> clustering: ((ClusteringIndexNamesFilter)filter).requestedRows())
builder.add(Slice.make(clustering));
return builder.build();
}
/**
* Returns a read command that can be used internally to query all the rows queried by this SELECT for a
* give key (used for materialized views).
*/
public SinglePartitionReadCommand internalReadForView(DecoratedKey key, int nowInSec)
{
QueryOptions options = QueryOptions.forInternalCalls(Collections.emptyList());
ColumnFilter columnFilter = selection.newSelectors(options).getColumnFilter();
ClusteringIndexFilter filter = makeClusteringIndexFilter(options, columnFilter);
RowFilter rowFilter = getRowFilter(options);
return SinglePartitionReadCommand.create(table, nowInSec, columnFilter, rowFilter, DataLimits.NONE, key, filter);
}
/**
* The {@code RowFilter} for this SELECT, assuming an internal call (no bound values in particular).
*/
public RowFilter rowFilterForInternalCalls()
{
return getRowFilter(QueryOptions.forInternalCalls(Collections.emptyList()));
}
/**
* Builds the partition-range read command for key-range / secondary-index queries.
* Returns an empty query when the clustering filter or key bounds select nothing.
*/
private ReadQuery getRangeCommand(QueryOptions options, ColumnFilter columnFilter, DataLimits limit, int nowInSec)
{
ClusteringIndexFilter clusteringIndexFilter = makeClusteringIndexFilter(options, columnFilter);
if (clusteringIndexFilter == null)
return ReadQuery.empty(table);
RowFilter rowFilter = getRowFilter(options);
// The LIMIT provided by the user is the number of CQL row he wants returned.
// We want to have getRangeSlice to count the number of columns, not the number of keys.
AbstractBounds<PartitionPosition> keyBounds = restrictions.getPartitionKeyBounds(options);
if (keyBounds == null)
return ReadQuery.empty(table);
ReadQuery command =
PartitionRangeReadQuery.create(table, nowInSec, columnFilter, rowFilter, limit, new DataRange(keyBounds, clusteringIndexFilter));
// If there's a secondary index that the command can use, have it validate the request parameters.
command.maybeValidateIndex();
return command;
}
/**
* Chooses the clustering filter: a full slice for DISTINCT queries, a slice filter
* when clustering columns are range-restricted, otherwise a names filter over the
* explicitly requested rows. Returns null when nothing needs to be read.
*/
private ClusteringIndexFilter makeClusteringIndexFilter(QueryOptions options, ColumnFilter columnFilter)
{
if (parameters.isDistinct)
{
// We need to be able to distinguish between partition having live rows and those that don't. But
// doing so is not trivial since "having a live row" depends potentially on
// 1) when the query is performed, due to TTLs
// 2) how thing reconcile together between different nodes
// so that it's hard to really optimize properly internally. So to keep it simple, we simply query
// for the first row of the partition and hence uses Slices.ALL. We'll limit it to the first live
// row however in getLimit().
return new ClusteringIndexSliceFilter(Slices.ALL, false);
}
if (restrictions.isColumnRange())
{
Slices slices = makeSlices(options);
if (slices == Slices.NONE && !selection.containsStaticColumns())
return null;
return new ClusteringIndexSliceFilter(slices, isReversed);
}
NavigableSet<Clustering<?>> clusterings = getRequestedRows(options);
// We can have no clusterings if either we're only selecting the static columns, or if we have
// a 'IN ()' for clusterings. In that case, we still want to query if some static columns are
// queried. But we're fine otherwise.
if (clusterings.isEmpty() && columnFilter.fetchedColumns().statics.isEmpty())
return null;
return new ClusteringIndexNamesFilter(clusterings, isReversed);
}
/**
 * Converts the clustering-column bound restrictions into {@code Slices},
 * dropping slices that are provably empty.
 */
@VisibleForTesting
public Slices makeSlices(QueryOptions options)
throws InvalidRequestException
{
    SortedSet<ClusteringBound<?>> startBounds = restrictions.getClusteringColumnsBounds(Bound.START, options);
    SortedSet<ClusteringBound<?>> endBounds = restrictions.getClusteringColumnsBounds(Bound.END, options);
    // Bounds come in matched start/end pairs.
    assert startBounds.size() == endBounds.size();
    // The case where startBounds == 1 is common enough that it's worth optimizing
    if (startBounds.size() == 1)
    {
        ClusteringBound<?> start = startBounds.first();
        ClusteringBound<?> end = endBounds.first();
        return Slice.isEmpty(table.comparator, start, end)
             ? Slices.NONE
             : Slices.with(table.comparator, Slice.make(start, end));
    }
    Slices.Builder builder = new Slices.Builder(table.comparator, startBounds.size());
    Iterator<ClusteringBound<?>> startIter = startBounds.iterator();
    Iterator<ClusteringBound<?>> endIter = endBounds.iterator();
    while (startIter.hasNext() && endIter.hasNext())
    {
        ClusteringBound<?> start = startIter.next();
        ClusteringBound<?> end = endIter.next();
        // Ignore slices that are nonsensical
        if (Slice.isEmpty(table.comparator, start, end))
            continue;
        builder.add(start, end);
    }
    return builder.build();
}
/**
 * Combines the user LIMIT, PER PARTITION LIMIT and page size into the
 * {@code DataLimits} applied to the underlying read.
 */
private DataLimits getDataLimits(int userLimit, int perPartitionLimit, int pageSize)
{
    int cqlRowLimit = DataLimits.NO_LIMIT;
    int cqlPerPartitionLimit = DataLimits.NO_LIMIT;
    // If we do post ordering we need to get all the results sorted before we can trim them.
    if (aggregationSpec != AggregationSpecification.AGGREGATE_EVERYTHING)
    {
        if (!needsPostQueryOrdering())
            cqlRowLimit = userLimit;
        cqlPerPartitionLimit = perPartitionLimit;
    }
    // Group by and aggregation queries will always be paged internally to avoid OOM.
    // If the user provided a pageSize we'll use that to page internally (because why not), otherwise we use our default
    if (pageSize <= 0)
        pageSize = DEFAULT_PAGE_SIZE;
    // Aggregation queries work fine on top of the group by paging but to maintain
    // backward compatibility we need to use the old way.
    if (aggregationSpec != null && aggregationSpec != AggregationSpecification.AGGREGATE_EVERYTHING)
    {
        if (parameters.isDistinct)
            return DataLimits.distinctLimits(cqlRowLimit);
        return DataLimits.groupByLimits(cqlRowLimit,
                                        cqlPerPartitionLimit,
                                        pageSize,
                                        aggregationSpec);
    }
    if (parameters.isDistinct)
        return cqlRowLimit == DataLimits.NO_LIMIT ? DataLimits.DISTINCT_NONE : DataLimits.distinctLimits(cqlRowLimit);
    return DataLimits.cqlLimits(cqlRowLimit, cqlPerPartitionLimit);
}
/**
 * Returns the limit specified by the user.
 * May be used by custom QueryHandler implementations
 *
 * @return the limit specified by the user or <code>DataLimits.NO_LIMIT</code> if no value
 * has been specified.
 */
public int getLimit(QueryOptions options)
{
    return getLimit(limit, options);
}
/**
 * Returns the per partition limit specified by the user.
 * May be used by custom QueryHandler implementations
 *
 * @return the per partition limit specified by the user or <code>DataLimits.NO_LIMIT</code> if no value
 * has been specified.
 */
public int getPerPartitionLimit(QueryOptions options)
{
    return getLimit(perPartitionLimit, options);
}
/**
 * Binds and validates a limit term, returning {@code DataLimits.NO_LIMIT} when
 * the term is absent or its bound value is UNSET.
 *
 * @throws InvalidRequestException if the bound value is null, not a valid int,
 *         or not strictly positive
 */
private int getLimit(Term limit, QueryOptions options)
{
    if (limit == null)
        return DataLimits.NO_LIMIT;
    ByteBuffer value = checkNotNull(limit.bindAndGet(options), "Invalid null value of limit");
    // treat UNSET limit value as 'unlimited'
    if (value == UNSET_BYTE_BUFFER)
        return DataLimits.NO_LIMIT;
    try
    {
        Int32Type.instance.validate(value);
        int userLimit = Int32Type.instance.compose(value);
        checkTrue(userLimit > 0, "LIMIT must be strictly positive");
        return userLimit;
    }
    catch (MarshalException e)
    {
        throw new InvalidRequestException("Invalid limit value");
    }
}
/**
 * Returns the explicitly requested clustering rows (non-range queries only).
 */
private NavigableSet<Clustering<?>> getRequestedRows(QueryOptions options) throws InvalidRequestException
{
    // Note: getRequestedColumns don't handle static columns, but due to CASSANDRA-5762
    // we always do a slice for CQL3 tables, so it's ok to ignore them here
    assert !restrictions.isColumnRange();
    return restrictions.getClusteringColumns(options);
}
/**
 * Builds the {@code RowFilter} for this statement's restrictions.
 * May be used by custom QueryHandler implementations.
 */
public RowFilter getRowFilter(QueryOptions options) throws InvalidRequestException
{
    return restrictions.getRowFilter(IndexRegistry.obtain(table), options);
}
/**
 * Materializes the partitions returned by the read into a CQL {@code ResultSet},
 * applying selection, optional grouping, post-query ordering and the user LIMIT.
 */
private ResultSet process(PartitionIterator partitions,
                          QueryOptions options,
                          Selectors selectors,
                          int nowInSec,
                          int userLimit) throws InvalidRequestException
{
    GroupMaker groupMaker = aggregationSpec == null ? null : aggregationSpec.newGroupMaker();
    ResultSetBuilder result = new ResultSetBuilder(getResultMetadata(), selectors, groupMaker);
    while (partitions.hasNext())
    {
        // RowIterator is closed per partition to release its resources promptly.
        try (RowIterator partition = partitions.next())
        {
            processPartition(partition, options, result, nowInSec);
        }
    }
    ResultSet cqlRows = result.build();
    maybeWarn(result, options);
    // Post-query ordering (IN + ORDER BY) must happen before trimming to the user limit.
    orderResults(cqlRows);
    cqlRows.trim(userLimit);
    return cqlRows;
}
/**
 * Splits a decorated key into its partition key components: one buffer per
 * component for composite partition keys, otherwise the key itself.
 */
public static ByteBuffer[] getComponents(TableMetadata metadata, DecoratedKey dk)
{
    ByteBuffer key = dk.getKey();
    return metadata.partitionKeyType instanceof CompositeType
         ? ((CompositeType) metadata.partitionKeyType).split(key)
         : new ByteBuffer[]{ key };
}
/**
 * Records coordinator read-size metrics and emits a client warning plus a log
 * entry when the read exceeds the configured warning threshold.
 * No-op unless warning tracking is enabled for this query.
 */
private void maybeWarn(ResultSetBuilder result, QueryOptions options)
{
    if (!options.isTrackWarningsEnabled())
        return;
    ColumnFamilyStore store = cfs();
    if (store != null)
        store.metric.coordinatorReadSize.update(result.getSize());
    if (result.shouldWarn(options.getCoordinatorReadSizeWarnThresholdKB()))
    {
        String msg = String.format("Read on table %s has exceeded the size warning threshold of %,d kb", table, options.getCoordinatorReadSizeWarnThresholdKB());
        // The client message carries tokens only; the full CQL goes to the server log.
        ClientWarn.instance.warn(msg + " with " + loggableTokens(options));
        logger.warn("{} with query {}", msg, asCQL(options));
        if (store != null)
            store.metric.coordinatorReadSizeWarnings.mark();
    }
}
/**
 * Aborts the read with a {@code ReadSizeAbortException} when the accumulated
 * result size exceeds the configured failure threshold.
 * No-op unless warning tracking is enabled for this query.
 */
private void maybeFail(ResultSetBuilder result, QueryOptions options)
{
    if (!options.isTrackWarningsEnabled())
        return;
    if (result.shouldReject(options.getCoordinatorReadSizeAbortThresholdKB()))
    {
        String msg = String.format("Read on table %s has exceeded the size failure threshold of %,d kb", table, options.getCoordinatorReadSizeAbortThresholdKB());
        String clientMsg = msg + " with " + loggableTokens(options);
        ClientWarn.instance.warn(clientMsg);
        logger.warn("{} with query {}", msg, asCQL(options));
        ColumnFamilyStore store = cfs();
        if (store != null)
        {
            store.metric.coordinatorReadSizeAborts.mark();
            store.metric.coordinatorReadSize.update(result.getSize());
        }
        // read errors require blockFor and received (its in the protocol message), but this isn't known;
        // to work around this, treat the coordinator as the only response we care about and mark it failed
        ReadSizeAbortException exception = new ReadSizeAbortException(clientMsg, options.getConsistency(), 0, 1, true,
                                                                     ImmutableMap.of(FBUtilities.getBroadcastAddressAndPort(), RequestFailureReason.READ_SIZE));
        StorageProxy.recordReadRegularAbort(options.getConsistency(), exception);
        throw exception;
    }
}
/** Looks up the ColumnFamilyStore for this statement's table; may be null if the table was dropped. */
private ColumnFamilyStore cfs()
{
    return Schema.instance.getColumnFamilyStoreInstance(table.id);
}
/**
 * Appends one partition's rows to the result builder, honoring the selection
 * order of the requested columns. If the partition has no rows, the static row
 * is emitted alone when the restrictions call for it.
 * Also enforces the coordinator read-size failure threshold as rows accumulate.
 */
// Used by ModificationStatement for CAS operations
public void processPartition(RowIterator partition, QueryOptions options, ResultSetBuilder result, int nowInSec)
throws InvalidRequestException
{
    maybeFail(result, options);
    ProtocolVersion protocolVersion = options.getProtocolVersion();
    ByteBuffer[] keyComponents = getComponents(table, partition.partitionKey());
    Row staticRow = partition.staticRow();
    // If there is no rows, we include the static content if we should and we're done.
    if (!partition.hasNext())
    {
        if (!staticRow.isEmpty() && restrictions.returnStaticContentOnPartitionWithNoRows())
        {
            result.newRow(partition.partitionKey(), staticRow.clustering());
            maybeFail(result, options);
            for (ColumnMetadata def : selection.getColumns())
            {
                switch (def.kind)
                {
                    case PARTITION_KEY:
                        result.add(keyComponents[def.position()]);
                        break;
                    case STATIC:
                        addValue(result, def, staticRow, nowInSec, protocolVersion);
                        break;
                    default:
                        // Clustering/regular columns have no value on a rows-less partition.
                        result.add((ByteBuffer)null);
                }
            }
        }
        return;
    }
    while (partition.hasNext())
    {
        Row row = partition.next();
        result.newRow( partition.partitionKey(), row.clustering());
        // reads aren't failed as soon the size exceeds the failure threshold, they're failed once the failure
        // threshold has been exceeded and we start adding more data. We're slightly more permissive to avoid
        // cases where a row can never be read. Since we only warn/fail after entire rows are read, this will
        // still allow the entire dataset to be read with LIMIT 1 queries, even if every row is oversized
        maybeFail(result, options);
        // Respect selection order
        for (ColumnMetadata def : selection.getColumns())
        {
            switch (def.kind)
            {
                case PARTITION_KEY:
                    result.add(keyComponents[def.position()]);
                    break;
                case CLUSTERING:
                    result.add(row.clustering().bufferAt(def.position()));
                    break;
                case REGULAR:
                    addValue(result, def, row, nowInSec, protocolVersion);
                    break;
                case STATIC:
                    addValue(result, def, staticRow, nowInSec, protocolVersion);
                    break;
            }
        }
    }
}
/**
 * Appends one column's value from the given row to the result builder,
 * serializing multi-cell (collection/UDT) columns for the client protocol.
 */
private static void addValue(ResultSetBuilder result, ColumnMetadata def, Row row, int nowInSec, ProtocolVersion protocolVersion)
{
    if (def.isComplex())
    {
        assert def.type.isMultiCell();
        ComplexColumnData complexData = row.getComplexColumnData(def);
        if (complexData == null)
            result.add(null);
        else if (def.type.isCollection())
            result.add(((CollectionType) def.type).serializeForNativeProtocol(complexData.iterator(), protocolVersion));
        else
            // Multi-cell but not a collection: must be a non-frozen UDT.
            result.add(((UserType) def.type).serializeForNativeProtocol(complexData.iterator(), protocolVersion));
    }
    else
    {
        result.add(row.getCell(def), nowInSec);
    }
}
/** Whether results must be re-sorted by the coordinator after the read. */
private boolean needsPostQueryOrdering()
{
    // We need post-query ordering only for queries with IN on the partition key and an ORDER BY.
    return restrictions.keyIsInRelation() && !parameters.orderings.isEmpty();
}
/**
 * Sorts results in place when multiple partition keys were selected (using IN)
 * together with an ORDER BY; otherwise leaves the rows untouched.
 */
private void orderResults(ResultSet cqlRows)
{
    boolean hasRows = cqlRows.size() != 0;
    if (hasRows && needsPostQueryOrdering())
        Collections.sort(cqlRows.rows, orderingComparator);
}
/**
 * Unprepared form of a SELECT: raw clauses straight from the parser, turned
 * into an executable {@code SelectStatement} by {@link #prepare(boolean)}.
 */
public static class RawStatement extends QualifiedStatement
{
    public final Parameters parameters;
    public final List<RawSelector> selectClause;
    public final WhereClause whereClause;
    public final Term.Raw limit;
    public final Term.Raw perPartitionLimit;

    public RawStatement(QualifiedName cfName,
                        Parameters parameters,
                        List<RawSelector> selectClause,
                        WhereClause whereClause,
                        Term.Raw limit,
                        Term.Raw perPartitionLimit)
    {
        super(cfName);
        this.parameters = parameters;
        this.selectClause = selectClause;
        this.whereClause = whereClause;
        this.limit = limit;
        this.perPartitionLimit = perPartitionLimit;
    }

    // NOTE(review): the ClientState argument is unused; delegates to the non-view prepare.
    public SelectStatement prepare(ClientState state)
    {
        return prepare(false);
    }

    /**
     * Validates the raw clauses against the schema and assembles the prepared
     * statement: selection, restrictions, aggregation spec, ordering and limits.
     *
     * @param forView whether this SELECT defines a materialized view
     */
    public SelectStatement prepare(boolean forView) throws InvalidRequestException
    {
        TableMetadata table = Schema.instance.validateTable(keyspace(), name());
        List<Selectable> selectables = RawSelector.toSelectables(selectClause, table);
        boolean containsOnlyStaticColumns = selectOnlyStaticColumns(table, selectables);
        StatementRestrictions restrictions = prepareRestrictions(table, bindVariables, containsOnlyStaticColumns, forView);
        // If we order post-query, the sorted column needs to be in the ResultSet for sorting,
        // even if we don't ultimately ship them to the client (CASSANDRA-4911).
        Map<ColumnMetadata, Boolean> orderingColumns = getOrderingColumns(table);
        Set<ColumnMetadata> resultSetOrderingColumns = restrictions.keyIsInRelation() ? orderingColumns.keySet()
                                                                                      : Collections.emptySet();
        Selection selection = prepareSelection(table,
                                               selectables,
                                               bindVariables,
                                               resultSetOrderingColumns,
                                               restrictions);
        if (parameters.isDistinct)
        {
            checkNull(perPartitionLimit, "PER PARTITION LIMIT is not allowed with SELECT DISTINCT queries");
            validateDistinctSelection(table, selection, restrictions);
        }
        AggregationSpecification aggregationSpec = getAggregationSpecification(table,
                                                                              selection,
                                                                              restrictions,
                                                                              parameters.isDistinct);
        checkFalse(aggregationSpec == AggregationSpecification.AGGREGATE_EVERYTHING && perPartitionLimit != null,
                   "PER PARTITION LIMIT is not allowed with aggregate queries.");
        Comparator<List<ByteBuffer>> orderingComparator = null;
        boolean isReversed = false;
        if (!orderingColumns.isEmpty())
        {
            // Views never declare an ORDER BY.
            assert !forView;
            verifyOrderingIsAllowed(restrictions);
            orderingComparator = getOrderingComparator(selection, restrictions, orderingColumns);
            isReversed = isReversed(table, orderingColumns, restrictions);
            if (isReversed)
                orderingComparator = Collections.reverseOrder(orderingComparator);
        }
        checkNeedsFiltering(restrictions);
        return new SelectStatement(table,
                                   bindVariables,
                                   parameters,
                                   selection,
                                   restrictions,
                                   isReversed,
                                   aggregationSpec,
                                   orderingComparator,
                                   prepareLimit(bindVariables, limit, keyspace(), limitReceiver()),
                                   prepareLimit(bindVariables, perPartitionLimit, keyspace(), perPartitionLimitReceiver()));
    }

    /** Builds the selection, expanding a wildcard when no selectors were given. */
    private Selection prepareSelection(TableMetadata table,
                                       List<Selectable> selectables,
                                       VariableSpecifications boundNames,
                                       Set<ColumnMetadata> resultSetOrderingColumns,
                                       StatementRestrictions restrictions)
    {
        boolean hasGroupBy = !parameters.groups.isEmpty();
        if (selectables.isEmpty()) // wildcard query
        {
            return hasGroupBy ? Selection.wildcardWithGroupBy(table, boundNames, parameters.isJson, restrictions.returnStaticContentOnPartitionWithNoRows())
                              : Selection.wildcard(table, parameters.isJson, restrictions.returnStaticContentOnPartitionWithNoRows());
        }
        return Selection.fromSelectors(table,
                                       selectables,
                                       boundNames,
                                       resultSetOrderingColumns,
                                       restrictions.nonPKRestrictedColumns(false),
                                       hasGroupBy,
                                       parameters.isJson,
                                       restrictions.returnStaticContentOnPartitionWithNoRows());
    }

    /**
     * Checks if the specified selectables select only partition key columns or static columns
     *
     * @param table the table metadata
     * @param selectables the selectables to check
     * @return {@code true} if the specified selectables select only partition key columns or static columns,
     * {@code false} otherwise.
     */
    private boolean selectOnlyStaticColumns(TableMetadata table, List<Selectable> selectables)
    {
        if (table.isStaticCompactTable())
            return false;
        if (!table.hasStaticColumns() || selectables.isEmpty())
            return false;
        return Selectable.selectColumns(selectables, (column) -> column.isStatic())
                && !Selectable.selectColumns(selectables, (column) -> !column.isPartitionKey() && !column.isStatic());
    }

    /**
     * Returns the columns used to order the data, mapped to whether the order is descending.
     * @return the columns used to order the data.
     */
    private Map<ColumnMetadata, Boolean> getOrderingColumns(TableMetadata table)
    {
        if (parameters.orderings.isEmpty())
            return Collections.emptyMap();
        // LinkedHashMap preserves the ORDER BY clause order.
        Map<ColumnMetadata, Boolean> orderingColumns = new LinkedHashMap<>();
        for (Map.Entry<ColumnIdentifier, Boolean> entry : parameters.orderings.entrySet())
        {
            orderingColumns.put(table.getExistingColumn(entry.getKey()), entry.getValue());
        }
        return orderingColumns;
    }

    /**
     * Prepares the restrictions.
     *
     * @param metadata the column family meta data
     * @param boundNames the variable specifications
     * @param selectsOnlyStaticColumns {@code true} if the query select only static columns, {@code false} otherwise.
     * @return the restrictions
     * @throws InvalidRequestException if a problem occurs while building the restrictions
     */
    private StatementRestrictions prepareRestrictions(TableMetadata metadata,
                                                      VariableSpecifications boundNames,
                                                      boolean selectsOnlyStaticColumns,
                                                      boolean forView) throws InvalidRequestException
    {
        return new StatementRestrictions(StatementType.SELECT,
                                         metadata,
                                         whereClause,
                                         boundNames,
                                         selectsOnlyStaticColumns,
                                         parameters.allowFiltering,
                                         forView);
    }

    /** Returns a Term for the limit or null if no limit is set */
    private Term prepareLimit(VariableSpecifications boundNames, Term.Raw limit,
                              String keyspace, ColumnSpecification limitReceiver) throws InvalidRequestException
    {
        if (limit == null)
            return null;
        Term prepLimit = limit.prepare(keyspace, limitReceiver);
        prepLimit.collectMarkerSpecification(boundNames);
        return prepLimit;
    }

    private static void verifyOrderingIsAllowed(StatementRestrictions restrictions) throws InvalidRequestException
    {
        checkFalse(restrictions.usesSecondaryIndexing(), "ORDER BY with 2ndary indexes is not supported.");
        checkFalse(restrictions.isKeyRange(), "ORDER BY is only supported when the partition key is restricted by an EQ or an IN.");
    }

    /** Rejects DISTINCT queries whose restrictions or selected columns go beyond partition key / static columns. */
    private static void validateDistinctSelection(TableMetadata metadata,
                                                  Selection selection,
                                                  StatementRestrictions restrictions)
    throws InvalidRequestException
    {
        checkFalse(restrictions.hasClusteringColumnsRestrictions() ||
                   (restrictions.hasNonPrimaryKeyRestrictions() && !restrictions.nonPKRestrictedColumns(true).stream().allMatch(ColumnMetadata::isStatic)),
                   "SELECT DISTINCT with WHERE clause only supports restriction by partition key and/or static columns.");
        Collection<ColumnMetadata> requestedColumns = selection.getColumns();
        for (ColumnMetadata def : requestedColumns)
            checkFalse(!def.isPartitionKey() && !def.isStatic(),
                       "SELECT DISTINCT queries must only request partition key columns and/or static columns (not %s)",
                       def.name);
        // If it's a key range, we require that all partition key columns are selected so we don't have to bother
        // with post-query grouping.
        if (!restrictions.isKeyRange())
            return;
        for (ColumnMetadata def : metadata.partitionKeyColumns())
            checkTrue(requestedColumns.contains(def),
                      "SELECT DISTINCT queries must request all the partition key columns (missing %s)", def.name);
    }

    /**
     * Creates the <code>AggregationSpecification</code>s used to make the aggregates.
     *
     * @param metadata the table metadata
     * @param selection the selection
     * @param restrictions the restrictions
     * @param isDistinct <code>true</code> if the query is a DISTINCT one.
     * @return the <code>AggregationSpecification</code>s used to make the aggregates
     */
    private AggregationSpecification getAggregationSpecification(TableMetadata metadata,
                                                                Selection selection,
                                                                StatementRestrictions restrictions,
                                                                boolean isDistinct)
    {
        if (parameters.groups.isEmpty())
            return selection.isAggregate() ? AggregationSpecification.AGGREGATE_EVERYTHING
                                           : null;
        int clusteringPrefixSize = 0;
        Iterator<ColumnMetadata> pkColumns = metadata.primaryKeyColumns().iterator();
        for (ColumnIdentifier id : parameters.groups)
        {
            ColumnMetadata def = metadata.getExistingColumn(id);
            checkTrue(def.isPartitionKey() || def.isClusteringColumn(),
                      "Group by is currently only supported on the columns of the PRIMARY KEY, got %s", def.name);
            // Walk the primary key until we reach this GROUP BY column; every column skipped
            // over must be restricted by EQ, otherwise the groups are not contiguous.
            while (true)
            {
                checkTrue(pkColumns.hasNext(),
                          "Group by currently only support groups of columns following their declared order in the PRIMARY KEY");
                ColumnMetadata pkColumn = pkColumns.next();
                if (pkColumn.isClusteringColumn())
                    clusteringPrefixSize++;
                // As we do not support grouping on only part of the partition key, we only need to know
                // which clustering columns need to be used to build the groups
                if (pkColumn.equals(def))
                    break;
                checkTrue(restrictions.isColumnRestrictedByEq(pkColumn),
                          "Group by currently only support groups of columns following their declared order in the PRIMARY KEY");
            }
        }
        checkFalse(pkColumns.hasNext() && pkColumns.next().isPartitionKey(),
                   "Group by is not supported on only a part of the partition key");
        checkFalse(clusteringPrefixSize > 0 && isDistinct,
                   "Grouping on clustering columns is not allowed for SELECT DISTINCT queries");
        return AggregationSpecification.aggregatePkPrefix(metadata.comparator, clusteringPrefixSize);
    }

    /** Builds the row comparator used for post-query ordering; null when no post-query ordering is needed. */
    private Comparator<List<ByteBuffer>> getOrderingComparator(Selection selection,
                                                               StatementRestrictions restrictions,
                                                               Map<ColumnMetadata, Boolean> orderingColumns)
    throws InvalidRequestException
    {
        if (!restrictions.keyIsInRelation())
            return null;
        List<Integer> idToSort = new ArrayList<Integer>(orderingColumns.size());
        List<Comparator<ByteBuffer>> sorters = new ArrayList<Comparator<ByteBuffer>>(orderingColumns.size());
        for (ColumnMetadata orderingColumn : orderingColumns.keySet())
        {
            idToSort.add(selection.getOrderingIndex(orderingColumn));
            sorters.add(orderingColumn.type);
        }
        return idToSort.size() == 1 ? new SingleColumnComparator(idToSort.get(0), sorters.get(0))
                                    : new CompositeComparator(sorters, idToSort);
    }

    /**
     * Determines whether the query reads clusterings in reverse of their declared order,
     * validating that all ORDER BY columns agree on a single direction.
     */
    private boolean isReversed(TableMetadata table, Map<ColumnMetadata, Boolean> orderingColumns, StatementRestrictions restrictions) throws InvalidRequestException
    {
        Boolean[] reversedMap = new Boolean[table.clusteringColumns().size()];
        int i = 0;
        for (Map.Entry<ColumnMetadata, Boolean> entry : orderingColumns.entrySet())
        {
            ColumnMetadata def = entry.getKey();
            boolean reversed = entry.getValue();
            checkTrue(def.isClusteringColumn(),
                      "Order by is currently only supported on the clustered columns of the PRIMARY KEY, got %s", def.name);
            // Clustering columns skipped over by the ORDER BY must be restricted by EQ.
            while (i != def.position())
            {
                checkTrue(restrictions.isColumnRestrictedByEq(table.clusteringColumns().get(i++)),
                          "Order by currently only supports the ordering of columns following their declared order in the PRIMARY KEY");
            }
            i++;
            reversedMap[def.position()] = (reversed != def.isReversedType());
        }
        // Check that all boolean in reversedMap, if set, agrees
        Boolean isReversed = null;
        for (Boolean b : reversedMap)
        {
            // Column on which order is specified can be in any order
            if (b == null)
                continue;
            if (isReversed == null)
            {
                isReversed = b;
                continue;
            }
            checkTrue(isReversed.equals(b), "Unsupported order by relation");
        }
        assert isReversed != null;
        return isReversed;
    }

    /** If ALLOW FILTERING was not specified, this verifies that it is not needed */
    private void checkNeedsFiltering(StatementRestrictions restrictions) throws InvalidRequestException
    {
        // non-key-range non-indexed queries cannot involve filtering underneath
        if (!parameters.allowFiltering && (restrictions.isKeyRange() || restrictions.usesSecondaryIndexing()))
        {
            // We will potentially filter data if either:
            //  - Have more than one IndexExpression
            //  - Have no index expression and the row filter is not the identity
            checkFalse(restrictions.needFiltering(), StatementRestrictions.REQUIRES_ALLOW_FILTERING_MESSAGE);
        }
    }

    private ColumnSpecification limitReceiver()
    {
        return new ColumnSpecification(keyspace(), name(), new ColumnIdentifier("[limit]", true), Int32Type.instance);
    }

    private ColumnSpecification perPartitionLimitReceiver()
    {
        return new ColumnSpecification(keyspace(), name(), new ColumnIdentifier("[per_partition_limit]", true), Int32Type.instance);
    }

    @Override
    public String toString()
    {
        return MoreObjects.toStringHelper(this)
                          .add("name", qualifiedName)
                          .add("selectClause", selectClause)
                          .add("whereClause", whereClause)
                          .add("isDistinct", parameters.isDistinct)
                          .toString();
    }
}
/** Immutable holder for the parser-level SELECT options (ORDER BY, GROUP BY, DISTINCT, ALLOW FILTERING, JSON). */
public static class Parameters
{
    // Public because CASSANDRA-9858
    // Maps each ORDER BY column to whether the requested order is descending.
    public final Map<ColumnIdentifier, Boolean> orderings;
    // GROUP BY columns in declared order.
    public final List<ColumnIdentifier> groups;
    public final boolean isDistinct;
    public final boolean allowFiltering;
    public final boolean isJson;

    public Parameters(Map<ColumnIdentifier, Boolean> orderings,
                      List<ColumnIdentifier> groups,
                      boolean isDistinct,
                      boolean allowFiltering,
                      boolean isJson)
    {
        this.orderings = orderings;
        this.groups = groups;
        this.isDistinct = isDistinct;
        this.allowFiltering = allowFiltering;
        this.isJson = isJson;
    }
}
/**
 * Base class for the post-query ordering comparators; provides null-safe
 * comparison of column values (null sorts before any non-null value).
 */
private static abstract class ColumnComparator<T> implements Comparator<T>
{
    protected final int compare(Comparator<ByteBuffer> comparator, ByteBuffer aValue, ByteBuffer bValue)
    {
        if (aValue != null && bValue != null)
            return comparator.compare(aValue, bValue);
        if (aValue == null)
            return bValue == null ? 0 : -1;
        return 1; // aValue non-null, bValue null
    }
}
/**
* Used in orderResults(...) method when single 'ORDER BY' condition where given
*/
private static class SingleColumnComparator extends ColumnComparator<List<ByteBuffer>>
{
private final int index;
private final Comparator<ByteBuffer> comparator;
public SingleColumnComparator(int columnIndex, Comparator<ByteBuffer> orderer)
{
index = columnIndex;
comparator = orderer;
}
public int compare(List<ByteBuffer> a, List<ByteBuffer> b)
{
return compare(comparator, a.get(index), b.get(index));
}
}
/**
* Used in orderResults(...) method when multiple 'ORDER BY' conditions where given
*/
private static class CompositeComparator extends ColumnComparator<List<ByteBuffer>>
{
private final List<Comparator<ByteBuffer>> orderTypes;
private final List<Integer> positions;
private CompositeComparator(List<Comparator<ByteBuffer>> orderTypes, List<Integer> positions)
{
this.orderTypes = orderTypes;
this.positions = positions;
}
public int compare(List<ByteBuffer> a, List<ByteBuffer> b)
{
for (int i = 0; i < positions.size(); i++)
{
Comparator<ByteBuffer> type = orderTypes.get(i);
int columnPos = positions.get(i);
int comparison = compare(type, a.get(columnPos), b.get(columnPos));
if (comparison != 0)
return comparison;
}
return 0;
}
}
/** Reflection-based debug representation of the prepared statement. */
@Override
public String toString()
{
    return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);
}
/**
 * Renders the partitions this query touches as tokens (never raw keys), suitable
 * for client warnings: a token range for range/index queries, otherwise the
 * token(s) of the restricted partition key(s).
 */
private String loggableTokens(QueryOptions options)
{
    boolean isRangeQuery = restrictions.isKeyRange() || restrictions.usesSecondaryIndexing();
    if (!isRangeQuery)
    {
        Collection<ByteBuffer> keys = restrictions.getPartitionKeys(options);
        if (keys.size() == 1)
            return "token: " + table.partitioner.getToken(Iterables.getOnlyElement(keys)).toString();

        StringBuilder sb = new StringBuilder("tokens: [");
        boolean isFirst = true;
        for (ByteBuffer key : keys)
        {
            if (!isFirst) sb.append(", ");
            sb.append(table.partitioner.getToken(key).toString());
            isFirst = false;
        }
        return sb.append(']').toString();
    }

    AbstractBounds<PartitionPosition> bounds = restrictions.getPartitionKeyBounds(options);
    return "token range: " + (bounds.inclusiveLeft() ? '[' : '(') +
           bounds.left.getToken().toString() + ", " +
           bounds.right.getToken().toString() +
           (bounds.inclusiveRight() ? ']' : ')');
}
/**
 * Reconstructs a CQL-like string for this query with its bound values, used for
 * server-side logging when size thresholds are exceeded. Mirrors the query
 * construction logic, returning "EMPTY" for queries that select nothing.
 */
private String asCQL(QueryOptions options)
{
    ColumnFilter columnFilter = selection.newSelectors(options).getColumnFilter();
    StringBuilder sb = new StringBuilder();
    sb.append("SELECT ").append(queriedColumns().toCQLString());
    sb.append(" FROM ").append(table.keyspace).append('.').append(table.name);
    if (restrictions.isKeyRange() || restrictions.usesSecondaryIndexing())
    {
        // partition range
        ClusteringIndexFilter clusteringIndexFilter = makeClusteringIndexFilter(options, columnFilter);
        if (clusteringIndexFilter == null)
            return "EMPTY";
        RowFilter rowFilter = getRowFilter(options);
        // The LIMIT provided by the user is the number of CQL row he wants returned.
        // We want to have getRangeSlice to count the number of columns, not the number of keys.
        AbstractBounds<PartitionPosition> keyBounds = restrictions.getPartitionKeyBounds(options);
        if (keyBounds == null)
            return "EMPTY";
        DataRange dataRange = new DataRange(keyBounds, clusteringIndexFilter);
        if (!dataRange.isUnrestricted(table) || !rowFilter.isEmpty())
        {
            sb.append(" WHERE ");
            // We put the row filter first because the data range can end by "ORDER BY"
            if (!rowFilter.isEmpty())
            {
                sb.append(rowFilter);
                if (!dataRange.isUnrestricted(table))
                    sb.append(" AND ");
            }
            if (!dataRange.isUnrestricted(table))
                sb.append(dataRange.toCQLString(table, rowFilter));
        }
    }
    else
    {
        // single partition
        Collection<ByteBuffer> keys = restrictions.getPartitionKeys(options);
        if (keys.isEmpty())
            return "EMPTY";
        ClusteringIndexFilter filter = makeClusteringIndexFilter(options, columnFilter);
        if (filter == null)
            return "EMPTY";
        sb.append(" WHERE ");
        // Compound partition keys are rendered as tuples: (pk1, pk2) = (v1, v2).
        boolean compoundPk = table.partitionKeyColumns().size() > 1;
        if (compoundPk) sb.append('(');
        sb.append(ColumnMetadata.toCQLString(table.partitionKeyColumns()));
        if (compoundPk) sb.append(')');
        if (keys.size() == 1)
        {
            sb.append(" = ");
            if (compoundPk) sb.append('(');
            DataRange.appendKeyString(sb, table.partitionKeyType, Iterables.getOnlyElement(keys));
            if (compoundPk) sb.append(')');
        }
        else
        {
            sb.append(" IN (");
            boolean first = true;
            for (ByteBuffer key : keys)
            {
                if (!first)
                    sb.append(", ");
                if (compoundPk) sb.append('(');
                DataRange.appendKeyString(sb, table.partitionKeyType, key);
                if (compoundPk) sb.append(')');
                first = false;
            }
            sb.append(')');
        }
        RowFilter rowFilter = getRowFilter(options);
        if (!rowFilter.isEmpty())
            sb.append(" AND ").append(rowFilter);
        String filterString = filter.toCQLString(table, rowFilter);
        if (!filterString.isEmpty())
            sb.append(" AND ").append(filterString);
    }
    DataLimits limits = getDataLimits(getLimit(options), getPerPartitionLimit(options), options.getPageSize());
    if (limits != DataLimits.NONE)
        sb.append(' ').append(limits);
    return sb.toString();
}
}
| |
/*
* Copyright (C) 2015 John Leacox
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.leacox.motif.cases;
import static com.leacox.motif.extract.matchers.ArgumentMatchers.any;
import static com.leacox.motif.extract.matchers.ArgumentMatchers.eq;
import com.leacox.motif.MatchesAny;
import com.leacox.motif.extract.DecomposableMatchBuilder0;
import com.leacox.motif.extract.DecomposableMatchBuilder1;
import com.leacox.motif.extract.matchers.Matcher;
import java.util.ArrayList;
import java.util.List;
/**
* Motif cases for matching primitives.
*
* @author John Leacox
*/
public final class PrimitiveCases {
// Utility class: private constructor prevents instantiation.
private PrimitiveCases() {
}
// TODO: Are these cases actually useful or is the when(Object o) method good enough?
/**
 * Matches a byte equal to the given value.
 */
public static DecomposableMatchBuilder0<Byte> caseByte(byte b) {
  List<Matcher<Object>> valueMatchers = new ArrayList<>(1);
  valueMatchers.add(eq(b));
  return new DecomposableMatchBuilder0<>(valueMatchers, new PrimitiveFieldExtractor<>(Byte.class));
}
/**
 * Matches any byte.
 *
 * <p>If matched, the byte value is extracted.
 */
public static DecomposableMatchBuilder1<Byte, Byte> caseByte(MatchesAny b) {
  List<Matcher<Object>> valueMatchers = new ArrayList<>(1);
  valueMatchers.add(any());
  return new DecomposableMatchBuilder1<>(valueMatchers, 0, new PrimitiveFieldExtractor<>(Byte.class));
}
/**
 * Matches a short equal to the given value.
 */
public static DecomposableMatchBuilder0<Short> caseShort(short s) {
  List<Matcher<Object>> valueMatchers = new ArrayList<>(1);
  valueMatchers.add(eq(s));
  return new DecomposableMatchBuilder0<>(valueMatchers, new PrimitiveFieldExtractor<>(Short.class));
}
/**
 * Matches any short.
 *
 * <p>If matched, the short value is extracted.
 */
public static DecomposableMatchBuilder1<Short, Short> caseShort(MatchesAny s) {
  List<Matcher<Object>> valueMatchers = new ArrayList<>(1);
  valueMatchers.add(any());
  return new DecomposableMatchBuilder1<>(
      valueMatchers, 0, new PrimitiveFieldExtractor<>(Short.class));
}
/**
 * Matches an int equal to the given value.
 */
public static DecomposableMatchBuilder0<Integer> caseInt(int i) {
  List<Matcher<Object>> valueMatchers = new ArrayList<>(1);
  valueMatchers.add(eq(i));
  return new DecomposableMatchBuilder0<>(valueMatchers, new PrimitiveFieldExtractor<>(Integer.class));
}
/**
 * Matches any int.
 *
 * <p>If matched, the int value is extracted.
 */
public static DecomposableMatchBuilder1<Integer, Integer> caseInt(MatchesAny i) {
  List<Matcher<Object>> valueMatchers = new ArrayList<>(1);
  valueMatchers.add(any());
  return new DecomposableMatchBuilder1<>(
      valueMatchers, 0, new PrimitiveFieldExtractor<>(Integer.class));
}
/**
 * Matches a long equal to the given value.
 */
public static DecomposableMatchBuilder0<Long> caseLong(long l) {
  List<Matcher<Object>> valueMatchers = new ArrayList<>(1);
  valueMatchers.add(eq(l));
  return new DecomposableMatchBuilder0<>(valueMatchers, new PrimitiveFieldExtractor<>(Long.class));
}
/**
* Matches a long.
*
* <p>If matched, the long value is extracted.
*/
public static DecomposableMatchBuilder1<Long, Long> caseLong(MatchesAny<Long> l) {
List<Matcher<Object>> matchers = new ArrayList<>();
matchers.add(any());
return new DecomposableMatchBuilder1<>(matchers, 0, new PrimitiveFieldExtractor<>(Long.class));
}
/**
* Matches a float.
*/
public static DecomposableMatchBuilder0<Float> caseFloat(float f) {
List<Matcher<Object>> matchers = new ArrayList<>();
matchers.add(eq(f));
return new DecomposableMatchBuilder0<>(matchers, new PrimitiveFieldExtractor<>(Float.class));
}
/**
* Matches a float.
*
* <p>If matched, the float value is extracted.
*/
public static DecomposableMatchBuilder1<Float, Float> caseFloat(MatchesAny f) {
List<Matcher<Object>> matchers = new ArrayList<>();
matchers.add(any());
return new DecomposableMatchBuilder1<>(
matchers, 0, new PrimitiveFieldExtractor<>(Float.class));
}
/**
* Matches a double.
*/
public static DecomposableMatchBuilder0<Double> caseDouble(double d) {
List<Matcher<Object>> matchers = new ArrayList<>();
matchers.add(eq(d));
return new DecomposableMatchBuilder0<>(matchers, new PrimitiveFieldExtractor<>(Double.class));
}
/**
* Matches a double.
*
* <p>If matched, the double value is extracted.
*/
public static DecomposableMatchBuilder1<Double, Double> caseDouble(MatchesAny d) {
List<Matcher<Object>> matchers = new ArrayList<>();
matchers.add(any());
return new DecomposableMatchBuilder1<>(
matchers, 0, new PrimitiveFieldExtractor<>(Double.class));
}
/**
* Matches a char.
*/
public static DecomposableMatchBuilder0<Character> caseChar(char c) {
List<Matcher<Object>> matchers = new ArrayList<>();
matchers.add(eq(c));
return new DecomposableMatchBuilder0<>(
matchers, new PrimitiveFieldExtractor<>(Character.class));
}
/**
* Matches a char.
*
* <p>If matched, the char value is extracted.
*/
public static DecomposableMatchBuilder1<Character, Character> caseChar(MatchesAny c) {
List<Matcher<Object>> matchers = new ArrayList<>();
matchers.add(any());
return new DecomposableMatchBuilder1<>(
matchers, 0, new PrimitiveFieldExtractor<>(Character.class));
}
/**
* Matches a String.
*/
public static DecomposableMatchBuilder0<String> caseString(String s) {
List<Matcher<Object>> matchers = new ArrayList<>();
matchers.add(eq(s));
return new DecomposableMatchBuilder0<>(
matchers, new PrimitiveFieldExtractor<>(String.class));
}
/**
* Matches a String.
*
* <p>If matched, the String value is extracted.
*/
public static DecomposableMatchBuilder1<String, String> caseString(MatchesAny s) {
List<Matcher<Object>> matchers = new ArrayList<>();
matchers.add(any());
return new DecomposableMatchBuilder1<>(
matchers, 0, new PrimitiveFieldExtractor<>(String.class));
}
/**
* Matches a boolean.
*/
public static DecomposableMatchBuilder0<Boolean> caseBoolean(boolean b) {
List<Matcher<Object>> matchers = new ArrayList<>();
matchers.add(eq(b));
return new DecomposableMatchBuilder0<>(
matchers, new PrimitiveFieldExtractor<>(Boolean.class));
}
/**
* Matches a boolean.
*
* <p>If matched, the boolean value is extracted.
*/
public static DecomposableMatchBuilder1<Boolean, Boolean> caseBoolean(MatchesAny b) {
List<Matcher<Object>> matchers = new ArrayList<>();
matchers.add(any());
return new DecomposableMatchBuilder1<>(
matchers, 0, new PrimitiveFieldExtractor<>(Boolean.class));
}
}
| |
package com.mapzen.android.graphics.model;
import com.mapzen.android.graphics.internal.EaseTypeConverter;
import com.mapzen.android.graphics.internal.StyleStringGenerator;
import com.mapzen.tangram.LngLat;
import com.mapzen.tangram.Marker;
import android.graphics.drawable.Drawable;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
/**
 * Dynamic marker overlay constructed using a local bitmap.
 *
 * <p>Wraps a Tangram {@link Marker} and mirrors its mutable display state (position, icon, size,
 * color, visibility, draw order, interactivity) so the marker can be restored after a map
 * reload via {@link #setTangramMarker(Marker)}. Size, color, and interactivity are applied by
 * regenerating the marker's style string rather than by individual Tangram calls.
 */
public class BitmapMarker {
  // Owner of this marker's lifecycle; used only by remove().
  private final BitmapMarkerManager bitmapMarkerManager;
  // Underlying Tangram marker; replaceable when markers are restored.
  private Marker tangramMarker;
  // Builds the Tangram style string from width/height/interactivity/color state.
  private final StyleStringGenerator styleStringGenerator;
  private LngLat position;
  // Icon resource id; set to Integer.MIN_VALUE when a Drawable icon overrides it.
  private int resourceId;
  // Icon drawable; set to null when a resource-id icon overrides it.
  private Drawable drawable;
  private int width;
  private int height;
  private boolean isVisible;
  private int drawOrder;
  // Color as an int; set to Integer.MIN_VALUE when a hex color string overrides it.
  private int colorInt;
  private String colorHex;
  private boolean isInteractive;
  /**
   * Constructor that wraps a Tangram marker.
   *
   * @param bitmapMarkerManager manager that owns this marker and handles its removal.
   * @param tangramMarker the underlying Tangram marker object.
   * @param styleStringGenerator generator used to rebuild the marker's style string.
   */
  public BitmapMarker(BitmapMarkerManager bitmapMarkerManager, Marker tangramMarker,
      StyleStringGenerator styleStringGenerator) {
    this.bitmapMarkerManager = bitmapMarkerManager;
    this.tangramMarker = tangramMarker;
    this.styleStringGenerator = styleStringGenerator;
  }
  /**
   * Removes this marker from the map. After a marker has been removed, the behavior of all its
   * methods is undefined.
   */
  public void remove() {
    bitmapMarkerManager.removeMarker(this);
  }
  /**
   * Sets the marker's coordinate position.
   * @param position the new coordinate position.
   */
  public void setPosition(@NonNull LngLat position) {
    this.position = position;
    this.tangramMarker.setPoint(position);
  }
  /**
   * Sets the marker's coordinate position with animation.
   * @param position the destination coordinate position.
   * @param duration animation duration; units are those expected by
   *     {@link Marker#setPointEased} — presumably milliseconds, confirm against Tangram docs.
   * @param easeType easing curve used for the animation.
   */
  public void setPosition(@NonNull LngLat position, int duration, EaseType easeType) {
    this.position = position;
    this.tangramMarker.setPointEased(position, duration,
        EaseTypeConverter.EASE_TYPE_TO_MAP_CONTROLLER_EASE_TYPE.get(easeType));
  }
  /**
   * Returns the marker's coordinate position.
   * @return the last position set on this marker.
   */
  public @NonNull LngLat getPosition() {
    return this.position;
  }
  /**
   * Sets the drawable resource id displayed as the marker's icon. Setting this value will override
   * existing icon drawable values set via {@link BitmapMarker#setIcon(Drawable)}.
   * @param resourceId drawable resource id to display.
   */
  public void setIcon(int resourceId) {
    this.resourceId = resourceId;
    // Clear the drawable so only one icon source is considered current.
    this.drawable = null;
    this.tangramMarker.setDrawable(resourceId);
  }
  /**
   * Returns the marker's icon resource id.
   * @return the resource id, or Integer.MIN_VALUE if a drawable icon was set instead.
   */
  public int getIcon() {
    return this.resourceId;
  }
  /**
   * Sets the drawable displayed as the marker's icon. Setting this value will override existing
   * icon resource id values set via {@link BitmapMarker#setIcon(int)}.
   * @param drawable drawable to display, or null.
   */
  public void setIcon(@Nullable Drawable drawable) {
    // Integer.MIN_VALUE marks the resource id as overridden by this drawable.
    this.resourceId = Integer.MIN_VALUE;
    this.drawable = drawable;
    this.tangramMarker.setDrawable(drawable);
  }
  /**
   * Returns the marker's icon drawable.
   * @return the drawable, or null if none (or a resource-id icon) was set.
   */
  public @Nullable Drawable getIconDrawable() {
    return this.drawable;
  }
  /**
   * Sets the width and height in pixels for the marker's size.
   *
   * <p>Applied by regenerating the marker's style string, not by a direct Tangram size call.
   * @param width width in pixels.
   * @param height height in pixels.
   */
  public void setSize(int width, int height) {
    this.width = width;
    this.height = height;
    updateStyleString();
  }
  /**
   * Returns the marker's width in pixels.
   * @return the last width set via {@link #setSize(int, int)}.
   */
  public int getWidth() {
    return this.width;
  }
  /**
   * Returns the marker's height in pixels.
   * @return the last height set via {@link #setSize(int, int)}.
   */
  public int getHeight() {
    return this.height;
  }
  /**
   * Sets the marker's visibility.
   * @param visible true to show the marker, false to hide it.
   */
  public void setVisible(boolean visible) {
    this.isVisible = visible;
    tangramMarker.setVisible(visible);
  }
  /**
   * Returns whether the marker is visible.
   * @return the last visibility value set on this marker.
   */
  public boolean isVisible() {
    return isVisible;
  }
  /**
   * Sets marker z-axis draw order.
   * @param drawOrder draw order; markers with higher values draw on top.
   */
  public void setDrawOrder(int drawOrder) {
    this.drawOrder = drawOrder;
    this.tangramMarker.setDrawOrder(drawOrder);
  }
  /**
   * Returns the marker's z-axis draw order.
   * @return the last draw order set on this marker.
   */
  public int getDrawOrder() {
    return this.drawOrder;
  }
  /**
   * Sets extra data to be associated with this marker.
   *
   * <p>Stored on the underlying Tangram marker, not on this wrapper.
   * @param userData arbitrary client data, or null.
   */
  public void setUserData(@Nullable Object userData) {
    this.tangramMarker.setUserData(userData);
  }
  /**
   * Gets extra data associated with this marker.
   * @return the data previously set via {@link #setUserData(Object)}, or null.
   */
  public @Nullable Object getUserData() {
    return this.tangramMarker.getUserData();
  }
  /**
   * Sets color of marker given a color int ie {@code android.graphics.Color.BLUE}. Setting this
   * value will override existing color hex values set via {@link BitmapMarker#setColor(String)}.
   * @param colorInt color as an int.
   */
  public void setColor(int colorInt) {
    this.colorInt = colorInt;
    // NOTE(review): Integer.toHexString does not zero-pad, so small color values produce
    // short hex strings (e.g. "#ff") — confirm StyleStringGenerator accepts this format.
    this.colorHex = "#" + Integer.toHexString(colorInt);
    updateStyleString();
  }
  /**
   * Returns the marker's color int.
   * @return the color int, or Integer.MIN_VALUE if a hex color was set instead.
   */
  public int getColor() {
    return this.colorInt;
  }
  /**
   * Sets color of marker given a color hex string. Setting this value will override existing
   * color int values set via {@link BitmapMarker#setColor(int)}.
   * @param hex color as a hex string.
   */
  public void setColor(@NonNull String hex) {
    this.colorHex = hex;
    // Integer.MIN_VALUE marks the color int as overridden by this hex value.
    this.colorInt = Integer.MIN_VALUE;
    updateStyleString();
  }
  /**
   * Returns the marker's color hex.
   * @return the hex string last set directly or derived from a color int.
   */
  public @NonNull String getColorHex() {
    return this.colorHex;
  }
  /**
   * Sets whether or not marker can be selected.
   *
   * <p>Applied by regenerating the marker's style string.
   * @param interactive true if the marker should respond to touches.
   */
  public void setInteractive(boolean interactive) {
    this.isInteractive = interactive;
    updateStyleString();
  }
  /**
   * Returns whether or not the marker responds to touches.
   * @return the last interactivity value set on this marker.
   */
  public boolean isInteractive() {
    return this.isInteractive;
  }
  /**
   * Allows setting the tangram marker, useful when restoring markers.
   * @param tangramMarker the replacement underlying Tangram marker.
   */
  void setTangramMarker(Marker tangramMarker) {
    this.tangramMarker = tangramMarker;
  }
  /**
   * Returns the underlying Tangram {@link Marker}.
   * @return the wrapped Tangram marker.
   */
  Marker getTangramMarker() {
    return tangramMarker;
  }
  /**
   * Returns the object used to generate style string.
   * @return the style string generator shared with this marker.
   */
  StyleStringGenerator getStyleStringGenerator() {
    return styleStringGenerator;
  }
  // Rebuilds and applies the style string from current size/interactivity/color state.
  private void updateStyleString() {
    tangramMarker.setStylingFromString(styleStringGenerator.getStyleString(width, height,
        isInteractive, colorHex));
  }
}
| |
/// Copyright 2021 Pinterest Inc.
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
//
// @author Gopal Rajpurohit (grajpurohit@pinterest.com)
//
package com.pinterest.rocksplicator.eventstore;
import static com.pinterest.rocksplicator.thrift.eventhistory.LeaderEventType.CLIENT_OBSERVED_SHARDMAP_LEADER_DOWN;
import static com.pinterest.rocksplicator.thrift.eventhistory.LeaderEventType.CLIENT_OBSERVED_SHARDMAP_LEADER_UP;
import static com.pinterest.rocksplicator.thrift.eventhistory.LeaderEventType.SPECTATOR_OBSERVED_LEADER_DOWN;
import static com.pinterest.rocksplicator.thrift.eventhistory.LeaderEventType.SPECTATOR_OBSERVED_LEADER_UP;
import static com.pinterest.rocksplicator.thrift.eventhistory.LeaderEventType.SPECTATOR_POSTED_SHARDMAP_LEADER_DOWN;
import static com.pinterest.rocksplicator.thrift.eventhistory.LeaderEventType.SPECTATOR_POSTED_SHARDMAP_LEADER_UP;
import com.pinterest.rocksplicator.codecs.Codec;
import com.pinterest.rocksplicator.codecs.WrappedDataThriftCodec;
import com.pinterest.rocksplicator.thrift.commons.io.CompressionAlgorithm;
import com.pinterest.rocksplicator.thrift.commons.io.SerializationProtocol;
import com.pinterest.rocksplicator.thrift.eventhistory.LeaderEvent;
import com.pinterest.rocksplicator.thrift.eventhistory.LeaderEventType;
import com.pinterest.rocksplicator.thrift.eventhistory.LeaderEventsHistory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import com.google.common.math.IntMath;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.BiConsumer;
/**
 * Persists per-partition leader-event histories to a ZooKeeper-backed store.
 *
 * <p>Appends arrive through {@link #asyncAppend} / {@link #asyncBatchAppend} and are routed to
 * one of a fixed pool of single-thread executors by hashing the cluster/resource/partition
 * identity, so events for the same partition are applied serially. Before merging into
 * ZooKeeper, each event is checked against the last known history (local cache first, then a
 * ZooKeeper read) and duplicate spectator/client observations are dropped.
 */
public class LeaderEventHistoryStore implements Closeable {
  private static final Logger LOGGER = LoggerFactory.getLogger(LeaderEventHistoryStore.class);
  private static final int NUM_PARALLEL_THREADS = 64;
  private final String zkConnectString;
  private final String clusterName;
  private final CuratorFramework zkClient;
  // Optional cap on history length, enforced by the batch merge operator.
  private final Optional<Integer> maxEventsToKeep;
  private final Codec<LeaderEventsHistory, byte[]> leaderEventsHistoryCodec;
  // One single-thread executor per hash bucket; preserves per-partition ordering.
  private final List<ExecutorService> executorServices;
  // resource -> partition -> zk-backed mergeable store; entries expire after 10 idle minutes.
  private final LoadingCache<String, LoadingCache<String,
      MergeableReadWriteStore<LeaderEventsHistory, LeaderEvent>>>
      zkStoreCache;
  // Local last-known history per resource/partition, used to filter duplicates cheaply.
  private final LoadingCache<String, Cache<String, LeaderEventsHistory>> localHistoryCache;
  // One lock per hash bucket, guarding the read-check-merge sequence for a partition.
  private final List<Lock> clusterResourcePartitionLock;

  /**
   * Creates the store and blocks until a ZooKeeper connection is established.
   *
   * @param zkConnectString ZooKeeper connect string; must not be null.
   * @param clusterName Helix cluster whose leader events are stored; must not be null.
   * @param maxEventsToKeep optional maximum number of events retained per partition.
   * @throws RuntimeException if the ZooKeeper connection cannot be established in time.
   */
  public LeaderEventHistoryStore(
      final String zkConnectString,
      final String clusterName,
      final Optional<Integer> maxEventsToKeep) {
    this.zkConnectString = Preconditions.checkNotNull(zkConnectString);
    this.clusterName = Preconditions.checkNotNull(clusterName);
    this.leaderEventsHistoryCodec = new WrappedDataThriftCodec(
        LeaderEventsHistory.class, SerializationProtocol.COMPACT, CompressionAlgorithm.GZIP);
    this.maxEventsToKeep = maxEventsToKeep;
    this.zkClient =
        CuratorFrameworkFactory.newClient(Preconditions.checkNotNull(this.zkConnectString),
            new ExponentialBackoffRetry(1000, 3));
    this.zkClient.start();
    try {
      this.zkClient.blockUntilConnected(60, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
      LOGGER.error(String.format("Can't connect to zookeeper: %s", zkConnectString), e);
      throw new RuntimeException(e);
    }
    // Outer cache: resource -> (inner cache of partition -> zk store). Evicting a resource
    // invalidates its inner cache, which in turn closes each partition's zk store.
    this.zkStoreCache = CacheBuilder.newBuilder()
        .expireAfterAccess(10, TimeUnit.MINUTES)
        .removalListener(
            new RemovalListener<String, LoadingCache<String,
                MergeableReadWriteStore<LeaderEventsHistory, LeaderEvent>>>() {
              @Override
              public void onRemoval(
                  RemovalNotification<String, LoadingCache<String,
                      MergeableReadWriteStore<LeaderEventsHistory, LeaderEvent>>> notification) {
                notification.getValue().invalidateAll();
              }
            })
        .build(new CacheLoader<String,
            LoadingCache<String, MergeableReadWriteStore<LeaderEventsHistory, LeaderEvent>>>() {
          @Override
          public LoadingCache<String, MergeableReadWriteStore<LeaderEventsHistory,
              LeaderEvent>> load(
              String resourceName) throws Exception {
            return CacheBuilder.newBuilder()
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .removalListener(
                    new RemovalListener<String, MergeableReadWriteStore<LeaderEventsHistory,
                        LeaderEvent>>() {
                      @Override
                      public void onRemoval(
                          RemovalNotification<String, MergeableReadWriteStore<LeaderEventsHistory,
                              LeaderEvent>> notification) {
                        try {
                          notification.getValue().close();
                        } catch (IOException e) {
                          LOGGER.warn("Error while closing the zkStore", e);
                        }
                      }
                    })
                .build(new CacheLoader<String,
                    MergeableReadWriteStore<LeaderEventsHistory, LeaderEvent>>() {
                  @Override
                  public MergeableReadWriteStore<LeaderEventsHistory, LeaderEvent> load(
                      String partitionName) throws Exception {
                    return new ZkMergeableEventStore<>(zkClient, clusterName,
                        resourceName, partitionName, leaderEventsHistoryCodec,
                        LeaderEventsHistory::new,
                        MergeOperators
                            .createBatchMergeOperator(resourceName, partitionName,
                                LeaderEventHistoryStore.this.maxEventsToKeep));
                  }
                });
          }
        });
    this.executorServices = new ArrayList<>(NUM_PARALLEL_THREADS);
    for (int i = 0; i < NUM_PARALLEL_THREADS; ++i) {
      this.executorServices.add(Executors.newSingleThreadExecutor());
    }
    this.clusterResourcePartitionLock = new ArrayList<>(NUM_PARALLEL_THREADS);
    for (int i = 0; i < NUM_PARALLEL_THREADS; ++i) {
      this.clusterResourcePartitionLock.add(new ReentrantLock());
    }
    this.localHistoryCache = CacheBuilder.newBuilder()
        .expireAfterAccess(10, TimeUnit.MINUTES)
        .build(new CacheLoader<String, Cache<String, LeaderEventsHistory>>() {
          @Override
          public Cache<String, LeaderEventsHistory> load(String resourceName) throws Exception {
            return CacheBuilder.newBuilder()
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build();
          }
        });
  }

  /** Builds the "cluster/resource/partition" identity used for hashing. */
  private static String getFullIdentifier(String cluster, String resource, String partition) {
    StringBuilder builder = new StringBuilder()
        .append(cluster)
        .append('/')
        .append(resource)
        .append('/')
        .append(partition);
    return builder.toString();
  }

  /** Returns the executor assigned to this partition's hash bucket. */
  private ExecutorService getExecutorService(String cluster, String resource, String partition) {
    return this.executorServices.get(
        IntMath.mod(getFullIdentifier(cluster, resource, partition).hashCode(),
            this.executorServices.size()));
  }

  /** Returns the lock assigned to this partition's hash bucket. */
  private Lock getLock(String cluster, String resource, String partition) {
    return this.clusterResourcePartitionLock.get(
        IntMath.mod(getFullIdentifier(cluster, resource, partition).hashCode(),
            this.clusterResourcePartitionLock.size()));
  }

  /**
   * Asynchronously appends a single leader event for the given partition.
   *
   * @param resourceName resource the event belongs to.
   * @param partitionName partition the event belongs to.
   * @param event the leader event to append.
   */
  public void asyncAppend(String resourceName, String partitionName, LeaderEvent event) {
    List<LeaderEvent> leaderEvents = new ArrayList<>();
    leaderEvents.add(event);
    asyncBatchAppend(resourceName, partitionName, leaderEvents);
  }

  /**
   * Asynchronously appends a batch of leader events for the given partition.
   *
   * <p>The batch is handed to the partition's dedicated executor, so batches for the same
   * partition are applied in submission order.
   *
   * @param resourceName resource the events belong to.
   * @param partitionName partition the events belong to.
   * @param events the leader events to append; treated as a single all-or-nothing batch.
   */
  public void asyncBatchAppend(String resourceName, String partitionName,
                               List<LeaderEvent> events) {
    ExecutorService service = getExecutorService(clusterName, resourceName, partitionName);
    service.submit(() -> batchAppend(resourceName, partitionName, events));
  }

  /** Executor-side entry point; shields the executor thread from any append failure. */
  @VisibleForTesting
  void batchAppend(String resource, String partition, List<LeaderEvent> leaderEvents) {
    try {
      batchUnProtectedAppend(resource, partition, leaderEvents);
    } catch (Throwable e) {
      LOGGER.error("Error processing events", e);
    }
  }

  /**
   * Validates, de-duplicates, and merges a batch of events into the ZooKeeper store.
   *
   * <p>Blocks the calling thread. Hence not directly available to the client.
   *
   * @param resource resource the events belong to; must not be null.
   * @param partition partition the events belong to; must not be null.
   * @param leaderEvents events to merge; each must have its type, timestamp, and
   *     originating node set. Either all of them are merged or none are.
   */
  @VisibleForTesting
  void batchUnProtectedAppend(String resource, String partition, List<LeaderEvent> leaderEvents) {
    Preconditions.checkNotNull(resource);
    Preconditions.checkNotNull(partition);
    Preconditions.checkNotNull(leaderEvents);
    for (LeaderEvent leaderEvent : leaderEvents) {
      Preconditions.checkArgument(leaderEvent.isSetEvent_type());
      Preconditions.checkArgument(leaderEvent.isSetEvent_timestamp_ms());
      Preconditions.checkArgument(leaderEvent.isSetOriginating_node());
    }
    Lock lock = getLock(clusterName, resource, partition);
    lock.lock();
    try {
      // Prefer the locally cached history; fall back to a ZooKeeper read on a cold cache.
      LeaderEventsHistory cachedHistory =
          localHistoryCache.getUnchecked(resource).getIfPresent(partition);
      if (cachedHistory == null) {
        try {
          cachedHistory = zkStoreCache.getUnchecked(resource).getUnchecked(partition).read();
          if (cachedHistory != null) {
            localHistoryCache.getUnchecked(resource).put(partition, cachedHistory);
          }
        } catch (IOException exp) {
          // Do nothing. This happens when we are writing for the first time.
        }
      }
      leaderEvents.sort(new DescendingTimestampLeaderEventComparator());
      // Either all of them are logged or none of them are logged...
      // hence, if any event is not eligible,
      // none in the batch are considered to be eligible.
      for (LeaderEvent leaderEvent : leaderEvents) {
        if (!isEligible(cachedHistory, leaderEvent)) {
          return;
        }
      }
      LeaderEventsHistory batchUpdateHistory = new LeaderEventsHistory().setEvents(leaderEvents);
      LeaderEventsHistory mergedHistory =
          zkStoreCache.getUnchecked(resource).getUnchecked(partition).merge(batchUpdateHistory);
      if (mergedHistory != null) {
        localHistoryCache.getUnchecked(resource).put(partition, mergedHistory);
      }
    } catch (Throwable e) {
      LOGGER.error(String.format("Error processing leaderevents: %s", leaderEvents), e);
    } finally {
      lock.unlock();
    }
  }

  /**
   * Decides whether an event should be merged, given the last known history.
   *
   * @param lastKnownHistory previously stored history, or null when none exists yet.
   * @param leaderEvent the candidate event.
   * @return true if the event should be merged; false if it is a duplicate or of unknown type.
   */
  @VisibleForTesting
  boolean isEligible(LeaderEventsHistory lastKnownHistory, LeaderEvent leaderEvent) {
    if (lastKnownHistory != null) {
      if (LeaderEventTypes.spectatorEventTypes.contains(leaderEvent.getEvent_type())) {
        if (!shouldAddSpectatorEvent(lastKnownHistory, leaderEvent)) {
          // No further processing of this event, as it is a duplicate of previous event
          // and the state has not changed from what is previously available.
          return false;
        }
      } else if (LeaderEventTypes.clientEventTypes.contains(leaderEvent.getEvent_type())) {
        if (!shouldAddClientEvent(lastKnownHistory, leaderEvent)) {
          return false;
        }
      } else if (LeaderEventTypes.participantEventTypes.contains(leaderEvent.getEvent_type())) {
        if (!shouldAddParticipantEvent(lastKnownHistory, leaderEvent)) {
          return false;
        }
      } else {
        // Unknown event, log and skip the event. We skip the event here, in order to protect
        // the integrity of the system. We need to know what we are logging, before logging the
        // data into zk store.
        LOGGER.error("Unknown type of leader event, not logging to zookeeper {}", leaderEvent);
        return false;
      }
    }
    return true;
  }

  /** Participant events are always logged; duplicate transitions are rare. */
  @VisibleForTesting
  boolean shouldAddParticipantEvent(LeaderEventsHistory lastKnownHistory,
                                    LeaderEvent currentLeaderEvent) {
    // we log all participant events, as it is rare occurrence when duplicate transitions gets
    // fired.
    // We can revisit this logic if this is found to be more often.
    return true;
  }

  /**
   * Decides whether a client-observed event should be logged, suppressing repeats of the same
   * up/down observation from the same originating node.
   */
  @VisibleForTesting
  boolean shouldAddClientEvent(LeaderEventsHistory lastKnownHistory,
                               LeaderEvent currentLeaderEvent) {
    LeaderEventType currentLeaderEventType = currentLeaderEvent.getEvent_type();
    if (currentLeaderEventType == CLIENT_OBSERVED_SHARDMAP_LEADER_UP) {
      // History is ordered newest-first; the first client event from the same node decides.
      for (int i = 0; i < lastKnownHistory.getEventsSize(); ++i) {
        LeaderEvent oldEvent = lastKnownHistory.getEvents().get(i);
        LeaderEventType oldEventType = oldEvent.getEvent_type();
        if (!LeaderEventTypes.clientEventTypes.contains(oldEventType)) {
          continue;
        }
        // continue if the old event originated on different node.
        if (!oldEvent.getOriginating_node().equals(currentLeaderEvent.getOriginating_node())) {
          continue;
        }
        if (oldEventType == CLIENT_OBSERVED_SHARDMAP_LEADER_DOWN) {
          return true;
        } else if (oldEventType == CLIENT_OBSERVED_SHARDMAP_LEADER_UP) {
          if (oldEvent.getObserved_leader_node()
              .equals(currentLeaderEvent.getObserved_leader_node())) {
            // This is a repeat event, hence ignore this event and do not merge.
            return false;
          } else {
            return true;
          }
        }
      }
      return true;
    } else if (currentLeaderEvent.getEvent_type() == CLIENT_OBSERVED_SHARDMAP_LEADER_DOWN) {
      for (int i = 0; i < lastKnownHistory.getEventsSize(); ++i) {
        LeaderEvent oldEvent = lastKnownHistory.getEvents().get(i);
        LeaderEventType oldEventType = oldEvent.getEvent_type();
        if (!LeaderEventTypes.clientEventTypes.contains(oldEventType)) {
          continue;
        }
        // continue if the old event originated on different node.
        if (!oldEvent.getOriginating_node().equals(currentLeaderEvent.getOriginating_node())) {
          continue;
        }
        if (oldEventType == CLIENT_OBSERVED_SHARDMAP_LEADER_DOWN) {
          // This is a repeat event for downed leader of a partition.. ignore.
          // We won't have an observer_node in this case.
          return false;
        } else if (oldEventType == CLIENT_OBSERVED_SHARDMAP_LEADER_UP) {
          return true;
        }
      }
      // This is a first event of type leader going down, so log it.
      return true;
    }
    // Fixed: SLF4J uses {} placeholders; the previous "%s" was never interpolated.
    LOGGER.error("Unknown event: skipping {}", currentLeaderEvent);
    return false;
  }

  /**
   * Decides whether a spectator-observed/posted event should be logged, suppressing repeats of
   * the same up/down observation from the same originating node.
   */
  @VisibleForTesting
  boolean shouldAddSpectatorEvent(LeaderEventsHistory lastKnownHistory,
                                  LeaderEvent currentLeaderEvent) {
    LeaderEventType currentLeaderEventType = currentLeaderEvent.getEvent_type();
    if (currentLeaderEventType == SPECTATOR_OBSERVED_LEADER_UP) {
      // History is ordered newest-first; the first spectator event from the same node decides.
      for (int i = 0; i < lastKnownHistory.getEventsSize(); ++i) {
        LeaderEvent oldEvent = lastKnownHistory.getEvents().get(i);
        LeaderEventType oldEventType = oldEvent.getEvent_type();
        if (!LeaderEventTypes.spectatorEventTypes.contains(oldEventType)) {
          continue;
        }
        // continue if the old event originated on different node.
        if (!oldEvent.getOriginating_node().equals(currentLeaderEvent.getOriginating_node())) {
          continue;
        }
        if (oldEventType == SPECTATOR_OBSERVED_LEADER_DOWN) {
          return true;
        } else if (oldEventType == SPECTATOR_OBSERVED_LEADER_UP) {
          if (oldEvent.getObserved_leader_node()
              .equals(currentLeaderEvent.getObserved_leader_node())) {
            // This is a repeat event, hence ignore this event and do not merge.
            return false;
          } else {
            return true;
          }
        }
      }
      return true;
    } else if (currentLeaderEventType == SPECTATOR_POSTED_SHARDMAP_LEADER_UP) {
      for (int i = 0; i < lastKnownHistory.getEventsSize(); ++i) {
        LeaderEvent oldEvent = lastKnownHistory.getEvents().get(i);
        LeaderEventType oldEventType = oldEvent.getEvent_type();
        if (!LeaderEventTypes.spectatorEventTypes.contains(oldEventType)) {
          continue;
        }
        // continue if the old event originated on different node.
        if (!oldEvent.getOriginating_node().equals(currentLeaderEvent.getOriginating_node())) {
          continue;
        }
        if (oldEventType == SPECTATOR_POSTED_SHARDMAP_LEADER_DOWN) {
          return true;
        } else if (oldEventType == SPECTATOR_POSTED_SHARDMAP_LEADER_UP) {
          if (oldEvent.getObserved_leader_node()
              .equals(currentLeaderEvent.getObserved_leader_node())) {
            // This is a repeat event, hence ignore this event and do not merge.
            return false;
          } else {
            return true;
          }
        }
      }
      return true;
    } else if (currentLeaderEvent.getEvent_type() == SPECTATOR_OBSERVED_LEADER_DOWN) {
      for (int i = 0; i < lastKnownHistory.getEventsSize(); ++i) {
        LeaderEvent oldEvent = lastKnownHistory.getEvents().get(i);
        LeaderEventType oldEventType = oldEvent.getEvent_type();
        if (!LeaderEventTypes.spectatorEventTypes.contains(oldEventType)) {
          continue;
        }
        // continue if the old event originated on different node.
        if (!oldEvent.getOriginating_node().equals(currentLeaderEvent.getOriginating_node())) {
          continue;
        }
        if (oldEventType == SPECTATOR_OBSERVED_LEADER_DOWN) {
          // This is a repeat event for downed leader of a partition.. ignore.
          // We won't have an observer_node in this case.
          return false;
        } else if (oldEventType == SPECTATOR_OBSERVED_LEADER_UP) {
          return true;
        }
      }
      // This is a first event of type leader going down, so log it.
      return true;
    } else if (currentLeaderEvent.getEvent_type() == SPECTATOR_POSTED_SHARDMAP_LEADER_DOWN) {
      for (int i = 0; i < lastKnownHistory.getEventsSize(); ++i) {
        LeaderEvent oldEvent = lastKnownHistory.getEvents().get(i);
        LeaderEventType oldEventType = oldEvent.getEvent_type();
        if (!LeaderEventTypes.spectatorEventTypes.contains(oldEventType)) {
          continue;
        }
        // continue if the old event originated on different node.
        if (!oldEvent.getOriginating_node().equals(currentLeaderEvent.getOriginating_node())) {
          continue;
        }
        if (oldEventType == SPECTATOR_POSTED_SHARDMAP_LEADER_DOWN) {
          // This is a repeat event for downed leader of a partition.. ignore.
          // We won't have an observer_node in this case.
          return false;
        } else if (oldEventType == SPECTATOR_POSTED_SHARDMAP_LEADER_UP) {
          return true;
        }
      }
      // This is a first event of type leader going down, so log it.
      return true;
    }
    // Fixed: SLF4J uses {} placeholders; the previous "%s" was never interpolated.
    LOGGER.error("Unknown event: skipping {}", currentLeaderEvent);
    return false;
  }

  /** Closes every cached zk store and invalidates both cache levels. */
  public void resetCache() {
    this.zkStoreCache.asMap().forEach(
        new BiConsumer<String, LoadingCache<String, MergeableReadWriteStore<LeaderEventsHistory,
            LeaderEvent>>>() {
          @Override
          public void accept(String resourceName,
                             LoadingCache<String, MergeableReadWriteStore<LeaderEventsHistory,
                                 LeaderEvent>> cache) {
            cache.asMap().forEach(
                new BiConsumer<String, MergeableReadWriteStore<LeaderEventsHistory, LeaderEvent>>() {
                  @Override
                  public void accept(String partitionName,
                                     MergeableReadWriteStore<LeaderEventsHistory, LeaderEvent> zkStore) {
                    try {
                      zkStore.close();
                    } catch (IOException e) {
                      // Fixed: include the exception so the failure cause is not lost.
                      LOGGER.error(String
                          .format("Couldn't close zkStore for resource: %s partition: %s",
                              resourceName, partitionName), e);
                    }
                  }
                });
            cache.invalidateAll();
          }
        });
    this.zkStoreCache.invalidateAll();
  }

  /**
   * Drains the executors, closes all cached zk stores, and closes the ZooKeeper client.
   *
   * @throws IOException declared by {@link Closeable}; not thrown by this implementation.
   */
  @Override
  public void close() throws IOException {
    for (ExecutorService service : executorServices) {
      service.shutdown();
    }
    // Wait for in-flight appends to finish so no event is dropped on shutdown.
    for (ExecutorService service : executorServices) {
      while (!service.isTerminated()) {
        try {
          service.awaitTermination(100, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
          LOGGER.error("Interrupted: ", e);
        }
      }
    }
    resetCache();
    this.zkClient.close();
  }
}
| |
/*
* Copyright 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.common;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.BufferedImageLuminanceSource;
import com.google.zxing.DecodeHintType;
import com.google.zxing.LuminanceSource;
import com.google.zxing.Reader;
import com.google.zxing.ReaderException;
import com.google.zxing.Result;
import com.google.zxing.ResultMetadataType;
import org.junit.Assert;
import org.junit.Test;
import javax.imageio.ImageIO;
import java.awt.Graphics;
import java.awt.geom.AffineTransform;
import java.awt.geom.RectangularShape;
import java.awt.image.AffineTransformOp;
import java.awt.image.BufferedImage;
import java.awt.image.BufferedImageOp;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Logger;
/**
* @author Sean Owen
* @author dswitkin@google.com (Daniel Switkin)
*/
/**
 * Framework for "black box" decoder tests. A concrete subclass points this class at a
 * directory of barcode images (each with a matching {@code .txt} or {@code .bin}
 * expected-content file and an optional {@code .metadata.txt} properties file),
 * registers per-rotation pass/fail thresholds via {@link #addTest}, and
 * {@link #testBlackBox()} then decodes every image at every registered rotation —
 * both with and without the TRY_HARDER hint — and asserts the thresholds hold.
 */
public abstract class AbstractBlackBoxTestCase extends Assert {
  private static final Logger log = Logger.getLogger(AbstractBlackBoxTestCase.class.getSimpleName());
  // Directory holding the test images and their expected-result files.
  private final Path testBase;
  // The decoder implementation under test; shared across all images and rotations.
  private final Reader barcodeReader;
  // A decode whose reported format differs from this counts as a misread, not a pass.
  private final BarcodeFormat expectedFormat;
  // One TestResult (thresholds + rotation) per addTest() call.
  private final List<TestResult> testResults;
  /**
   * Resolves the image directory, trying the suffix as given and then under "core/",
   * so the tests work whether run from the project root or from the core module.
   *
   * @param testBasePathSuffix relative path of the image directory
   * @return the resolved path; existence is asserted later by getImageFiles(), not here
   */
  public static Path buildTestBase(String testBasePathSuffix) {
    // A little workaround to prevent aggravation in my IDE
    Path testBase = Paths.get(testBasePathSuffix);
    if (!Files.exists(testBase)) {
      // try starting with 'core' since the test base is often given as the project root
      testBase = Paths.get("core").resolve(testBasePathSuffix);
    }
    return testBase;
  }
  /**
   * @param testBasePathSuffix directory of test images, relative to the project or core root
   * @param barcodeReader the reader implementation under test
   * @param expectedFormat the format every image in this suite is expected to decode as
   */
  protected AbstractBlackBoxTestCase(String testBasePathSuffix,
                                     Reader barcodeReader,
                                     BarcodeFormat expectedFormat) {
    this.testBase = buildTestBase(testBasePathSuffix);
    this.barcodeReader = barcodeReader;
    this.expectedFormat = expectedFormat;
    testResults = new ArrayList<>();
    // Compact java.util.logging output: level and message only, no per-record header line.
    System.setProperty("java.util.logging.SimpleFormatter.format", "%4$s: %5$s%6$s%n");
  }
  protected final Path getTestBase() {
    return testBase;
  }
  /** Convenience overload: no misreads are tolerated at this rotation. */
  protected final void addTest(int mustPassCount, int tryHarderCount, float rotation) {
    addTest(mustPassCount, tryHarderCount, 0, 0, rotation);
  }
  /**
   * Adds a new test for the current directory of images.
   *
   * @param mustPassCount The number of images which must decode for the test to pass.
   * @param tryHarderCount The number of images which must pass using the try harder flag.
   * @param maxMisreads Maximum number of images which can fail due to successfully reading the wrong contents
   * @param maxTryHarderMisreads Maximum number of images which can fail due to successfully
   *                             reading the wrong contents using the try harder flag
   * @param rotation The rotation in degrees clockwise to use for this test.
   */
  protected final void addTest(int mustPassCount,
                               int tryHarderCount,
                               int maxMisreads,
                               int maxTryHarderMisreads,
                               float rotation) {
    testResults.add(new TestResult(mustPassCount, tryHarderCount, maxMisreads, maxTryHarderMisreads, rotation));
  }
  /**
   * @return all image files (jpg/jpeg/gif/png, upper or lower case) directly inside the test base
   * @throws IOException if the directory cannot be listed
   */
  protected final List<Path> getImageFiles() throws IOException {
    assertTrue("Please download and install test images, and run from the 'core' directory", Files.exists(testBase));
    List<Path> paths = new ArrayList<>();
    try (DirectoryStream<Path> pathIt = Files.newDirectoryStream(testBase, "*.{jpg,jpeg,gif,png,JPG,JPEG,GIF,PNG}")) {
      for (Path path : pathIt) {
        paths.add(path);
      }
    }
    return paths;
  }
  final Reader getReader() {
    return barcodeReader;
  }
  /**
   * Decodes every image at every registered rotation, with and without the TRY_HARDER
   * hint, logs per-rotation and overall summaries, and finally asserts each rotation's
   * pass and misread thresholds.
   *
   * @throws IOException if an image or expected-result file cannot be read
   */
  @Test
  public void testBlackBox() throws IOException {
    assertFalse(testResults.isEmpty());
    List<Path> imageFiles = getImageFiles();
    int testCount = testResults.size();
    // Parallel counter arrays, one slot per registered rotation.
    int[] passedCounts = new int[testCount];
    int[] misreadCounts = new int[testCount];
    int[] tryHarderCounts = new int[testCount];
    int[] tryHarderMisreadCounts = new int[testCount];
    for (Path testImage : imageFiles) {
      log.info(String.format("Starting %s", testImage));
      BufferedImage image = ImageIO.read(testImage.toFile());
      String testImageFileName = testImage.getFileName().toString();
      // NOTE(review): uses the FIRST '.', so "foo.bar.png" maps to "foo.txt" — assumes
      // image base names contain no extra dots; confirm against the image corpus.
      String fileBaseName = testImageFileName.substring(0, testImageFileName.indexOf('.'));
      Path expectedTextFile = testBase.resolve(fileBaseName + ".txt");
      String expectedText;
      if (Files.exists(expectedTextFile)) {
        expectedText = readFileAsString(expectedTextFile, StandardCharsets.UTF_8);
      } else {
        // Fall back to a raw binary expectation, read as Latin-1 (one char per byte).
        expectedTextFile = testBase.resolve(fileBaseName + ".bin");
        assertTrue(Files.exists(expectedTextFile));
        expectedText = readFileAsString(expectedTextFile, StandardCharsets.ISO_8859_1);
      }
      // Optional per-image expected result metadata, as key=value properties.
      Path expectedMetadataFile = testBase.resolve(fileBaseName + ".metadata.txt");
      Properties expectedMetadata = new Properties();
      if (Files.exists(expectedMetadataFile)) {
        try (BufferedReader reader = Files.newBufferedReader(expectedMetadataFile, StandardCharsets.UTF_8)) {
          expectedMetadata.load(reader);
        }
      }
      for (int x = 0; x < testCount; x++) {
        float rotation = testResults.get(x).getRotation();
        BufferedImage rotatedImage = rotateImage(image, rotation);
        LuminanceSource source = new BufferedImageLuminanceSource(rotatedImage);
        BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
        // First attempt: default hints. A wrong result counts as a misread;
        // a ReaderException counts as "not detected".
        try {
          if (decode(bitmap, rotation, expectedText, expectedMetadata, false)) {
            passedCounts[x]++;
          } else {
            misreadCounts[x]++;
          }
        } catch (ReaderException ignored) {
          log.fine(String.format("could not read at rotation %f", rotation));
        }
        // Second attempt: same image with the TRY_HARDER hint set.
        try {
          if (decode(bitmap, rotation, expectedText, expectedMetadata, true)) {
            tryHarderCounts[x]++;
          } else {
            tryHarderMisreadCounts[x]++;
          }
        } catch (ReaderException ignored) {
          log.fine(String.format("could not read at rotation %f w/TH", rotation));
        }
      }
    }
    // Print the results of all tests first
    int totalFound = 0;
    int totalMustPass = 0;
    int totalMisread = 0;
    int totalMaxMisread = 0;
    for (int x = 0; x < testResults.size(); x++) {
      TestResult testResult = testResults.get(x);
      log.info(String.format("Rotation %d degrees:", (int) testResult.getRotation()));
      log.info(String.format(" %d of %d images passed (%d required)",
                             passedCounts[x], imageFiles.size(), testResult.getMustPassCount()));
      int failed = imageFiles.size() - passedCounts[x];
      log.info(String.format(" %d failed due to misreads, %d not detected",
                             misreadCounts[x], failed - misreadCounts[x]));
      log.info(String.format(" %d of %d images passed with try harder (%d required)",
                             tryHarderCounts[x], imageFiles.size(), testResult.getTryHarderCount()));
      failed = imageFiles.size() - tryHarderCounts[x];
      log.info(String.format(" %d failed due to misreads, %d not detected",
                             tryHarderMisreadCounts[x], failed - tryHarderMisreadCounts[x]));
      totalFound += passedCounts[x] + tryHarderCounts[x];
      totalMustPass += testResult.getMustPassCount() + testResult.getTryHarderCount();
      totalMisread += misreadCounts[x] + tryHarderMisreadCounts[x];
      totalMaxMisread += testResult.getMaxMisreads() + testResult.getMaxTryHarderMisreads();
    }
    // Each image is attempted twice per rotation (normal + try harder).
    int totalTests = imageFiles.size() * testCount * 2;
    log.info(String.format("Decoded %d images out of %d (%d%%, %d required)",
                           totalFound, totalTests, totalFound * 100 / totalTests, totalMustPass));
    // Warn in both directions: thresholds that are too lax hide regressions just as
    // thresholds that are too strict cause false failures.
    if (totalFound > totalMustPass) {
      log.warning(String.format("+++ Test too lax by %d images", totalFound - totalMustPass));
    } else if (totalFound < totalMustPass) {
      log.warning(String.format("--- Test failed by %d images", totalMustPass - totalFound));
    }
    if (totalMisread < totalMaxMisread) {
      log.warning(String.format("+++ Test expects too many misreads by %d images", totalMaxMisread - totalMisread));
    } else if (totalMisread > totalMaxMisread) {
      log.warning(String.format("--- Test had too many misreads by %d images", totalMisread - totalMaxMisread));
    }
    // Then run through again and assert if any failed
    for (int x = 0; x < testCount; x++) {
      TestResult testResult = testResults.get(x);
      String label = "Rotation " + testResult.getRotation() + " degrees: Too many images failed";
      assertTrue(label,
                 passedCounts[x] >= testResult.getMustPassCount());
      assertTrue("Try harder, " + label,
                 tryHarderCounts[x] >= testResult.getTryHarderCount());
      label = "Rotation " + testResult.getRotation() + " degrees: Too many images misread";
      assertTrue(label,
                 misreadCounts[x] <= testResult.getMaxMisreads());
      assertTrue("Try harder, " + label,
                 tryHarderMisreadCounts[x] <= testResult.getMaxTryHarderMisreads());
    }
  }
  /**
   * Attempts a single decode and compares format, text, and metadata to expectations.
   *
   * @param source the binarized image to decode
   * @param rotation rotation used (logging only)
   * @param expectedText exact expected decoded content
   * @param expectedMetadata expected ResultMetadataType entries (string-keyed)
   * @param tryHarder whether to set the TRY_HARDER hint
   * @return true if format, text, and all expected metadata match; false on any mismatch
   * @throws ReaderException if the barcode could not be detected/decoded at all
   */
  private boolean decode(BinaryBitmap source,
                         float rotation,
                         String expectedText,
                         Map<?,?> expectedMetadata,
                         boolean tryHarder) throws ReaderException {
    String suffix = String.format(" (%srotation: %d)", tryHarder ? "try harder, " : "", (int) rotation);
    Map<DecodeHintType,Object> hints = new EnumMap<>(DecodeHintType.class);
    if (tryHarder) {
      hints.put(DecodeHintType.TRY_HARDER, Boolean.TRUE);
    }
    // Try in 'pure' mode mostly to exercise PURE_BARCODE code paths for exceptions;
    // not expected to pass, generally
    Result result = null;
    try {
      Map<DecodeHintType,Object> pureHints = new EnumMap<>(hints);
      pureHints.put(DecodeHintType.PURE_BARCODE, Boolean.TRUE);
      result = barcodeReader.decode(source, pureHints);
    } catch (ReaderException re) {
      // continue
    }
    if (result == null) {
      // Pure mode failed; decode normally (may throw ReaderException to the caller).
      result = barcodeReader.decode(source, hints);
    }
    if (expectedFormat != result.getBarcodeFormat()) {
      log.info(String.format("Format mismatch: expected '%s' but got '%s'%s",
                             expectedFormat, result.getBarcodeFormat(), suffix));
      return false;
    }
    String resultText = result.getText();
    if (!expectedText.equals(resultText)) {
      log.info(String.format("Content mismatch: expected '%s' but got '%s'%s",
                             expectedText, resultText, suffix));
      return false;
    }
    // Every expected metadata entry must be present and equal; extra actual entries are ignored.
    Map<ResultMetadataType,?> resultMetadata = result.getResultMetadata();
    for (Map.Entry<?,?> metadatum : expectedMetadata.entrySet()) {
      ResultMetadataType key = ResultMetadataType.valueOf(metadatum.getKey().toString());
      Object expectedValue = metadatum.getValue();
      Object actualValue = resultMetadata == null ? null : resultMetadata.get(key);
      if (!expectedValue.equals(actualValue)) {
        log.info(String.format("Metadata mismatch for key '%s': expected '%s' but got '%s'",
                               key, expectedValue, actualValue));
        return false;
      }
    }
    return true;
  }
  /**
   * Reads an entire file into a String with the given charset, warning when the
   * content ends in a newline (a frequent cause of spurious content mismatches).
   */
  protected static String readFileAsString(Path file, Charset charset) throws IOException {
    String stringContents = new String(Files.readAllBytes(file), charset);
    if (stringContents.endsWith("\n")) {
      log.info("String contents of file " + file + " end with a newline. " +
               "This may not be intended and cause a test failure");
    }
    return stringContents;
  }
  /**
   * Returns a copy of the image rotated clockwise by the given degrees, enlarged so the
   * rotated content fits; returns the original unchanged for a 0-degree rotation.
   */
  protected static BufferedImage rotateImage(BufferedImage original, float degrees) {
    if (degrees == 0.0f) {
      return original;
    }
    switch (original.getType()) {
      case BufferedImage.TYPE_BYTE_INDEXED:
      case BufferedImage.TYPE_BYTE_BINARY:
        // Convert indexed/binary images to ARGB before transforming — presumably
        // because AffineTransformOp does not handle those source types well.
        BufferedImage argb = new BufferedImage(original.getWidth(),
                                               original.getHeight(),
                                               BufferedImage.TYPE_INT_ARGB);
        Graphics g = argb.createGraphics();
        g.drawImage(original, 0, 0, null);
        g.dispose();
        original = argb;
        break;
    }
    double radians = Math.toRadians(degrees);
    // Transform simply to find out the new bounding box (don't actually run the image through it)
    AffineTransform at = new AffineTransform();
    at.rotate(radians, original.getWidth() / 2.0, original.getHeight() / 2.0);
    BufferedImageOp op = new AffineTransformOp(at, AffineTransformOp.TYPE_BICUBIC);
    RectangularShape r = op.getBounds2D(original);
    int width = (int) Math.ceil(r.getWidth());
    int height = (int) Math.ceil(r.getHeight());
    // Real transform, now that we know the size of the new image and how to translate after we rotate
    // to keep it centered
    at = new AffineTransform();
    at.rotate(radians, width / 2.0, height / 2.0);
    at.translate((width - original.getWidth()) / 2.0,
                 (height - original.getHeight()) / 2.0);
    op = new AffineTransformOp(at, AffineTransformOp.TYPE_BICUBIC);
    return op.filter(original, new BufferedImage(width, height, original.getType()));
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.filters;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Enumeration;
import jakarta.servlet.FilterChain;
import jakarta.servlet.GenericFilter;
import jakarta.servlet.ServletException;
import jakarta.servlet.ServletRequest;
import jakarta.servlet.ServletResponse;
import jakarta.servlet.http.Cookie;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
/**
* <p>Implementation of a Filter that logs interesting contents from the
* specified Request (before processing) and the corresponding Response
* (after processing). It is especially useful in debugging problems
* related to headers and cookies.</p>
*
* <p>When using this Filter, it is strongly recommended that the
* <code>org.apache.catalina.filter.RequestDumperFilter</code> logger is
* directed to a dedicated file and that the
* <code>org.apache.juli.VerbatimFormatter</code> is used.</p>
*
* @author Craig R. McClanahan
*/
public class RequestDumperFilter extends GenericFilter {
    private static final long serialVersionUID = 1L;
    // Placeholder logged for request attributes when the request is not HTTP.
    private static final String NON_HTTP_REQ_MSG =
        "Not available. Non-http request.";
    // Placeholder logged for response attributes when the response is not HTTP.
    private static final String NON_HTTP_RES_MSG =
        "Not available. Non-http response.";
    // Per-thread timestamp cache: SimpleDateFormat is not thread-safe, and the
    // formatted string is reused within the same second (see getTimestamp()).
    private static final ThreadLocal<Timestamp> timestamp = ThreadLocal.withInitial(Timestamp::new);
    // Log must be non-static as loggers are created per class-loader and this
    // Filter may be used in multiple class loaders
    private transient Log log = LogFactory.getLog(RequestDumperFilter.class);
    /**
     * Log the interesting request parameters, invoke the next Filter in the
     * sequence, and log the interesting response parameters.
     *
     * @param request  The servlet request to be processed
     * @param response The servlet response to be created
     * @param chain    The filter chain being processed
     *
     * @exception IOException if an input/output error occurs
     * @exception ServletException if a servlet error occurs
     */
    @Override
    public void doFilter(ServletRequest request, ServletResponse response,
            FilterChain chain)
        throws IOException, ServletException {
        // HTTP-specific views of the request/response; left null for non-HTTP
        // traffic, in which case the NON_HTTP_* placeholders are logged instead.
        HttpServletRequest hRequest = null;
        HttpServletResponse hResponse = null;
        if (request instanceof HttpServletRequest) {
            hRequest = (HttpServletRequest) request;
        }
        if (response instanceof HttpServletResponse) {
            hResponse = (HttpServletResponse) response;
        }
        // Log pre-service information
        doLog("START TIME        ", getTimestamp());
        if (hRequest == null) {
            doLog("        requestURI", NON_HTTP_REQ_MSG);
            doLog("          authType", NON_HTTP_REQ_MSG);
        } else {
            doLog("        requestURI", hRequest.getRequestURI());
            doLog("          authType", hRequest.getAuthType());
        }
        doLog(" characterEncoding", request.getCharacterEncoding());
        doLog("     contentLength",
                Long.toString(request.getContentLengthLong()));
        doLog("       contentType", request.getContentType());
        if (hRequest == null) {
            doLog("       contextPath", NON_HTTP_REQ_MSG);
            doLog("            cookie", NON_HTTP_REQ_MSG);
            doLog("            header", NON_HTTP_REQ_MSG);
        } else {
            doLog("       contextPath", hRequest.getContextPath());
            Cookie cookies[] = hRequest.getCookies();
            if (cookies != null) {
                for (Cookie cookie : cookies) {
                    doLog("            cookie", cookie.getName() +
                            "=" + cookie.getValue());
                }
            }
            // One log line per header value; multi-valued headers produce several lines.
            Enumeration<String> hnames = hRequest.getHeaderNames();
            while (hnames.hasMoreElements()) {
                String hname = hnames.nextElement();
                Enumeration<String> hvalues = hRequest.getHeaders(hname);
                while (hvalues.hasMoreElements()) {
                    String hvalue = hvalues.nextElement();
                    doLog("            header", hname + "=" + hvalue);
                }
            }
        }
        doLog("            locale", request.getLocale().toString());
        if (hRequest == null) {
            doLog("            method", NON_HTTP_REQ_MSG);
        } else {
            doLog("            method", hRequest.getMethod());
        }
        // One log line per parameter; multi-valued parameters are comma-joined.
        Enumeration<String> pnames = request.getParameterNames();
        while (pnames.hasMoreElements()) {
            String pname = pnames.nextElement();
            String pvalues[] = request.getParameterValues(pname);
            StringBuilder result = new StringBuilder(pname);
            result.append('=');
            for (int i = 0; i < pvalues.length; i++) {
                if (i > 0) {
                    result.append(", ");
                }
                result.append(pvalues[i]);
            }
            doLog("         parameter", result.toString());
        }
        if (hRequest == null) {
            doLog("          pathInfo", NON_HTTP_REQ_MSG);
        } else {
            doLog("          pathInfo", hRequest.getPathInfo());
        }
        doLog("          protocol", request.getProtocol());
        if (hRequest == null) {
            doLog("       queryString", NON_HTTP_REQ_MSG);
        } else {
            doLog("       queryString", hRequest.getQueryString());
        }
        doLog("        remoteAddr", request.getRemoteAddr());
        doLog("        remoteHost", request.getRemoteHost());
        if (hRequest == null) {
            doLog("        remoteUser", NON_HTTP_REQ_MSG);
            doLog("requestedSessionId", NON_HTTP_REQ_MSG);
        } else {
            doLog("        remoteUser", hRequest.getRemoteUser());
            doLog("requestedSessionId", hRequest.getRequestedSessionId());
        }
        doLog("            scheme", request.getScheme());
        doLog("        serverName", request.getServerName());
        doLog("        serverPort",
                Integer.toString(request.getServerPort()));
        if (hRequest == null) {
            doLog("       servletPath", NON_HTTP_REQ_MSG);
        } else {
            doLog("       servletPath", hRequest.getServletPath());
        }
        doLog("          isSecure",
                Boolean.valueOf(request.isSecure()).toString());
        doLog("------------------",
                "--------------------------------------------");
        // Perform the request
        chain.doFilter(request, response);
        // Log post-service information
        doLog("------------------",
                "--------------------------------------------");
        if (hRequest == null) {
            doLog("          authType", NON_HTTP_REQ_MSG);
        } else {
            doLog("          authType", hRequest.getAuthType());
        }
        doLog("       contentType", response.getContentType());
        if (hResponse == null) {
            doLog("            header", NON_HTTP_RES_MSG);
        } else {
            // One log line per response header value.
            Iterable<String> rhnames = hResponse.getHeaderNames();
            for (String rhname : rhnames) {
                Iterable<String> rhvalues = hResponse.getHeaders(rhname);
                for (String rhvalue : rhvalues) {
                    doLog("            header", rhname + "=" + rhvalue);
                }
            }
        }
        if (hRequest == null) {
            doLog("        remoteUser", NON_HTTP_REQ_MSG);
        } else {
            doLog("        remoteUser", hRequest.getRemoteUser());
        }
        if (hResponse == null) {
            doLog("            status", NON_HTTP_RES_MSG);
        } else {
            doLog("            status",
                    Integer.toString(hResponse.getStatus()));
        }
        doLog("END TIME          ", getTimestamp());
        doLog("==================",
                "============================================");
    }
    /**
     * Writes one "attribute=value" line at INFO level, prefixed with the current
     * thread name so interleaved concurrent requests can be told apart in the log.
     */
    private void doLog(String attribute, String value) {
        StringBuilder sb = new StringBuilder(80);
        sb.append(Thread.currentThread().getName());
        sb.append(' ');
        sb.append(attribute);
        sb.append('=');
        sb.append(value);
        log.info(sb.toString());
    }
    /**
     * Returns the current time formatted as "dd-MMM-yyyy HH:mm:ss". The formatted
     * string is cached per thread and only re-formatted once the cached second has
     * fully elapsed; the refreshed time is truncated to the whole second.
     */
    private String getTimestamp() {
        Timestamp ts = timestamp.get();
        long currentTime = System.currentTimeMillis();
        // Cached value stays valid for the remaining 999 ms of its base second.
        if ((ts.date.getTime() + 999) < currentTime) {
            ts.date.setTime(currentTime - (currentTime % 1000));
            ts.update();
        }
        return ts.dateString;
    }
    /*
     * Log objects are not Serializable but this Filter is because it extends
     * GenericFilter. Tomcat won't serialize a Filter but in case something else
     * does...
     */
    private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException {
        ois.defaultReadObject();
        log = LogFactory.getLog(RequestDumperFilter.class);
    }
    /** Mutable per-thread holder pairing a Date with its pre-formatted string. */
    private static final class Timestamp {
        private final Date date = new Date(0);
        private final SimpleDateFormat format =
            new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss");
        private String dateString = format.format(date);
        // Re-formats dateString after date has been mutated by getTimestamp().
        private void update() {
            dateString = format.format(date);
        }
    }
}
| |
package org.example.fogbeam.blackboard;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
import java.util.concurrent.BlockingQueue;
import org.example.fogbeam.blackboard.agent.AIML_InterpreterAgent;
import org.example.fogbeam.blackboard.agent.AtCommandAgent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * The "executive" of the blackboard system: it consumes raw user input from a queue,
 * posts each message onto the shared {@link Blackboard} as part of a Conversation, and
 * observes the blackboard/conversations so it can select the highest-confidence agent
 * response to emit.
 *
 * Fixes over the previous revision:
 * - InterruptedException is no longer swallowed: the interrupt status is restored and
 *   the loop exits, so the thread can actually be shut down while blocked in take().
 * - Removed the dead null-check after BlockingQueue.take(), which never returns null.
 * - The shutdown log message named the wrong class ("DispatcherRunnable").
 * - The response-confidence threshold is now a named constant.
 */
public class ExecutiveRunnable implements Runnable,Observer
{
    Logger logger = LoggerFactory.getLogger( ExecutiveRunnable.class );

    /** Minimum confidence score a frame must exceed to be emitted as the response. */
    private static final double RESPONSE_CONFIDENCE_THRESHOLD = 35.0;

    /** Loop guard for run(); volatile because stop() is called from another thread. */
    private volatile boolean keepRunning = true;

    /** Incoming user messages; run() blocks on this queue. */
    private BlockingQueue<String> inputMessageQueue;

    /** Shared blackboard observed by the agents and by this executive. */
    private Blackboard blackboard = new Blackboard();

    /** Most recent response selected by process(); null until one has been chosen. */
    private String response = null;

    /**
     * Wires the agents onto the blackboard and registers this executive as an
     * observer so it is notified when new Conversations appear.
     *
     * @param inputMessageQueue queue of raw user input messages to consume
     */
    public ExecutiveRunnable( final BlockingQueue<String> inputMessageQueue )
    {
        this.inputMessageQueue = inputMessageQueue;
        this.blackboard.addObserver( new AtCommandAgent());
        this.blackboard.addObserver( new AIML_InterpreterAgent() );
        this.blackboard.addObserver( this );
    }

    /** Enqueues a message without blocking; silently dropped if the queue is full. */
    public void offerMessage( final String message )
    {
        this.inputMessageQueue.offer(message);
    }

    // The executive has to monitor the blackboard and look for the
    // available responses. If there's at least one frame that meets the minimum
    // threshold to send, use the highest scoring such frame for our response.
    // if not, keep waiting until the timeout to give up and say "I don't know" (or whatever)
    // additionally, the executive should always wait at least some minimum period to
    // respond, so that a barely-adequate response from a faster responding agent doesn't
    // trump a higher-scoring response that takes longer to arrive on the blackboard.
    /** @return the most recently selected response, or null if none has been chosen yet */
    public String getResponse()
    {
        return response;
    }

    /**
     * Main loop: blocks on the input queue and posts each message onto the current
     * (or a new) Conversation on the blackboard. Exits when stop() is called or the
     * thread is interrupted.
     */
    @Override
    public void run()
    {
        while( keepRunning )
        {
            try
            {
                // Blocks until input is available; take() never returns null, so
                // only an empty string needs to be skipped.
                String input = inputMessageQueue.take();
                if( input.isEmpty() )
                {
                    continue;
                }
                logger.info( "Executive received message: " + input );
                // NOTE: for now we cheat and assume at most one Conversation at a
                // time; a multi-conversation design would need a way to route each
                // input message to the right conversation.
                List<Conversation> extantConversations = blackboard.getExtantConversations();
                if( extantConversations == null || extantConversations.isEmpty() )
                {
                    logger.info( "Starting new Conversation" );
                    Conversation conversation = new Conversation();
                    BlackboardFrame newFrame = new SimpleBlackboardFrame(input);
                    newFrame.setSourceTag(this.getClass().getName());
                    newFrame.setInput( true );
                    // Offer the conversation to the blackboard before adding the frame,
                    // presumably so observers are registered in time to see it — keep
                    // this ordering.
                    blackboard.offer( conversation );
                    conversation.addMessage( newFrame );
                }
                else
                {
                    logger.info( "Found existing Conversation" );
                    // Use the most recently created conversation (single-conversation cheat).
                    Conversation conversation = extantConversations.get(extantConversations.size()-1);
                    BlackboardFrame newFrame = new SimpleBlackboardFrame(input);
                    newFrame.setSourceTag(this.getClass().getName());
                    newFrame.setInput( true );
                    conversation.setPaused(false); // resume this conversation
                    conversation.addMessage( newFrame );
                }
            }
            catch (InterruptedException e)
            {
                // Restore the interrupt status and exit the loop. Swallowing the
                // exception (as before) made the thread unstoppable while blocked in
                // take(); restoring the flag without exiting would make take() throw
                // again immediately and busy-spin.
                Thread.currentThread().interrupt();
                break;
            }
        }
        logger.info( "ExecutiveRunnable stopped..." );
    }

    /** Asks the main loop to exit; takes effect once the thread unblocks from take(). */
    public void stop()
    {
        this.keepRunning = false;
    }

    /**
     * Scans the conversation backwards from the newest frame to the last frame this
     * executive itself posted, and if the best agent-output frame in that window
     * exceeds RESPONSE_CONFIDENCE_THRESHOLD, publishes it as the response and pauses
     * the conversation.
     *
     * TODO: find a way to ignore "left over" frames from an earlier part of the
     * conversation that show up too late to be useful, and a way to mark a
     * Conversation "completed" and remove it from the Blackboard.
     */
    protected void process( final Conversation conversation, final BlackboardFrame frame )
    {
        List<BlackboardFrame> frames = conversation.getFrames();
        if( frames == null )
        {
            throw new RuntimeException( "no frames list in Conversation");
        }
        // Walk backwards until we encounter the first frame that we sent; track the
        // highest-confidence output frame seen within that window.
        BlackboardFrame currentHighScore = null;
        for( int i = ( frames.size() - 1 ); i >= 0; i-- )
        {
            BlackboardFrame candidateFrame = frames.get(i);
            if( candidateFrame == null )
            {
                throw new RuntimeException( "Error getting candidate frame from frame list!");
            }
            if( candidateFrame.getSourceTag().equals(this.getClass().getName()))
            {
                // Reached a frame we sent: emit the best candidate (if good enough) and stop.
                if( currentHighScore != null && currentHighScore.getConfidence() > RESPONSE_CONFIDENCE_THRESHOLD )
                {
                    logger.info( "ExecutiveRunnable - setting response!" );
                    response = currentHighScore.getContent();
                    conversation.setPaused(true);
                }
                break;
            }
            else if( candidateFrame.isOutput() )
            {
                if( currentHighScore == null
                        || candidateFrame.getConfidence() > currentHighScore.getConfidence() )
                {
                    currentHighScore = candidateFrame;
                }
            }
        }
    }

    /**
     * Observer callback. A (Blackboard, Conversation) pair means a new conversation was
     * posted: subscribe to it. A (Conversation, BlackboardFrame) pair means a new frame
     * arrived: process it unless we posted it ourselves. Anything else is a bug.
     */
    @Override
    public void update(Observable observable, Object arg)
    {
        logger.info( "ExecutiveRunnable - got update from observable" );
        if( observable instanceof Blackboard && arg instanceof Conversation )
        {
            logger.info( "ExecutiveRunnable - registering as Observer for Conversation" );
            Conversation conversation = (Conversation)arg;
            conversation.addObserver(this);
        }
        else if( observable instanceof Conversation && arg instanceof BlackboardFrame )
        {
            logger.info( "ExecutiveRunnable - processing Conversation" );
            BlackboardFrame frame = (BlackboardFrame) arg;
            if( !frame.getSourceTag().equals(this.getClass().getName()))
            {
                process( (Conversation)observable, frame );
            }
        }
        else
        {
            throw new IllegalArgumentException( "update() received illegal parameter combination: "
                    + observable.getClass().getName() + " | "
                    + arg.getClass().getName() );
        }
    }
}
| |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kns.datadictionary;
import org.apache.commons.lang.StringUtils;
import org.kuali.rice.krad.bo.BusinessObject;
import org.kuali.rice.krad.datadictionary.DataDictionary;
import org.kuali.rice.krad.datadictionary.exception.AttributeValidationException;
import org.kuali.rice.krad.datadictionary.exception.DuplicateEntryException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
The maintainableCollection element defines a set of data fields, nested
collections, summaryFields and duplicateIdentificationsFields.
JSTL: maintainableCollection is a Map which is accessed using a
key of the name of the maintainableCollection. Each entry
contains the following keys and values:
**Key** **Value**
collection true
name name of collection
dataObjectClass name of collection class
* name is the name of the collection
* dataObjectClass is the class name of the objects in the collection
* sourceClassName is the class name of the BO used in a lookup
* sourceAttributeName is the name of the attribute which returns the collection
* includeAddLine is true if the user is given the ability to add multiple lines.
* includeMultipleLookupLine whether to render a quickfinder icon for multiple value lookups on the collection. Defaults to true
* summaryTitle is the label of the summary
* attributeToHighlightOnDuplicateKey is the name of an attribute to highlight
if two records in the collection are the same based on the
duplicateIdentificationFields element.
*
*/
@Deprecated
public class MaintainableCollectionDefinition extends MaintainableItemDefinition implements CollectionDefinitionI {
private static final long serialVersionUID = -5617868782623587053L;
// logger
//private static Log LOG = LogFactory.getLog(MaintainableCollectionDefinition.class);
protected Class<? extends BusinessObject> businessObjectClass;
protected Class<? extends BusinessObject> sourceClassName;
protected String summaryTitle;
protected String attributeToHighlightOnDuplicateKey;
protected boolean includeAddLine = true;
protected boolean includeMultipleLookupLine = true;
private boolean alwaysAllowCollectionDeletion = false;
protected Map<String,MaintainableFieldDefinition> maintainableFieldMap = new HashMap<String, MaintainableFieldDefinition>();
protected Map<String,MaintainableCollectionDefinition> maintainableCollectionMap = new HashMap<String, MaintainableCollectionDefinition>();
protected Map<String,MaintainableFieldDefinition> summaryFieldMap = new HashMap<String, MaintainableFieldDefinition>();
protected Map<String,MaintainableFieldDefinition> duplicateIdentificationFieldMap = new HashMap<String, MaintainableFieldDefinition>();
protected List<MaintainableFieldDefinition> maintainableFields = new ArrayList<MaintainableFieldDefinition>();
protected List<MaintainableCollectionDefinition> maintainableCollections = new ArrayList<MaintainableCollectionDefinition>();
protected List<MaintainableFieldDefinition> summaryFields = new ArrayList<MaintainableFieldDefinition>();
protected List<MaintainableFieldDefinition> duplicateIdentificationFields = new ArrayList<MaintainableFieldDefinition>();
    /** Default constructor required by the data dictionary bean framework. */
    public MaintainableCollectionDefinition() {}
    /**
     * @return the BusinessObject class used for each row of this collection
     */
    public Class<? extends BusinessObject> getBusinessObjectClass() {
        return businessObjectClass;
    }
    /**
     * The BusinessObject class used for each row of this collection.
     *
     * @param businessObjectClass the row type; must not be null
     * @throws IllegalArgumentException if businessObjectClass is null
     */
    public void setBusinessObjectClass(Class<? extends BusinessObject> businessObjectClass) {
        if (businessObjectClass == null) {
            throw new IllegalArgumentException("invalid (null) dataObjectClass");
        }
        this.businessObjectClass = businessObjectClass;
    }
    /**
     * @return Collection of all lookupField MaintainableFieldDefinitions associated with this MaintainableCollectionDefinition, in
     * the order in which they were added
     */
    public List<MaintainableFieldDefinition> getMaintainableFields() {
        return maintainableFields;
    }
    /** @return the same field list, typed for the CollectionDefinitionI interface */
    public List<? extends FieldDefinitionI> getFields() {
        return maintainableFields;
    }
/**
* Directly validate simple fields, call completeValidation on Definition fields.
*
* @see org.kuali.rice.krad.datadictionary.DataDictionaryDefinition#completeValidation(java.lang.Class, java.lang.Object)
*/
public void completeValidation(Class rootBusinessObjectClass, Class otherBusinessObjectClass) {
if (!DataDictionary.isCollectionPropertyOf(rootBusinessObjectClass, getName())) {
throw new AttributeValidationException("unable to find collection named '" + getName() + "' in rootBusinessObjectClass '" + rootBusinessObjectClass.getName() + "' (" + "" + ")");
}
if (dissallowDuplicateKey()) {
if (!DataDictionary.isPropertyOf(businessObjectClass, attributeToHighlightOnDuplicateKey)) {
throw new AttributeValidationException("unable to find attribute named '" + attributeToHighlightOnDuplicateKey + "'in dataObjectClass '" + businessObjectClass.getName() + "' of collection '" + getName() + "' in rootBusinessObjectClass '" + rootBusinessObjectClass.getName() + "' (" + "" + ")");
}
}
for (MaintainableFieldDefinition maintainableField : maintainableFields ) {
maintainableField.completeValidation(businessObjectClass, null);
}
for (MaintainableCollectionDefinition maintainableCollection : maintainableCollections ) {
maintainableCollection.completeValidation(businessObjectClass, null);
}
// for (MaintainableFieldDefinition summaryField : summaryFields ) {
// summaryField.completeValidation(dataObjectClass, null, validationCompletionUtils);
// }
//
// for (MaintainableFieldDefinition identifierField : duplicateIdentificationFields) {
// identifierField.completeValidation(dataObjectClass, null, validationCompletionUtils);
// }
}
    /**
     * @see java.lang.Object#toString()
     */
    public String toString() {
        return "MaintainableCollectionDefinition for " + getName();
    }
    /** @return the BusinessObject class used for multiple value lookups on this collection */
    public Class<? extends BusinessObject> getSourceClassName() {
        return sourceClassName;
    }
    /** BusinessObject class which should be used for multiple value lookups for this collection.
     */
    public void setSourceClassName(Class<? extends BusinessObject> sourceClass) {
        this.sourceClassName = sourceClass;
    }
    /** @return true if an "add" line should be rendered at the top of this collection (defaults to true) */
    public boolean getIncludeAddLine() {
        return includeAddLine;
    }
    /** Control whether an "add" line should be included at the top of this collection. */
    public void setIncludeAddLine(boolean includeAddLine) {
        this.includeAddLine = includeAddLine;
    }
    /**
     * @return Collection of all lookupField MaintainableCollectionDefinitions associated with this
     * MaintainableCollectionDefinition, in the order in which they were added
     */
    public List<MaintainableCollectionDefinition> getMaintainableCollections() {
        return maintainableCollections;
    }
    /** @return the same nested-collection list, typed for the CollectionDefinitionI interface */
    public List<? extends CollectionDefinitionI> getCollections() {
        return maintainableCollections;
    }
    /**
     * @return Collection of all SummaryFieldDefinitions associated with this SummaryFieldDefinition, in the order in which they
     * were added
     */
    public List<? extends FieldDefinitionI> getSummaryFields() {
        return summaryFields;
    }
    /** @return true if a summary field with the given name has been registered */
    public boolean hasSummaryField(String key) {
        return summaryFieldMap.containsKey(key);
    }
    /** @return true if the multiple-value-lookup line should be rendered above this collection (defaults to true) */
    public boolean isIncludeMultipleLookupLine() {
        return includeMultipleLookupLine;
    }
    /** Set whether the multiple lookup line (and link) should appear above this collection. */
    public void setIncludeMultipleLookupLine(boolean includeMultipleLookupLine) {
        this.includeMultipleLookupLine = includeMultipleLookupLine;
    }
    /** @return the label of the summary */
    public String getSummaryTitle() {
        return summaryTitle;
    }
    /**
     summaryTitle is the label of the summary
     */
    public void setSummaryTitle(String overrideSummaryName) {
        this.summaryTitle = overrideSummaryName;
    }
    /** @return the attribute highlighted when duplicate rows are detected, or null/blank when unused */
    public String getAttributeToHighlightOnDuplicateKey() {
        return attributeToHighlightOnDuplicateKey;
    }
    /**
     attributeToHighlightOnDuplicateKey is the name of an attribute to highlight
     if two records in the collection are the same based on the
     duplicateIdentificationFields element.
     */
    public void setAttributeToHighlightOnDuplicateKey(String attributeToHighlightOnDuplicate) {
        this.attributeToHighlightOnDuplicateKey = attributeToHighlightOnDuplicate;
    }
    /**
     * @return true when duplicate detection is configured, i.e. an attribute-to-highlight
     * has been set. (The misspelled name is part of the public API and cannot change.)
     */
    public boolean dissallowDuplicateKey() {
        return StringUtils.isNotBlank(getAttributeToHighlightOnDuplicateKey());
    }
    /** @return the fields used to decide whether two collection rows are duplicates */
    public List<MaintainableFieldDefinition> getDuplicateIdentificationFields() {
        return duplicateIdentificationFields;
    }
/**
 * Replaces the list of fields to include in this collection.
 *
 * <p>The entire argument is validated before any state is modified, so a
 * rejected list leaves this definition unchanged. (The previous version
 * cleared the name index before validating, which left it partially
 * populated when a null entry or duplicate name was detected mid-loop.)
 *
 * @param maintainableFields the fields to include, in display order
 * @throws IllegalArgumentException if any entry is null
 * @throws DuplicateEntryException if two entries share the same field name
 */
public void setMaintainableFields(List<MaintainableFieldDefinition> maintainableFields) {
    // Pass 1: validate without mutating anything.
    for (int i = 0; i < maintainableFields.size(); i++) {
        MaintainableFieldDefinition maintainableField = maintainableFields.get(i);
        if (maintainableField == null) {
            throw new IllegalArgumentException("invalid (null) maintainableField");
        }
        String fieldName = maintainableField.getName();
        for (int j = 0; j < i; j++) {
            String earlierName = maintainableFields.get(j).getName();
            // Null-safe name comparison, matching the Map-key semantics of the
            // original containsKey() check.
            if (fieldName == null ? earlierName == null : fieldName.equals(earlierName)) {
                throw new DuplicateEntryException("duplicate fieldName entry for field '" + fieldName + "'");
            }
        }
    }
    // Pass 2: the list is known-good; rebuild the name index and store it.
    maintainableFieldMap.clear();
    for (MaintainableFieldDefinition maintainableField : maintainableFields) {
        maintainableFieldMap.put(maintainableField.getName(), maintainableField);
    }
    this.maintainableFields = maintainableFields;
}
/**
 * Replaces the list of sub-collections to include in this collection.
 *
 * <p>The entire argument is validated before any state is modified, so a
 * rejected list leaves this definition unchanged. (The previous version
 * cleared the name index before validating, which left it partially
 * populated when a null entry or duplicate name was detected mid-loop.)
 *
 * @param maintainableCollections the sub-collections to include, in order
 * @throws IllegalArgumentException if any entry is null
 * @throws DuplicateEntryException if two entries share the same name
 */
public void setMaintainableCollections(List<MaintainableCollectionDefinition> maintainableCollections) {
    // Pass 1: validate without mutating anything.
    for (int i = 0; i < maintainableCollections.size(); i++) {
        MaintainableCollectionDefinition maintainableCollection = maintainableCollections.get(i);
        if (maintainableCollection == null) {
            throw new IllegalArgumentException("invalid (null) maintainableCollection");
        }
        String fieldName = maintainableCollection.getName();
        for (int j = 0; j < i; j++) {
            String earlierName = maintainableCollections.get(j).getName();
            // Null-safe name comparison, matching the Map-key semantics of the
            // original containsKey() check.
            if (fieldName == null ? earlierName == null : fieldName.equals(earlierName)) {
                throw new DuplicateEntryException("duplicate fieldName entry for field '" + fieldName + "'");
            }
        }
    }
    // Pass 2: the list is known-good; rebuild the name index and store it.
    maintainableCollectionMap.clear();
    for (MaintainableCollectionDefinition maintainableCollection : maintainableCollections) {
        maintainableCollectionMap.put(maintainableCollection.getName(), maintainableCollection);
    }
    this.maintainableCollections = maintainableCollections;
}
/**
 * Replaces the set of summaryField elements for this collection.
 *
 * <p>The entire argument is validated before any state is modified, so a
 * rejected list leaves this definition unchanged. (The previous version
 * cleared the name index before validating, which left it partially
 * populated when a null entry or duplicate name was detected mid-loop.)
 *
 * @param summaryFields the summary fields, in display order
 * @throws IllegalArgumentException if any entry is null
 * @throws DuplicateEntryException if two entries share the same field name
 */
public void setSummaryFields(List<MaintainableFieldDefinition> summaryFields) {
    // Pass 1: validate without mutating anything.
    for (int i = 0; i < summaryFields.size(); i++) {
        MaintainableFieldDefinition summaryField = summaryFields.get(i);
        if (summaryField == null) {
            throw new IllegalArgumentException("invalid (null) summaryField");
        }
        String fieldName = summaryField.getName();
        for (int j = 0; j < i; j++) {
            String earlierName = summaryFields.get(j).getName();
            // Null-safe name comparison, matching the Map-key semantics of the
            // original containsKey() check.
            if (fieldName == null ? earlierName == null : fieldName.equals(earlierName)) {
                throw new DuplicateEntryException("duplicate fieldName entry for field '" + fieldName + "'");
            }
        }
    }
    // Pass 2: the list is known-good; rebuild the name index and store it.
    summaryFieldMap.clear();
    for (MaintainableFieldDefinition summaryField : summaryFields) {
        summaryFieldMap.put(summaryField.getName(), summaryField);
    }
    this.summaryFields = summaryFields;
}
/**
 * Defines the set of fields used to determine whether two records in the
 * collection are duplicates.
 *
 * <p>The entire argument is validated before any state is modified, so a
 * rejected list leaves this definition unchanged. (The previous version
 * cleared the name index before validating, which left it partially
 * populated when a null entry or duplicate name was detected mid-loop.)
 *
 * @param duplicateIdentificationFields the identifying fields, in order
 * @throws IllegalArgumentException if any entry is null
 * @throws DuplicateEntryException if two entries share the same field name
 */
public void setDuplicateIdentificationFields(List<MaintainableFieldDefinition> duplicateIdentificationFields) {
    // Pass 1: validate without mutating anything.
    for (int i = 0; i < duplicateIdentificationFields.size(); i++) {
        MaintainableFieldDefinition identifierField = duplicateIdentificationFields.get(i);
        if (identifierField == null) {
            throw new IllegalArgumentException("invalid (null) identifierField");
        }
        String fieldName = identifierField.getName();
        for (int j = 0; j < i; j++) {
            String earlierName = duplicateIdentificationFields.get(j).getName();
            // Null-safe name comparison, matching the Map-key semantics of the
            // original containsKey() check.
            if (fieldName == null ? earlierName == null : fieldName.equals(earlierName)) {
                throw new DuplicateEntryException("duplicate fieldName entry for field '" + fieldName + "'");
            }
        }
    }
    // Pass 2: the list is known-good; rebuild the name index and store it.
    duplicateIdentificationFieldMap.clear();
    for (MaintainableFieldDefinition identifierField : duplicateIdentificationFields) {
        duplicateIdentificationFieldMap.put(identifierField.getName(), identifierField);
    }
    this.duplicateIdentificationFields = duplicateIdentificationFields;
}
/**
 * @return {@code true} when deletion of this collection's lines is always
 *         permitted
 */
public boolean isAlwaysAllowCollectionDeletion() {
    return alwaysAllowCollectionDeletion;
}

/**
 * Sets whether deletion of this collection's lines is always permitted.
 *
 * @param alwaysAllowCollectionDeletion {@code true} to always allow deletion
 */
public void setAlwaysAllowCollectionDeletion(boolean alwaysAllowCollectionDeletion) {
    this.alwaysAllowCollectionDeletion = alwaysAllowCollectionDeletion;
}
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package sql.sqlCallback;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import com.gemstone.gemfire.cache.query.Struct;
import hydra.Log;
import hydra.RemoteTestModule;
import hydra.TestConfig;
import sql.SQLHelper;
import sql.SQLPrms;
import sql.SQLTest;
import sql.dmlStatements.DMLStmtIF;
import sql.sqlutil.ResultSetHelper;
import util.TestException;
/**
 * Hydra test driver exercising GemFireXD cache-writer callbacks: a writer is
 * attached per table via the SYS.ATTACH_WRITER system procedure (presumably
 * registering a write-through callback to the backend database — confirm
 * against GemFireXD docs), then DML operations are performed and the results
 * verified. The {@code HydraTask_*} static methods are entry points invoked
 * by the Hydra test framework.
 */
public class SQLWriterTest extends SQLTest {
    // Singleton shared by all Hydra task invocations in this JVM; created by
    // HydraTask_initialize().
    protected static SQLWriterTest sqlWriterTest;

    // JDBC URL of the backend (write-through) database; defaults to an
    // embedded Derby database named "test".
    public static String backendDB_url = TestConfig.tab().stringAt(SQLPrms.backendDB_url, "jdbc:derby:test");

    /** Hydra task: lazily creates the singleton test instance. */
    public static synchronized void HydraTask_initialize() {
        if (sqlWriterTest == null) {
            sqlWriterTest = new SQLWriterTest();
        }
    }

    /**
     * Hydra task: performs one randomized DML operation.
     * NOTE(review): assumes HydraTask_initialize() ran first, otherwise
     * sqlWriterTest is null — confirm the Hydra configuration orders these.
     */
    public static void HydraTask_doDMLOp() {
        sqlWriterTest.doDMLOp();
    }

    /**
     * Performs a single randomized DML operation (insert/update/delete/query,
     * chosen from SQLPrms.dmlOperations) against a random table, then commits
     * and closes both connections.
     *
     * @param dConn Derby (disc) connection; may be null when no Derby server
     *              is used
     * @param gConn GemFireXD connection; must not be null
     */
    protected void doDMLOp(Connection dConn, Connection gConn) {
        Log.getLogWriter().info("performing dmlOp, myTid is " + getMyTid());
        int table = dmlTables[random.nextInt(dmlTables.length)]; // get random table to perform dml
        DMLStmtIF dmlStmt = dmlFactory.createWriterDMLStmt(table); // dmlStmt of a table
        int upto = 10; // maximum records to be manipulated in one op
        int size = random.nextInt(upto); // NOTE(review): may be 0, i.e. a no-op batch
        if (setTx) size = 1; // avoid #43725, partial txn ops sync to derby but txn failed afterwards
        if (setCriticalHeap) resetCanceledFlag();
        if (setTx && isHATest) resetNodeFailureFlag();
        if (setTx && testEviction) resetEvictionConflictFlag();
        // perform the operation chosen by the test configuration
        String operation = TestConfig.tab().stringAt(SQLPrms.dmlOperations);
        if (operation.equals("insert"))
            dmlStmt.insert(dConn, gConn, size);
        else if (operation.equals("update"))
            dmlStmt.update(dConn, gConn, size);
        else if (operation.equals("delete"))
            dmlStmt.delete(dConn, gConn);
        else if (operation.equals("query")) {
            if (testUniqueKeys)
                dmlStmt.query(dConn, gConn); // query derby database throughout the test
            else
                dmlStmt.query(null, gConn); // verify at the end of the test, only process the gemfirexd resultSet
        }
        else
            throw new TestException("Unknown entry operation: " + operation);
        // Commit/close both connections. NOTE(review): if a dmlStmt call above
        // throws, the connections are not closed here — possible leak; confirm
        // whether the framework cleans up.
        try {
            if (dConn != null) {
                dConn.commit(); // derby connection is not null;
                closeDiscConnection(dConn);
                Log.getLogWriter().info("closed the disc connection");
            }
            gConn.commit();
            closeGFEConnection(gConn);
        } catch (SQLException se) {
            SQLHelper.handleSQLException(se);
        }
        Log.getLogWriter().info("done dmlOp");
    }

    /** Hydra task: populates every configured table through the writer path. */
    public static void HydraTask_populateTables() {
        sqlWriterTest.populateTables();
    }

    /**
     * Populates each DML table through the GemFireXD connection only.
     * NOTE(review): dConn is intentionally ignored — population goes through
     * gConn and the attached writer syncs to the backend; confirm.
     *
     * @param dConn Derby connection (unused here)
     * @param gConn GemFireXD connection
     */
    protected void populateTables(Connection dConn, Connection gConn) {
        //int initSize = random.nextInt(10)+1;
        //int initSize = 10;
        for (int i = 0; i < dmlTables.length; i++) {
            DMLStmtIF dmlStmt = dmlFactory.createWriterDMLStmt(dmlTables[i]);
            dmlStmt.populate(null, gConn);
            try {
                gConn.commit(); // commit per table so each table's rows sync independently
            } catch (SQLException se) {
                SQLHelper.handleSQLException(se);
            }
        }
    }

    /** Hydra task: attaches a writer callback to every configured table. */
    public static void HydraTask_createWriter() {
        sqlWriterTest.createWriters();
    }

    /** Attaches a writer to each table named in SQLPrms, committing after each. */
    protected void createWriters() {
        try {
            Connection conn = getGFEConnection();
            String[] tableNames = SQLPrms.getTableNames();
            for (String tableName : tableNames) {
                createWriter(conn, tableName);
                conn.commit();
            }
        } catch (SQLException se) {
            SQLHelper.handleSQLException(se);
        }
    }

    /**
     * Attaches the writer class for one table via SYS.ATTACH_WRITER.
     * The writer class name is derived from the table name, e.g.
     * "trade.customers" -> sql.sqlCallback.writer.CustomersWriter.
     *
     * @param conn GemFireXD connection
     * @param tableName fully qualified name in "schema.table" form
     * @throws SQLException if the system procedure call fails
     */
    protected void createWriter(Connection conn, String tableName) throws SQLException {
        String[] str = tableName.split("\\.");
        String schema = str[0];
        String table = str[1];
        // Capitalize the table name to form the writer class simple name.
        String tableWriter = table.substring(0, 1).toUpperCase() + table.substring(1) + "Writer";
        CallableStatement cs = conn.prepareCall("CALL SYS.ATTACH_WRITER(?,?,?,?,?)");
        cs.setString(1, schema);
        cs.setString(2, table);
        cs.setString(3, "sql.sqlCallback.writer." + tableWriter);
        cs.setString(4, backendDB_url);
        cs.setString(5, ""); // init-info argument, unused by these writers
        cs.execute();
        Log.getLogWriter().info("attach the writer " + tableWriter
                + " for " + tableName);
    }

    /** Hydra task: clears all tables and (on the ddl thread) verifies results. */
    public static void HydraTask_clearTables() {
        sqlWriterTest.clearTables();
    }

    /**
     * Clears the trade tables in FK-safe order through the GemFireXD
     * connection; only runs when a Derby server backs the test. The ddl
     * thread additionally verifies result sets afterwards.
     */
    protected void clearTables() {
        if (!hasDerbyServer) return;
        //if (getMyTid() > 6) return; /*work around #42237, which need performance test to track*/
        Connection gConn = getGFEConnection();
        clearTablesInOrder(null, gConn);
        commit(gConn);
        closeGFEConnection(gConn);
        if (getMyTid() == ddlThread)
            verifyResultSets();
    }

    /**
     * Deletes the trade tables child-first so foreign-key "delete restrict"
     * constraints are not violated.
     */
    protected void clearTablesInOrder(Connection dConn, Connection gConn) {
        clearTables(dConn, gConn, "trade", "buyorders");
        clearTables(dConn, gConn, "trade", "txhistory");
        clearTables(dConn, gConn, "trade", "sellorders");
        clearTables(dConn, gConn, "trade", "portfolio");
        clearTables(dConn, gConn, "trade", "networth");
        clearTables(dConn, gConn, "trade", "customers");
        clearTables(dConn, gConn, "trade", "securities");
    }

    /**
     * Deletes all records from every user table discovered via
     * sys.systables (excluding SYS schemas).
     */
    protected void clearTables(Connection dConn, Connection gConn) {
        /*try {
        ResultSet rs = gConn.createStatement().executeQuery("select tableschemaname, tablename "
        + "from sys.systables where tabletype = 'T' ");
        while (rs.next()) {
        String schemaName = rs.getString(1);
        String tableName = rs.getString(2);
        clearTables(dConn, gConn, schemaName, tableName);
        }
        */
        try {
            String sql = "select tableschemaname, tablename "
                    + "from sys.systables where tabletype = 'T' and tableschemaname not like 'SYS%'";
            ResultSet rs = gConn.createStatement().executeQuery(sql);
            Log.getLogWriter().info(sql);
            List<Struct> list = ResultSetHelper.asList(rs, false);
            Log.getLogWriter().info(ResultSetHelper.listToString(list));
            for (Struct e : list) {
                Object[] table = e.getFieldValues();
                clearTables(dConn, gConn, (String) table[0], (String) table[1]);
            }
        } catch (SQLException se) {
            SQLHelper.handleSQLException(se);
        }
        /* to reproduce #42307
        if (RemoteTestModule.getCurrentThread().getThreadId() == 0)
        sqlWriterTest.verifyResultSets();
        */
    }

    /**
     * Clears a single table, either via DELETE or (when testTruncate is
     * enabled) TRUNCATE on thread 0. Under transactions only the ddl thread
     * deletes, to avoid conflict exceptions (#43725). Expected SQL states
     * (FK restrict 23503, truncate restrict XCL48, txn conflict X0Z02) are
     * logged and tolerated.
     */
    protected void clearTables(Connection dConn, Connection gConn, String schema, String table) {
        boolean testTruncate = false; // due to #42307
        int gCount = 0;
        String delete = "delete from " + schema + "." + table;
        String truncate = "truncate table " + schema + "." + table;
        try {
            // work around truncate table issue such as #42377, #43272 etc
            if (setTx) {
                if (getMyTid() == ddlThread) {
                    Log.getLogWriter().info(delete);
                    gCount = gConn.createStatement().executeUpdate(delete);
                    Log.getLogWriter().info("gemfirexd deletes " + gCount + " rows from " + table);
                    // single thread execution to avoid conflict exception which could lead to #43725
                }
                return;
            }
            // for original non txn case
            if (RemoteTestModule.getCurrentThread().getThreadId() != 0) {
                if (testTruncate) return;
                Log.getLogWriter().info(delete);
                gCount = gConn.createStatement().executeUpdate(delete);
                Log.getLogWriter().info("gemfirexd deletes " + gCount + " rows from " + table);
            }
            else {
                if (testTruncate) {
                    Log.getLogWriter().info(truncate);
                    gCount = gConn.createStatement().executeUpdate(truncate);
                    Log.getLogWriter().info("gemfirexd truncate table returns " + gCount);
                }
            }
        } catch (SQLException se) {
            if (se.getSQLState().equalsIgnoreCase("23503")) {
                Log.getLogWriter().info("could not delete due to delete restrict in gfxd");
            } else if (se.getSQLState().equalsIgnoreCase("XCL48")) {
                Log.getLogWriter().info("could not truncate due to foreign key reference in gfxd");
            } else if (setTx && se.getSQLState().equalsIgnoreCase("X0Z02")) {
                Log.getLogWriter().info("Got expected conflict exception using txn");
                //TODO do not compare exception here --
                //derby may delete the rows but gfxd not
                //may need to add rollback derby op here.
                return; // skip the commit below on a txn conflict
            } else
                SQLHelper.handleSQLException(se);
        }
        commit(gConn);
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.devicefarm.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Represents a request to the list artifacts operation.
* </p>
*/
/**
 * <p>
 * Represents a request to the list artifacts operation.
 * </p>
 */
public class ListArtifactsRequest extends AmazonWebServiceRequest implements
        Serializable, Cloneable {

    /** The Run, Job, Suite, or Test ARN. */
    private String arn;

    /**
     * The artifacts' type. Allowed values include: FILE (the artifacts are
     * files), LOG (the artifacts are logs), SCREENSHOT (the artifacts are
     * screenshots).
     */
    private String type;

    /**
     * An identifier that was returned from the previous call to this
     * operation, which can be used to return the next set of items in the
     * list.
     */
    private String nextToken;

    /**
     * Sets the Run, Job, Suite, or Test ARN.
     *
     * @param arn the Run, Job, Suite, or Test ARN
     */
    public void setArn(String arn) {
        this.arn = arn;
    }

    /**
     * @return the Run, Job, Suite, or Test ARN
     */
    public String getArn() {
        return arn;
    }

    /**
     * Fluent variant of {@link #setArn(String)}.
     *
     * @param arn the Run, Job, Suite, or Test ARN
     * @return this request, so that method calls can be chained together
     */
    public ListArtifactsRequest withArn(String arn) {
        this.arn = arn;
        return this;
    }

    /**
     * Sets the artifacts' type. Allowed values include FILE, LOG and
     * SCREENSHOT.
     *
     * @param type the artifacts' type
     * @see ArtifactCategory
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * @return the artifacts' type (FILE, LOG or SCREENSHOT)
     * @see ArtifactCategory
     */
    public String getType() {
        return type;
    }

    /**
     * Fluent variant of {@link #setType(String)}.
     *
     * @param type the artifacts' type
     * @return this request, so that method calls can be chained together
     * @see ArtifactCategory
     */
    public ListArtifactsRequest withType(String type) {
        this.type = type;
        return this;
    }

    /**
     * Sets the artifacts' type from the enum form; stored as its string
     * representation.
     *
     * @param type the artifacts' type
     * @see ArtifactCategory
     */
    public void setType(ArtifactCategory type) {
        this.type = type.toString();
    }

    /**
     * Fluent variant of {@link #setType(ArtifactCategory)}.
     *
     * @param type the artifacts' type
     * @return this request, so that method calls can be chained together
     * @see ArtifactCategory
     */
    public ListArtifactsRequest withType(ArtifactCategory type) {
        setType(type);
        return this;
    }

    /**
     * Sets the pagination token returned by a previous call.
     *
     * @param nextToken identifier returned from the previous call, used to
     *        return the next set of items in the list
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return the pagination token returned from the previous call, used to
     *         return the next set of items in the list
     */
    public String getNextToken() {
        return nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken identifier returned from the previous call
     * @return this request, so that method calls can be chained together
     */
    public ListArtifactsRequest withNextToken(String nextToken) {
        this.nextToken = nextToken;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getArn() != null) {
            sb.append("Arn: ").append(getArn()).append(",");
        }
        if (getType() != null) {
            sb.append("Type: ").append(getType()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    // Null-safe equality helper for the equals() comparisons below.
    private static boolean eq(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListArtifactsRequest)) {
            return false;
        }
        ListArtifactsRequest that = (ListArtifactsRequest) obj;
        return eq(getArn(), that.getArn())
                && eq(getType(), that.getType())
                && eq(getNextToken(), that.getNextToken());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode
                + ((getArn() == null) ? 0 : getArn().hashCode());
        hashCode = prime * hashCode
                + ((getType() == null) ? 0 : getType().hashCode());
        hashCode = prime * hashCode
                + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return hashCode;
    }

    @Override
    public ListArtifactsRequest clone() {
        // AmazonWebServiceRequest supports clone(); narrow the result type.
        return (ListArtifactsRequest) super.clone();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TestIncrementsFromClientSide;
import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
/**
* Increments with some concurrency against a region to ensure we get the right answer.
* Test is parameterized to run the fast and slow path increments; if fast,
* HRegion.INCREMENT_FAST_BUT_NARROW_CONSISTENCY_KEY is true.
*
* <p>There is similar test up in TestAtomicOperation. It does a test where it has 100 threads
* doing increments across two column families all on one row and the increments are connected to
* prove atomicity on row.
*/
@Category(MediumTests.class)
public class TestRegionIncrement {
    private static final Log LOG = LogFactory.getLog(TestRegionIncrement.class);

    // Per-test method name, used to derive the table/region name.
    @Rule public TestName name = new TestName();

    // Category-based timeout that also dumps stuck threads on expiry.
    @Rule public final TestRule timeout =
        CategoryBasedTimeout.builder().withTimeout(this.getClass()).
            withLookingForStuckThread(true).build();

    private static HBaseTestingUtility TEST_UTIL;

    // Single family/qualifier used for every increment in these tests.
    private final static byte [] INCREMENT_BYTES = Bytes.toBytes("increment");

    private static final int THREAD_COUNT = 10;
    private static final int INCREMENT_COUNT = 10000;

    @Before
    public void setUp() throws Exception {
        TEST_UTIL = HBaseTestingUtility.createLocalHTU();
    }

    @After
    public void tearDown() throws Exception {
        TEST_UTIL.cleanupTestDir();
    }

    /**
     * Creates a local HRegion backed by an FSHLog WAL but with WAL writes
     * skipped (Durability.SKIP_WAL) so the tests measure only the
     * increment/MVCC path.
     */
    private HRegion getRegion(final Configuration conf, final String tableName) throws IOException {
        WAL wal = new FSHLog(FileSystem.get(conf), TEST_UTIL.getDataTestDir(),
            TEST_UTIL.getDataTestDir().toString(), conf);
        // Initialize the memstore chunk pool before creating the region.
        ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);
        return (HRegion)TEST_UTIL.createLocalHRegion(Bytes.toBytes(tableName),
            HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, tableName, conf,
            false, Durability.SKIP_WAL, wal, INCREMENT_BYTES);
    }

    // Closes the region and its WAL; both must be closed to release resources.
    private void closeRegion(final HRegion region) throws IOException {
        region.close();
        region.getWAL().close();
    }

    /**
     * Placeholder for an MVCC mis-read reproduction; currently only creates
     * and closes a region. NOTE(review): test body was never filled in.
     */
    @Test
    public void testMVCCCausingMisRead() throws IOException {
        final HRegion region = getRegion(TEST_UTIL.getConfiguration(), this.name.getMethodName());
        try {
            // ADD TEST HERE!!
        } finally {
            closeRegion(region);
        }
    }

    /**
     * Increments a single cell a bunch of times.
     */
    private static class SingleCellIncrementer extends Thread {
        private final int count;        // number of increments to apply
        private final HRegion region;   // target region
        private final Increment increment; // prebuilt increment (one row/cell)

        SingleCellIncrementer(final int i, final int count, final HRegion region,
            final Increment increment) {
            super("" + i); // thread named after its index for log readability
            setDaemon(true);
            this.count = count;
            this.region = region;
            this.increment = increment;
        }

        @Override
        public void run() {
            for (int i = 0; i < this.count; i++) {
                try {
                    this.region.increment(this.increment);
                    // LOG.info(getName() + " " + i);
                } catch (IOException e) {
                    // Propagate as unchecked so the failure surfaces in join().
                    throw new RuntimeException(e);
                }
            }
        }
    }

    /**
     * Increments a random row's Cell <code>count</code> times.
     */
    private static class CrossRowCellIncrementer extends Thread {
        private final int count;          // number of increments to apply
        private final HRegion region;     // target region
        private final Increment [] increments; // one prebuilt increment per row in range

        CrossRowCellIncrementer(final int i, final int count, final HRegion region, final int range) {
            super("" + i);
            setDaemon(true);
            this.count = count;
            this.region = region;
            this.increments = new Increment[range];
            // NOTE(review): every instance keys its increments off its own
            // index i, so instance i only ever touches row i despite holding
            // `range` Increment objects — confirm this is intended.
            for (int ii = 0; ii < range; ii++) {
                this.increments[ii] = new Increment(Bytes.toBytes(i));
                this.increments[ii].addColumn(INCREMENT_BYTES, INCREMENT_BYTES, 1);
            }
        }

        @Override
        public void run() {
            for (int i = 0; i < this.count; i++) {
                try {
                    // Pick one of the prebuilt increments at random each pass.
                    int index = ThreadLocalRandom.current().nextInt(0, this.increments.length);
                    this.region.increment(this.increments[index]);
                    // LOG.info(getName() + " " + index);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        }
    }

    /**
     * Have each thread update its own Cell. Avoid contention with another thread.
     * After all threads join, scans the region and asserts one cell per thread
     * and that the cell values sum to THREAD_COUNT * INCREMENT_COUNT.
     * @throws IOException
     * @throws InterruptedException
     */
    @Test
    public void testUnContendedSingleCellIncrement()
        throws IOException, InterruptedException {
        final HRegion region = getRegion(TEST_UTIL.getConfiguration(),
            TestIncrementsFromClientSide.filterStringSoTableNameSafe(this.name.getMethodName()));
        long startTime = System.currentTimeMillis();
        try {
            SingleCellIncrementer [] threads = new SingleCellIncrementer[THREAD_COUNT];
            for (int i = 0; i < threads.length; i++) {
                byte [] rowBytes = Bytes.toBytes(i); // each thread gets its own row
                Increment increment = new Increment(rowBytes);
                increment.addColumn(INCREMENT_BYTES, INCREMENT_BYTES, 1);
                threads[i] = new SingleCellIncrementer(i, INCREMENT_COUNT, region, increment);
            }
            for (int i = 0; i < threads.length; i++) {
                threads[i].start();
            }
            for (int i = 0; i < threads.length; i++) {
                threads[i].join();
            }
            // Drain the whole region into `cells`, then verify count and sum.
            RegionScanner regionScanner = region.getScanner(new Scan());
            List<Cell> cells = new ArrayList<>(THREAD_COUNT);
            while(regionScanner.next(cells)) continue;
            assertEquals(THREAD_COUNT, cells.size());
            long total = 0;
            for (Cell cell: cells) total +=
                Bytes.toLong(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
            assertEquals(INCREMENT_COUNT * THREAD_COUNT, total);
        } finally {
            closeRegion(region);
            LOG.info(this.name.getMethodName() + " " + (System.currentTimeMillis() - startTime) + "ms");
        }
    }

    /**
     * Have each thread update its own Cell. Avoid contention with another thread.
     * Same verification as above but using CrossRowCellIncrementer workers.
     * @throws IOException
     * @throws InterruptedException
     */
    @Test
    public void testContendedAcrossCellsIncrement()
        throws IOException, InterruptedException {
        final HRegion region = getRegion(TEST_UTIL.getConfiguration(),
            TestIncrementsFromClientSide.filterStringSoTableNameSafe(this.name.getMethodName()));
        long startTime = System.currentTimeMillis();
        try {
            CrossRowCellIncrementer [] threads = new CrossRowCellIncrementer[THREAD_COUNT];
            for (int i = 0; i < threads.length; i++) {
                threads[i] = new CrossRowCellIncrementer(i, INCREMENT_COUNT, region, THREAD_COUNT);
            }
            for (int i = 0; i < threads.length; i++) {
                threads[i].start();
            }
            for (int i = 0; i < threads.length; i++) {
                threads[i].join();
            }
            // Drain the whole region into `cells`, then verify count and sum.
            RegionScanner regionScanner = region.getScanner(new Scan());
            List<Cell> cells = new ArrayList<>(100);
            while(regionScanner.next(cells)) continue;
            assertEquals(THREAD_COUNT, cells.size());
            long total = 0;
            for (Cell cell: cells) total +=
                Bytes.toLong(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
            assertEquals(INCREMENT_COUNT * THREAD_COUNT, total);
        } finally {
            closeRegion(region);
            LOG.info(this.name.getMethodName() + " " + (System.currentTimeMillis() - startTime) + "ms");
        }
    }
}
| |
/*
* Copyright (C) 2016-2018 SignalFx, Inc. All rights reserved.
*/
package com.signalfx.signalflow;
import java.util.Collections;
import java.util.Map;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
/**
* SignalFx SignalFlow client.
*
* Client for SignalFx's SignalFlow real-time analytics API. Allows for the execution of ad-hoc
* computations, returning its output in real-time as it is produced; to start new background
* computations; attach, keep alive or stop existing computations.
*
* @author dgriff
*/
public class SignalFlowClient {
private SignalFlowTransport transport;
/**
* Client Constructor that uses default transport/settings
*
* @param token
* user api token
*/
public SignalFlowClient(String token) {
this(new WebSocketTransport.TransportBuilder(token).build());
}
/**
* Client Constructor that uses custom transport
*
* @param transport
* custom created transport
*/
public SignalFlowClient(SignalFlowTransport transport) {
this.transport = transport;
}
/**
* Execute the given SignalFlow program and stream the output back.
*
* @param program
* computation written in signalflow language
* @return computation instance
*/
public Computation execute(String program) {
return new Computation(this.transport, program, Collections.<String, String> emptyMap(),
false);
}
/**
* This method is deprecated and will be removed in the next major release. Use
* {@link #execute(String, Long, Long, Long, Long, Boolean, Boolean)} instead
*
* Execute the given SignalFlow program with parameters and stream the output back.
*
* @param program
* computation written in signalflow language
* @param start
* Optional millisecond start timestamp
* @param stop
* Optional millisecond stop timestamp
* @param resolution
* Optional desired data resolution, in milliseconds
* @param maxDelay
* Optional desired maximum data delay, in milliseconds
* @param persistent
* Optional persistent setting
* @return computation instance
*/
@Deprecated
public Computation execute(String program, long start, long stop, long resolution,
long maxDelay, boolean persistent) {
return execute(program, start, stop, resolution, maxDelay, persistent, false);
}
/**
* Execute the given SignalFlow program with parameters and stream the output back.
*
* @param program
* computation written in signalflow language
* @param start
* Optional timestamp in milliseconds since epoch. Defaults to the current timestamp.
* @param stop
* Optional timestamp in milliseconds since epoch. Defaults to infinity.
* @param resolution
* Optional the minimum desired data resolution, in milliseconds. This allows the
* client to put an upper bound on the number of datapoints in the computation
* output.
* @param maxDelay
* Optional desired maximum data delay, in milliseconds between 1 and 900000. When
* set to zero or unset, max delay will be evaluated dynamically based on the
* historical lag information of the input data.
* @param persistent
* Optional persistent setting
* @param immediate
* Optional adjusts the stop timestamp so that the computation doesn't wait for
* future data to be available
* @return computation instance
*/
public Computation execute(String program, Long start, Long stop, Long resolution,
Long maxDelay, Boolean persistent, Boolean immediate) {
Map<String, String> params = buildParams("start", start, "stop", stop, "resolution",
resolution, "maxDelay", maxDelay, "persistent", persistent, "immediate", immediate);
return new Computation(this.transport, program, params, false);
}
/**
* Start executing the given SignalFlow program without being attached to the output of the
* computation.
*
* @param program
* computation written in signalflow language
*/
public void start(String program) {
this.transport.start(program, Collections.<String, String> emptyMap());
}
/**
* Start executing the given SignalFlow program without being attached to the output of the
* computation.
*
* @param program
* computation written in signalflow language
* @param start
* Optional millisecond start timestamp
* @param stop
* Optional millisecond stop timestamp
* @param resolution
* Optional desired data resolution, in milliseconds
* @param maxDelay
* Optional desired maximum data delay, in milliseconds
*/
public void start(String program, long start, long stop, long resolution, long maxDelay) {
Map<String, String> params = buildParams("start", start, "stop", stop, "resolution",
resolution, "maxDelay", maxDelay);
this.transport.start(program, params);
}
/**
* Stop a SignalFlow computation
*
* @param computation
* computation instance
* @param reason
* Optional description of why stop was called
*/
public void stop(Computation computation, String reason) {
stop(computation.getId(), reason);
computation.close();
}
/**
* Stop a SignalFlow computation
*
* @param handle
* computation id
* @param reason
* Optional description of why stop was called
*/
public void stop(String handle, String reason) {
Map<String, String> params = buildParams("reason", reason);
this.transport.stop(handle, params);
}
    /**
     * Keepalive a SignalFlow computation.
     *
     * <p>Delegates directly to the transport; presumably this prevents the
     * backend from timing out an attached computation — confirm against the
     * transport implementation.
     *
     * @param handle
     *            computation id
     */
    public void keepalive(String handle) {
        this.transport.keepalive(handle);
    }
/**
* Attach to an existing SignalFlow computation.
*
* @param handle
* computation id
* @param filters
* filter written in signalflow language
* @param resolution
* Optional desired data resolution, in milliseconds
* @return computation instance
*/
public Computation attach(String handle, String filters, long resolution) {
return new Computation(this.transport, handle,
buildParams("filters", filters, "resolution", resolution), true);
}
    /**
     * Close this SignalFlow client and its underlying transport.
     */
    public void close() {
        // 1000 is presumably the WebSocket "normal closure" status code and the
        // null argument an absent close reason — confirm against the transport
        // implementation.
        this.transport.close(1000, null);
    }
private static Map<String, String> buildParams(Object... params) {
Preconditions.checkArgument(params.length % 2 == 0);
ImmutableMap.Builder<String, String> builder = new ImmutableMap.Builder<String, String>();
for (int i = 0; i < params.length; i += 2) {
if (params[i] != null && params[i + 1] != null) {
builder.put(params[i].toString(), params[i + 1].toString());
}
}
return builder.build();
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.logic.implementation;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.logic.fluent.IntegrationServiceEnvironmentManagedApiOperationsClient;
import com.azure.resourcemanager.logic.fluent.models.ApiOperationInner;
import com.azure.resourcemanager.logic.models.ApiOperationListResult;
import reactor.core.publisher.Mono;
/**
* An instance of this class provides access to all the operations defined in
* IntegrationServiceEnvironmentManagedApiOperationsClient.
*/
public final class IntegrationServiceEnvironmentManagedApiOperationsClientImpl
    implements IntegrationServiceEnvironmentManagedApiOperationsClient {
    /** Logger for diagnostics emitted by this operation class. */
    private final ClientLogger logger =
        new ClientLogger(IntegrationServiceEnvironmentManagedApiOperationsClientImpl.class);
    /** The proxy service used to perform REST calls. */
    private final IntegrationServiceEnvironmentManagedApiOperationsService service;
    /** The service client containing this operation class. */
    private final LogicManagementClientImpl client;
    /**
     * Initializes an instance of IntegrationServiceEnvironmentManagedApiOperationsClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    IntegrationServiceEnvironmentManagedApiOperationsClientImpl(LogicManagementClientImpl client) {
        // Build a proxy implementation of the REST interface over the client's HTTP pipeline.
        this.service =
            RestProxy
                .create(
                    IntegrationServiceEnvironmentManagedApiOperationsService.class,
                    client.getHttpPipeline(),
                    client.getSerializerAdapter());
        this.client = client;
    }
    /**
     * The interface defining all the services for
     * LogicManagementClientIntegrationServiceEnvironmentManagedApiOperations to be used by the proxy service to perform
     * REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "LogicManagementClien")
    private interface IntegrationServiceEnvironmentManagedApiOperationsService {
        @Headers({"Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Logic"
                + "/integrationServiceEnvironments/{integrationServiceEnvironmentName}/managedApis/{apiName}"
                + "/apiOperations")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<ApiOperationListResult>> list(
            @HostParam("$host") String endpoint,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("resourceGroup") String resourceGroup,
            @PathParam("integrationServiceEnvironmentName") String integrationServiceEnvironmentName,
            @PathParam("apiName") String apiName,
            @QueryParam("api-version") String apiVersion,
            @HeaderParam("Accept") String accept,
            Context context);
        @Headers({"Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<ApiOperationListResult>> listNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink,
            @HostParam("$host") String endpoint,
            @HeaderParam("Accept") String accept,
            Context context);
    }
    /**
     * Gets the managed Api operations.
     *
     * @param resourceGroup The resource group.
     * @param integrationServiceEnvironmentName The integration service environment name.
     * @param apiName The api name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the managed Api operations.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<ApiOperationInner>> listSinglePageAsync(
        String resourceGroup, String integrationServiceEnvironmentName, String apiName) {
        // Validate required client configuration and method arguments before issuing the request.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroup == null) {
            return Mono.error(new IllegalArgumentException("Parameter resourceGroup is required and cannot be null."));
        }
        if (integrationServiceEnvironmentName == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter integrationServiceEnvironmentName is required and cannot be null."));
        }
        if (apiName == null) {
            return Mono.error(new IllegalArgumentException("Parameter apiName is required and cannot be null."));
        }
        final String accept = "application/json";
        // Perform the call inside a Reactor context that carries the client's context,
        // then adapt the raw list response into a PagedResponse (value + nextLink).
        return FluxUtil
            .withContext(
                context ->
                    service
                        .list(
                            this.client.getEndpoint(),
                            this.client.getSubscriptionId(),
                            resourceGroup,
                            integrationServiceEnvironmentName,
                            apiName,
                            this.client.getApiVersion(),
                            accept,
                            context))
            .<PagedResponse<ApiOperationInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }
    /**
     * Gets the managed Api operations.
     *
     * @param resourceGroup The resource group.
     * @param integrationServiceEnvironmentName The integration service environment name.
     * @param apiName The api name.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the managed Api operations.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<ApiOperationInner>> listSinglePageAsync(
        String resourceGroup, String integrationServiceEnvironmentName, String apiName, Context context) {
        // Validate required client configuration and method arguments before issuing the request.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroup == null) {
            return Mono.error(new IllegalArgumentException("Parameter resourceGroup is required and cannot be null."));
        }
        if (integrationServiceEnvironmentName == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter integrationServiceEnvironmentName is required and cannot be null."));
        }
        if (apiName == null) {
            return Mono.error(new IllegalArgumentException("Parameter apiName is required and cannot be null."));
        }
        final String accept = "application/json";
        // Merge the caller-supplied context with the client's defaults.
        context = this.client.mergeContext(context);
        return service
            .list(
                this.client.getEndpoint(),
                this.client.getSubscriptionId(),
                resourceGroup,
                integrationServiceEnvironmentName,
                apiName,
                this.client.getApiVersion(),
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
    /**
     * Gets the managed Api operations.
     *
     * @param resourceGroup The resource group.
     * @param integrationServiceEnvironmentName The integration service environment name.
     * @param apiName The api name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the managed Api operations.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<ApiOperationInner> listAsync(
        String resourceGroup, String integrationServiceEnvironmentName, String apiName) {
        // PagedFlux lazily fetches the first page, then follows nextLink for subsequent pages.
        return new PagedFlux<>(
            () -> listSinglePageAsync(resourceGroup, integrationServiceEnvironmentName, apiName),
            nextLink -> listNextSinglePageAsync(nextLink));
    }
    /**
     * Gets the managed Api operations.
     *
     * @param resourceGroup The resource group.
     * @param integrationServiceEnvironmentName The integration service environment name.
     * @param apiName The api name.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the managed Api operations.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<ApiOperationInner> listAsync(
        String resourceGroup, String integrationServiceEnvironmentName, String apiName, Context context) {
        return new PagedFlux<>(
            () -> listSinglePageAsync(resourceGroup, integrationServiceEnvironmentName, apiName, context),
            nextLink -> listNextSinglePageAsync(nextLink, context));
    }
    /**
     * Gets the managed Api operations.
     *
     * @param resourceGroup The resource group.
     * @param integrationServiceEnvironmentName The integration service environment name.
     * @param apiName The api name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the managed Api operations.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<ApiOperationInner> list(
        String resourceGroup, String integrationServiceEnvironmentName, String apiName) {
        return new PagedIterable<>(listAsync(resourceGroup, integrationServiceEnvironmentName, apiName));
    }
    /**
     * Gets the managed Api operations.
     *
     * @param resourceGroup The resource group.
     * @param integrationServiceEnvironmentName The integration service environment name.
     * @param apiName The api name.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the managed Api operations.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<ApiOperationInner> list(
        String resourceGroup, String integrationServiceEnvironmentName, String apiName, Context context) {
        return new PagedIterable<>(listAsync(resourceGroup, integrationServiceEnvironmentName, apiName, context));
    }
    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of managed API operations.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<ApiOperationInner>> listNextSinglePageAsync(String nextLink) {
        // Validate the next-page link and client endpoint.
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Perform the call inside a Reactor context that carries the client's context.
        return FluxUtil
            .withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context))
            .<PagedResponse<ApiOperationInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }
    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of managed API operations.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<ApiOperationInner>> listNextSinglePageAsync(String nextLink, Context context) {
        // Validate the next-page link and client endpoint.
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .listNext(nextLink, this.client.getEndpoint(), accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
}
| |
package com.thetransactioncompany.cors;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Enumeration;
import java.util.Properties;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.catalina.comet.CometEvent;
import org.apache.catalina.comet.CometFilter;
import org.apache.catalina.comet.CometFilterChain;
/**
* Cross-Origin Resource Sharing (CORS) servlet filter.
*
* <p>The filter intercepts incoming HTTP requests and applies the CORS
* policy as specified by the filter init parameters. The actual CORS
* request is processed by the {@link CORSRequestHandler} class.
*
* <p>Supported filter init parameters:
*
* <ul>
* <li>cors.allowGenericHttpRequests {true|false} defaults to {@code true}.
* <li>cors.allowOrigin {"*"|origin-list} defaults to {@code *}.
* <li>cors.supportedMethods {method-list} defaults to {@code "GET, POST, HEAD, OPTIONS"}.
* <li>cors.supportedHeaders {header-list} defaults to empty list.
* <li>cors.exposedHeaders {header-list} defaults to empty list.
* <li>cors.supportsCredentials {true|false} defaults to {@code true}.
* <li>cors.maxAge {int} defaults to {@code -1} (unspecified).
* </ul>
*
* @author <a href="http://dzhuvinov.com">Vladimir Dzhuvinov</a>
* @version 1.3.3 (2012-06-22)
*/
public class CORSCometFilter implements CometFilter {

	/**
	 * The CORS filter configuration.
	 */
	private CORSConfiguration config;

	/**
	 * Encapsulates the CORS request handling logic.
	 */
	private CORSRequestHandler handler;

	/**
	 * Converts the initial filter parameters (typically specified in the
	 * {@code web.xml} file) to a Java properties hashtable. The parameter
	 * names become property keys.
	 *
	 * @param config The filter configuration.
	 *
	 * @return The context parameters as Java properties.
	 */
	private static Properties getFilterInitParameters(final FilterConfig config) {

		Properties props = new Properties();

		// Enumeration<?> avoids the raw type; the servlet API guarantees the
		// init parameter names are strings, so the cast below is safe.
		Enumeration<?> en = config.getInitParameterNames();

		while (en.hasMoreElements()) {
			String key = (String)en.nextElement();
			String value = config.getInitParameter(key);
			props.setProperty(key, value);
		}

		return props;
	}

	/**
	 * Serialises the items of an array into a string. Each item must have a
	 * meaningful {@code toString()} method.
	 *
	 * @param array The array to serialise, may be {@code null}.
	 * @param sep   The string separator to apply.
	 *
	 * @return The serialised array as string.
	 */
	private static String serialize(final Object[] array, final String sep) {

		if (array == null)
			return "";

		// Accumulate into a StringBuilder; the previous implementation used
		// repeated string concatenation, which is O(n^2) in the item count.
		StringBuilder sb = new StringBuilder();

		for (int i = 0; i < array.length; i++) {
			if (i > 0)
				sb.append(sep);
			sb.append(array[i].toString());
		}

		return sb.toString();
	}

	/**
	 * This method is invoked by the web container to initialise the
	 * filter at startup.
	 *
	 * @param filterConfig The filter configuration.
	 *
	 * @throws ServletException On a filter initialisation exception.
	 */
	public void init(final FilterConfig filterConfig)
		throws ServletException {

		// Get the init params
		Properties props = getFilterInitParameters(filterConfig);

		// Extract and parse all required CORS filter properties
		try {
			config = new CORSConfiguration(props);
		} catch (CORSConfigurationException e) {
			throw new ServletException(e);
		}

		handler = new CORSRequestHandler(config);
	}

	/**
	 * Produces a simple HTTP text/plain response with the specified status
	 * code and message.
	 *
	 * <p>Note: The CORS filter avoids falling back to the default web
	 * container error page (typically a richly-formatted HTML page) to make
	 * it easier for XHR debugger tools to identify the cause of failed
	 * requests.
	 *
	 * @param response The HTTP servlet response to write to.
	 * @param sc       The HTTP status code.
	 * @param msg      The message.
	 *
	 * @throws IOException      On a I/O exception.
	 * @throws ServletException On a general request processing exception.
	 */
	private void printMessage(final HttpServletResponse response, final int sc, final String msg)
		throws IOException, ServletException {

		// Set the status code
		response.setStatus(sc);

		// Discard any buffered output and write the plain-text error message
		response.resetBuffer();
		response.setContentType("text/plain");
		PrintWriter out = response.getWriter();
		out.println("Cross-Origin Resource Sharing (CORS) Filter: " + msg);
	}

	/**
	 * Filters an HTTP request/response pair according to the configured CORS
	 * policy. Also tags the request with CORS information to downstream
	 * handlers.
	 *
	 * @param request  The servlet request.
	 * @param response The servlet response.
	 * @param chain    The filter chain.
	 *
	 * @throws IOException      On a I/O exception.
	 * @throws ServletException On a general request processing exception.
	 */
	private void doFilter(final HttpServletRequest request, final HttpServletResponse response, final FilterChain chain)
		throws IOException, ServletException {

		// Tag
		handler.tagRequest(request);

		CORSRequestType type = CORSRequestType.detect(request);

		try {
			if (type == CORSRequestType.ACTUAL) {
				// Simple/actual CORS request
				handler.handleActualRequest(request, response);
				chain.doFilter(request, response);
			}
			else if (type == CORSRequestType.PREFLIGHT) {
				// Preflight CORS request, handle but don't pass
				// further down the chain
				handler.handlePreflightRequest(request, response);
			}
			else if (config.allowGenericHttpRequests) {
				// Not a CORS request, but allow it through
				request.setAttribute("cors.isCorsRequest", false); // tag
				chain.doFilter(request, response);
			}
			else {
				// Generic HTTP requests denied
				request.setAttribute("cors.isCorsRequest", false); // tag
				printMessage(response, HttpServletResponse.SC_FORBIDDEN, "Generic HTTP requests not allowed");
			}
		} catch (InvalidCORSRequestException e) {
			request.setAttribute("cors.isCorsRequest", false); // tag
			printMessage(response, HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
		} catch (CORSOriginDeniedException e) {
			String msg = e.getMessage() + ": " + serialize(e.getRequestOrigins(), " ");
			printMessage(response, HttpServletResponse.SC_FORBIDDEN, msg);
		} catch (UnsupportedHTTPMethodException e) {
			String msg = e.getMessage();
			HTTPMethod method = e.getRequestedMethod();
			if (method != null)
				msg = msg + ": " + method.toString();
			printMessage(response, HttpServletResponse.SC_METHOD_NOT_ALLOWED, msg);
		} catch (UnsupportedHTTPHeaderException e) {
			String msg = e.getMessage();
			HeaderFieldName header = e.getRequestHeader();
			if (header != null)
				msg = msg + ": " + header.toString();
			printMessage(response, HttpServletResponse.SC_FORBIDDEN, msg);
		}
	}

	/**
	 * Called by the servlet container each time a request/response pair is
	 * passed through the chain due to a client request for a resource at
	 * the end of the chain.
	 *
	 * @param request  The servlet request.
	 * @param response The servlet response.
	 * @param chain    The filter chain.
	 *
	 * @throws IOException      On a I/O exception.
	 * @throws ServletException On a general request processing exception.
	 */
	public void doFilter(final ServletRequest request, final ServletResponse response, final FilterChain chain)
		throws IOException, ServletException {

		if (request instanceof HttpServletRequest && response instanceof HttpServletResponse) {
			// Cast to HTTP
			doFilter((HttpServletRequest)request, (HttpServletResponse)response, chain);
		}
		else {
			throw new ServletException("Cannot filter non-HTTP requests/responses");
		}
	}

	/**
	 * Filters a Comet event according to the configured CORS policy,
	 * mirroring {@link #doFilter(HttpServletRequest, HttpServletResponse,
	 * FilterChain)} for Comet processing.
	 *
	 * @param event The Comet event.
	 * @param chain The Comet filter chain.
	 *
	 * @throws IOException      On a I/O exception.
	 * @throws ServletException On a general request processing exception.
	 */
	public void doFilterEvent(final CometEvent event, final CometFilterChain chain) throws IOException, ServletException {

		HttpServletRequest request = event.getHttpServletRequest();
		HttpServletResponse response = event.getHttpServletResponse();

		// Tag
		handler.tagRequest(request);

		CORSRequestType type = CORSRequestType.detect(request);

		try {
			if (type == CORSRequestType.ACTUAL) {
				// Simple/actual CORS request
				handler.handleActualRequest(request, response);
				chain.doFilterEvent(event);
			} else if (type == CORSRequestType.PREFLIGHT) {
				// Preflight CORS request: handle it, then continue the Comet
				// event chain. NOTE(review): unlike the servlet doFilter
				// variant, which stops at the preflight response, this passes
				// the event down the chain — confirm this is intentional.
				handler.handlePreflightRequest(request, response);
				chain.doFilterEvent(event);
			} else if (config.allowGenericHttpRequests) {
				// Not a CORS request, but allow it through
				request.setAttribute("cors.isCorsRequest", false); // tag
				chain.doFilterEvent(event);
			} else {
				// Generic HTTP requests denied
				request.setAttribute("cors.isCorsRequest", false); // tag
				printMessage(response, HttpServletResponse.SC_FORBIDDEN,
					"Generic HTTP requests not allowed");
			}
		} catch (InvalidCORSRequestException e) {
			request.setAttribute("cors.isCorsRequest", false); // tag
			printMessage(response, HttpServletResponse.SC_BAD_REQUEST,
				e.getMessage());
		} catch (CORSOriginDeniedException e) {
			String msg = e.getMessage() + ": "
				+ serialize(e.getRequestOrigins(), " ");
			printMessage(response, HttpServletResponse.SC_FORBIDDEN, msg);
		} catch (UnsupportedHTTPMethodException e) {
			String msg = e.getMessage();
			HTTPMethod method = e.getRequestedMethod();
			if (method != null)
				msg = msg + ": " + method.toString();
			printMessage(response, HttpServletResponse.SC_METHOD_NOT_ALLOWED,
				msg);
		} catch (UnsupportedHTTPHeaderException e) {
			String msg = e.getMessage();
			HeaderFieldName header = e.getRequestHeader();
			if (header != null)
				msg = msg + ": " + header.toString();
			printMessage(response, HttpServletResponse.SC_FORBIDDEN, msg);
		}
	}

	/**
	 * Called by the web container to indicate to a filter that it is being
	 * taken out of service.
	 */
	public void destroy() {

		// do nothing
	}
}
| |
/**
* Copyright (C) 2014 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.ingestion.sink.mongodb;
import java.nio.charset.Charset;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.flume.Event;
import org.bson.types.ObjectId;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Charsets;
import com.google.common.io.BaseEncoding;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.util.JSON;
import com.mongodb.util.JSONParseException;
class EventParser {

    private static final Logger log = LoggerFactory.getLogger(EventParser.class);

    private static final String DEFAULT_BINARY_ENCODING = "base64";
    private static final String DOCUMENT_TYPE = "document";

    // Mapping definition driving header/body-to-BSON conversion.
    private final MappingDefinition definition;

    /** Creates a parser with an empty (fully permissive) mapping definition. */
    public EventParser() {
        this(new MappingDefinition());
    }

    /**
     * Creates a parser driven by the given mapping definition.
     *
     * @param mappingDefinition the field mapping to apply
     */
    public EventParser(final MappingDefinition mappingDefinition) {
        this.definition = mappingDefinition;
    }

    /**
     * Converts a raw string value into the MongoDB value described by the
     * field definition. With no definition (or no type), the value is parsed
     * as JSON, falling back to the raw string on parse failure.
     *
     * @param fd the field definition, may be {@code null}
     * @param stringValue the raw value to convert
     * @return the converted value; {@code null} for the NULL type
     * @throws UnsupportedOperationException for unsupported types/encodings
     */
    public Object parseValue(final FieldDefinition fd, final String stringValue) {
        if (fd == null || fd.getType() == null) {
            try {
                return JSON.parse(stringValue);
            } catch (JSONParseException ex) {
                // XXX: Default to String
                log.trace("Could not parse as JSON, defaulting to String: {}", stringValue);
                return stringValue;
            }
        }
        switch (fd.getType()) {
            case DOUBLE:
                return Double.parseDouble(stringValue);
            case STRING:
                return stringValue;
            case OBJECT:
            case ARRAY:
                // TODO: should we use customizable array representation?
                // TODO: should we check that the result is indeed an array or object?
                return JSON.parse(stringValue);
            case BINARY:
                SimpleFieldDefinition sfd = (SimpleFieldDefinition) fd;
                final String encoding = (sfd.getEncoding() == null) ? DEFAULT_BINARY_ENCODING : sfd.getEncoding()
                        .toLowerCase(Locale.ENGLISH);
                if ("base64".equals(encoding)) {
                    return BaseEncoding.base64().decode(stringValue);
                } else {
                    throw new UnsupportedOperationException("Unsupported encoding for binary type: " + encoding);
                }
                // TODO: case "UNDEFINED":
            case OBJECTID:
                return new ObjectId(stringValue);
            case BOOLEAN:
                return Boolean.parseBoolean(stringValue);
            case DATE:
                DateFormat dateFormat = ((DateFieldDefinition) fd).getDateFormat();
                if (dateFormat == null) {
                    // No explicit format: numbers are epoch millis, anything
                    // else is parsed as an ISO-8601 date/time.
                    if (NumberUtils.isNumber(stringValue)) {
                        return new Date(Long.parseLong(stringValue));
                    } else {
                        return ISODateTimeFormat.dateOptionalTimeParser().parseDateTime(stringValue).toDate();
                    }
                } else {
                    try {
                        return dateFormat.parse(stringValue);
                    } catch (ParseException ex) {
                        // XXX: Default to string
                        log.warn("Could not parse date, defaulting to String: {}", stringValue);
                        return stringValue;
                    }
                }
            case NULL:
                // TODO: Check if this is valid
                return null;
                // TODO: case "REGEX":
                // TODO: case "JAVASCRIPT":
                // TODO: case "SYMBOL":
                // TODO: case "JAVASCRIPT_SCOPE":
            case INT32:
                return Integer.parseInt(stringValue);
            case INT64:
                return Long.parseLong(stringValue);
            case DOCUMENT:
                return populateDocument((DocumentFieldDefinition) fd, stringValue);
            default:
                throw new UnsupportedOperationException("Unsupported type: " + fd.getType().name());
        }
    }

    /**
     * Converts a Flume event (body plus headers) into a MongoDB document
     * according to the mapping definition.
     *
     * @param event the event to convert
     * @return the resulting document
     */
    public DBObject parse(Event event) {
        DBObject dbObject = new BasicDBObject();
        if (definition.getBodyType() != MongoDataType.NULL) {
            Object obj = null;
            if (definition.getBodyType() == MongoDataType.BINARY && definition.getBodyEncoding().equals("raw")) {
                obj = event.getBody();
            } else if (definition.getBodyType() == MongoDataType.STRING) {
                Charset charset = Charset.forName(definition.getBodyEncoding());
                obj = new String(event.getBody(), charset);
            } else {
                SimpleFieldDefinition fd = new SimpleFieldDefinition();
                fd.setType(definition.getBodyType());
                fd.setEncoding(definition.getBodyEncoding());
                obj = parseValue(fd, new String(event.getBody(), Charsets.UTF_8));
            }
            if (!"".equals(definition.getBodyField())) {
                dbObject.put(definition.getBodyField(), obj);
            } else if (obj instanceof DBObject) {
                dbObject = (DBObject) obj;
            } else {
                log.warn("Could not map body to JSON document: {}", obj);
            }
        }
        final Map<String, String> eventHeaders = event.getHeaders();
        if (definition.allowsAdditionalProperties()) {
            for (final Map.Entry<String, String> headerEntry : eventHeaders.entrySet()) {
                final String fieldName = headerEntry.getKey();
                final String fieldValue = headerEntry.getValue();
                FieldDefinition def = definition.getFieldDefinitionByName(fieldName);
                if (def == null) {
                    dbObject.put(fieldName, parseValue(null, fieldValue));
                } else {
                    // fieldName is a key of eventHeaders by construction, so the
                    // original containsKey() re-check was redundant and is dropped.
                    final String mappedName = (def.getMappedName() == null) ? def.getFieldName() : def.getMappedName();
                    dbObject.put(mappedName, parseValue(def, fieldValue));
                }
            }
        } else {
            for (FieldDefinition def : definition.getFields()) {
                final String fieldName = def.getFieldName();
                final String mappedName = (def.getMappedName() == null) ? def.getFieldName() : def.getMappedName();
                if (containsKey(eventHeaders, fieldName)) {
                    dbObject.put(mappedName, parseValue(def, getFieldName(eventHeaders, fieldName)));
                }
            }
        }
        return dbObject;
    }

    /**
     * Resolves a (possibly dotted) field name against the event headers. For
     * a dotted name like {@code obj.inner}, the header named {@code obj} is
     * parsed as JSON and the last path segment is looked up inside it.
     *
     * @param eventHeaders the event headers
     * @param fieldName the plain or dotted field name
     * @return the resolved text value, or {@code null} if it cannot be resolved
     */
    private String getFieldName(Map<String, String> eventHeaders, String fieldName) {
        String value = null;
        if (fieldName.contains(".")) {
            ObjectMapper mapper = new ObjectMapper();
            final String[] fieldNameSplitted = fieldName.split("\\.");
            try {
                final String objectName = fieldNameSplitted[0];
                JsonNode jsonNode = mapper.readTree(eventHeaders.get(objectName));
                value = jsonNode.findValue(fieldNameSplitted[fieldNameSplitted.length - 1]).getTextValue();
            } catch (Exception e) {
                // Log instead of printStackTrace(); keep the original
                // best-effort behaviour of returning null on failure.
                log.warn("Could not extract nested field '{}' from event headers", fieldName, e);
            }
        } else {
            value = eventHeaders.get(fieldName);
        }
        return value;
    }

    /**
     * Checks whether the headers contain the given field. For a dotted name,
     * only the first path segment is checked.
     *
     * @param eventHeaders the event headers
     * @param fieldName the plain or dotted field name
     * @return {@code true} if the (root of the) field is present
     */
    private boolean containsKey(Map<String, String> eventHeaders, String fieldName) {
        if (StringUtils.isNotBlank(fieldName) && fieldName.contains(".")) {
            final String[] fieldNameSplitted = fieldName.split("\\.");
            return eventHeaders.containsKey(fieldNameSplitted[0]);
        } else {
            return eventHeaders.containsKey(fieldName);
        }
    }

    /**
     * Converts a batch of events, preserving order.
     *
     * @param events the events to convert
     * @return one document per event
     */
    public List<DBObject> parse(List<Event> events) {
        List<DBObject> rows = new ArrayList<DBObject>(events.size());
        for (Event event : events) {
            rows.add(this.parse(event));
        }
        return rows;
    }

    /**
     * Builds a sub-document from a delimited string, consuming one delimited
     * token per mapped field (nested documents consume as many tokens as they
     * have fields).
     *
     * @param fd the document field definition (must declare a delimiter)
     * @param document the delimited raw value
     * @return the populated sub-document
     * @throws MongoSinkException if the definition has no delimiter
     */
    private DBObject populateDocument(DocumentFieldDefinition fd, String document) {
        DBObject dbObject = null;
        final String delimiter = fd.getDelimiter();
        if (!StringUtils.isEmpty(delimiter)) {
            String[] documentAsArray = document.split(Pattern.quote(delimiter));
            dbObject = new BasicDBObject();
            Map<String, FieldDefinition> documentMapping = new LinkedHashMap<String, FieldDefinition>(
                    fd.getDocumentMapping());
            int i = 0;
            for (Map.Entry<String, FieldDefinition> documentField : documentMapping.entrySet()) {
                if (DOCUMENT_TYPE.equalsIgnoreCase(documentField.getValue().getType().name())) {
                    // Nested document: re-join the remaining tokens and recurse,
                    // then skip past the tokens the nested mapping consumed.
                    dbObject.put(documentField.getKey(), parseValue(documentField.getValue(),
                            StringUtils.join(Arrays.copyOfRange(documentAsArray, i, documentAsArray.length), fd.getDelimiter())));
                    i += ((DocumentFieldDefinition) documentField.getValue()).getDocumentMapping().size();
                } else {
                    dbObject.put(documentField.getKey(), parseValue(documentField.getValue(), documentAsArray[i++]));
                }
            }
        } else {
            throw new MongoSinkException("Delimiter char must be set");
        }
        return dbObject;
    }
}
| |
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.portlet.container.services;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertNull;
import static junit.framework.Assert.assertTrue;
import static org.junit.Assert.assertArrayEquals;
import java.io.IOException;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.portlet.ReadOnlyException;
import javax.portlet.ValidatorException;
import org.apache.commons.collections.EnumerationUtils;
import org.jasig.portal.portlet.dao.jpa.PortletPreferenceImpl;
import org.jasig.portal.portlet.om.IPortletPreference;
import org.jasig.portal.url.ParameterMap;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
/**
 * Unit tests for {@code AbstractPortletPreferencesImpl}.
 *
 * A minimal anonymous subclass is wired to the mutable maps below so each test
 * can script the "base" (descriptor-level) and "target" (mutable) preference
 * layers and capture what {@code store()} would persist in {@link #storedPrefs}.
 * Several tests simulate a persisted round-trip by copying {@code storedPrefs}
 * back into {@code targetPrefs} after a store.
 */
public class AbstractPortletPreferencesImplTest {
    // Preferences the subclass reports as the mutable "target" layer.
    private Map<String, IPortletPreference> targetPrefs = new LinkedHashMap<String, IPortletPreference>();
    // Preferences the subclass reports as the read-only "base" layer (defaults).
    private Map<String, IPortletPreference> basePrefs = new LinkedHashMap<String, IPortletPreference>();
    // Snapshot of the target layer captured by the most recent store() call.
    private Map<String, IPortletPreference> storedPrefs = Collections.emptyMap();
    // Value returned from storeInternal(); signals whether the store changed anything.
    private boolean modified = true;
    // Instance under test, rebuilt for every test in setup().
    private AbstractPortletPreferencesImpl<Object> portletPreferences;
    @Before
    public void setup() {
        // Reset all captured state so tests are independent.
        targetPrefs = new LinkedHashMap<String, IPortletPreference>();
        basePrefs = new LinkedHashMap<String, IPortletPreference>();
        storedPrefs = Collections.emptyMap();
        modified = true;
        // Anonymous subclass: loads the two layers from the test maps and
        // captures the target layer on store instead of hitting a real DAO.
        portletPreferences = new AbstractPortletPreferencesImpl<Object>(false) {
            @Override
            protected Object getLogDescription() {
                return "TEST";
            }
            @Override
            protected void loadTargetPortletPreferences(Object initContext, Map<String, IPortletPreference> targetPortletPreferences) {
                targetPortletPreferences.putAll(targetPrefs);
            }
            @Override
            protected void loadBasePortletPreferences(Object initContext, Map<String, IPortletPreference> basePortletPreferences) {
                basePortletPreferences.putAll(basePrefs);
            }
            @Override
            protected boolean storeInternal() throws IOException, ValidatorException {
                // Capture an immutable snapshot of what would be persisted.
                final Map<String, IPortletPreference> targetPortletPreferences = this.getTargetPortletPreferences();
                storedPrefs = ImmutableMap.copyOf(targetPortletPreferences);
                return modified;
            }
        };
    }
    /** Adds a single preference with the given values to the supplied layer map. */
    protected void addPref(Map<String, IPortletPreference> prefs, String name, boolean readOnly, String[] values) {
        final PortletPreferenceImpl preference = new PortletPreferenceImpl(name, readOnly);
        preference.setValues(values);
        prefs.put(name, preference);
    }
    // A null key must be rejected, per the PortletPreferences contract.
    @Test(expected=IllegalArgumentException.class)
    public void testIsReadOnlyNullKey() throws ReadOnlyException, ValidatorException, IOException {
        portletPreferences.isReadOnly((String)null);
    }
    // Read-only flag comes from the base layer; unknown keys are writable.
    @Test
    public void testIsReadOnly() throws ReadOnlyException, ValidatorException, IOException {
        addPref(basePrefs, "key1", true, new String[] { "default" });
        addPref(basePrefs, "key2", false, new String[] { "default" });
        boolean readOnly = portletPreferences.isReadOnly("key1");
        assertTrue(readOnly);
        readOnly = portletPreferences.isReadOnly("key2");
        assertFalse(readOnly);
        readOnly = portletPreferences.isReadOnly("key3");
        assertFalse(readOnly);
    }
    @Test(expected=IllegalArgumentException.class)
    public void testNullGetValueKey() throws ReadOnlyException {
        portletPreferences.getValue(null, null);
    }
    @Test(expected=IllegalArgumentException.class)
    public void testNullGetValuesKey() throws ReadOnlyException {
        portletPreferences.getValues(null, null);
    }
    // getValue: default only for a missing key; an existing pref with
    // null/empty/null-element values yields null, otherwise the first value.
    @Test
    public void testGetValue() throws ReadOnlyException, ValidatorException, IOException {
        addPref(basePrefs, "key1", false, null);
        addPref(basePrefs, "key2", false, new String[] { });
        addPref(basePrefs, "key3", false, new String[] { null });
        addPref(basePrefs, "key4", false, new String[] { "value1", "value2"});
        String value = portletPreferences.getValue("key0", "FOOBAR");
        assertEquals("FOOBAR", value);
        value = portletPreferences.getValue("key1", "FOOBAR");
        assertNull(value);
        value = portletPreferences.getValue("key2", "FOOBAR");
        assertNull(value);
        value = portletPreferences.getValue("key3", "FOOBAR");
        assertNull(value);
        value = portletPreferences.getValue("key4", "FOOBAR");
        assertEquals("value1", value);
    }
    // getValues: default only for a missing key; stored arrays (including
    // empty and null-element arrays) are returned exactly as stored.
    @Test
    public void testGetValues() throws ReadOnlyException, ValidatorException, IOException {
        addPref(basePrefs, "key1", false, null);
        addPref(basePrefs, "key2", false, new String[] { });
        addPref(basePrefs, "key3", false, new String[] { null });
        addPref(basePrefs, "key4", false, new String[] { "value1", "value2"});
        String[] values = portletPreferences.getValues("key0", new String[] { "FOOBAR" });
        assertArrayEquals(new String[] { "FOOBAR" }, values);
        values = portletPreferences.getValues("key1", new String[] { "FOOBAR" });
        assertNull(values);
        values = portletPreferences.getValues("key2", new String[] { "FOOBAR" });
        assertArrayEquals(new String[] { }, values);
        values = portletPreferences.getValues("key3", new String[] { "FOOBAR" });
        assertArrayEquals(new String[] { null }, values);
        values = portletPreferences.getValues("key4", new String[] { "FOOBAR" });
        assertArrayEquals(new String[] { "value1", "value2"}, values);
    }
    @Test(expected=IllegalArgumentException.class)
    public void testNullSetValueKey() throws ReadOnlyException {
        portletPreferences.setValue(null, null);
    }
    @Test(expected=IllegalArgumentException.class)
    public void testNullSetValuesKey() throws ReadOnlyException {
        portletPreferences.setValues(null, null);
    }
    // setValue(key, null) stores a single-element array containing null.
    @Test
    public void testNullSetValueValue() throws ReadOnlyException, ValidatorException, IOException {
        portletPreferences.setValue("key", null);
        portletPreferences.store();
        assertEquals(1, this.storedPrefs.size());
        final IPortletPreference pref = this.storedPrefs.get("key");
        assertNotNull(pref);
        assertEquals("key", pref.getName());
        assertArrayEquals(new String[] { null }, pref.getValues());
        assertFalse(pref.isReadOnly());
    }
    // setValues(key, null) stores a preference whose values array is null.
    @Test
    public void testNullSetValueValues() throws ReadOnlyException, ValidatorException, IOException {
        portletPreferences.setValues("key", null);
        portletPreferences.store();
        assertEquals(1, this.storedPrefs.size());
        final IPortletPreference pref = this.storedPrefs.get("key");
        assertNotNull(pref);
        assertEquals("key", pref.getName());
        assertNull(pref.getValues());
        assertFalse(pref.isReadOnly());
    }
    // A null element inside the values array is preserved as-is.
    @Test
    public void testNullEntryInValues() throws ReadOnlyException, ValidatorException, IOException {
        portletPreferences.setValues("key", new String[] { null });
        portletPreferences.store();
        assertEquals(1, this.storedPrefs.size());
        final IPortletPreference pref = this.storedPrefs.get("key");
        assertNotNull(pref);
        assertEquals("key", pref.getName());
        assertArrayEquals(new String[] { null }, pref.getValues());
        assertFalse(pref.isReadOnly());
    }
    @Test(expected=ReadOnlyException.class)
    public void testSetReadOnlyValue() throws ReadOnlyException, ValidatorException, IOException {
        addPref(basePrefs, "key", true, new String[] { "default" });
        //Set a modified value
        portletPreferences.setValue("key", "modified" );
    }
    @Test(expected=ReadOnlyException.class)
    public void testSetReadOnlyValues() throws ReadOnlyException, ValidatorException, IOException {
        addPref(basePrefs, "key", true, new String[] { "default" });
        //Set a modified value
        portletPreferences.setValues("key", new String[] { "modified" });
    }
    // Setting a value back to its base default should remove it from the
    // stored (target) layer on the next store.
    @Test
    public void testSetMatchesBase() throws ReadOnlyException, ValidatorException, IOException {
        addPref(basePrefs, "key", false, new String[] { "default" });
        //Set a modified value
        portletPreferences.setValues("key", new String[] { "modified" });
        //Initial store, check that correct stored map is created
        portletPreferences.store();
        //Actually "store" the stored prefs
        this.targetPrefs = new LinkedHashMap<String, IPortletPreference>(this.storedPrefs);
        assertEquals(1, this.storedPrefs.size());
        IPortletPreference pref = this.storedPrefs.get("key");
        assertNotNull(pref);
        assertEquals("key", pref.getName());
        assertArrayEquals(new String[] { "modified" }, pref.getValues());
        assertFalse(pref.isReadOnly());
        //Set the default value
        portletPreferences.setValues("key", new String[] { "default" });
        //Store again, should have nothing stored after this
        portletPreferences.store();
        assertEquals(0, this.storedPrefs.size());
    }
    // Updating an already-stored preference replaces its values in place.
    @Test
    public void testSetUpdateExisting() throws ReadOnlyException, ValidatorException, IOException {
        addPref(basePrefs, "key", false, new String[] { "default" });
        //Set a modified value
        portletPreferences.setValues("key", new String[] { "modified" });
        //Initial store, check that correct stored map is created
        portletPreferences.store();
        //Actually "store" the stored prefs
        this.targetPrefs = new LinkedHashMap<String, IPortletPreference>(this.storedPrefs);
        assertEquals(1, this.storedPrefs.size());
        IPortletPreference pref = this.storedPrefs.get("key");
        assertNotNull(pref);
        assertEquals("key", pref.getName());
        assertArrayEquals(new String[] { "modified" }, pref.getValues());
        assertFalse(pref.isReadOnly());
        //Set a modified value
        portletPreferences.setValues("key", null);
        //Initial store, check that correct stored map is created
        portletPreferences.store();
        //Actually "store" the stored prefs
        this.targetPrefs = new LinkedHashMap<String, IPortletPreference>(this.storedPrefs);
        assertEquals(1, this.storedPrefs.size());
        pref = this.storedPrefs.get("key");
        assertNotNull(pref);
        assertEquals("key", pref.getName());
        assertArrayEquals(null, pref.getValues());
        assertFalse(pref.isReadOnly());
    }
    // getNames() is the union of base and target keys, and reset() removes a
    // target-only key both before and after a store round-trip.
    @Test
    public void testGetNames() throws ReadOnlyException, ValidatorException, IOException {
        addPref(basePrefs, "key", false, new String[] { "default" });
        addPref(basePrefs, "key1", false, new String[] { "default" });
        Enumeration<String> names = portletPreferences.getNames();
        assertEquals(ImmutableSet.of("key", "key1"), new HashSet<String>(EnumerationUtils.toList(names)));
        //Set a modified value
        portletPreferences.setValues("key", new String[] { "modified" });
        portletPreferences.setValues("key3", new String[] { "modified" });
        names = portletPreferences.getNames();
        assertEquals(ImmutableSet.of("key", "key1", "key3"), new HashSet<String>(EnumerationUtils.toList(names)));
        //Initial store, check that correct stored map is created
        portletPreferences.store();
        //Actually "store" the stored prefs
        this.targetPrefs = new LinkedHashMap<String, IPortletPreference>(this.storedPrefs);
        names = portletPreferences.getNames();
        assertEquals(ImmutableSet.of("key", "key1", "key3"), new HashSet<String>(EnumerationUtils.toList(names)));
        portletPreferences.reset("key3");
        names = portletPreferences.getNames();
        assertEquals(ImmutableSet.of("key", "key1"), new HashSet<String>(EnumerationUtils.toList(names)));
        //Initial store, check that correct stored map is created
        portletPreferences.store();
        //Actually "store" the stored prefs
        this.targetPrefs = new LinkedHashMap<String, IPortletPreference>(this.storedPrefs);
        names = portletPreferences.getNames();
        assertEquals(ImmutableSet.of("key", "key1"), new HashSet<String>(EnumerationUtils.toList(names)));
    }
    // getMap() merges target values over base defaults; reset() restores the
    // base value for the affected key.
    @Test
    public void testGetMap() throws ReadOnlyException, ValidatorException, IOException {
        addPref(basePrefs, "key", false, new String[] { "default" });
        addPref(basePrefs, "key1", false, new String[] { "default" });
        Map<String, String[]> map = portletPreferences.getMap();
        assertEquals(ImmutableMap.of("key", ImmutableList.of("default"), "key1", ImmutableList.of("default")), ParameterMap.convertArrayMap(map));
        //Set a modified value
        portletPreferences.setValues("key", new String[] { "modified" });
        portletPreferences.setValues("key3", new String[] { "modified" });
        map = portletPreferences.getMap();
        assertEquals(ImmutableMap.of("key", ImmutableList.of("modified"), "key1", ImmutableList.of("default"), "key3", ImmutableList.of("modified")), ParameterMap.convertArrayMap(map));
        //Initial store, check that correct stored map is created
        portletPreferences.store();
        //Actually "store" the stored prefs
        this.targetPrefs = new LinkedHashMap<String, IPortletPreference>(this.storedPrefs);
        map = portletPreferences.getMap();
        assertEquals(ImmutableMap.of("key", ImmutableList.of("modified"), "key1", ImmutableList.of("default"), "key3", ImmutableList.of("modified")), ParameterMap.convertArrayMap(map));
        portletPreferences.reset("key3");
        map = portletPreferences.getMap();
        assertEquals(ImmutableMap.of("key", ImmutableList.of("modified"), "key1", ImmutableList.of("default")), ParameterMap.convertArrayMap(map));
        //Initial store, check that correct stored map is created
        portletPreferences.store();
        //Actually "store" the stored prefs
        this.targetPrefs = new LinkedHashMap<String, IPortletPreference>(this.storedPrefs);
        map = portletPreferences.getMap();
        assertEquals(ImmutableMap.of("key", ImmutableList.of("modified"), "key1", ImmutableList.of("default")), ParameterMap.convertArrayMap(map));
    }
    @Test(expected=ReadOnlyException.class)
    public void testResetReadOnly() throws ReadOnlyException, ValidatorException, IOException {
        addPref(basePrefs, "key", true, new String[] { "default" });
        portletPreferences.reset("key");
    }
    // reset() drops the target-level override so reads fall back to the base
    // default, and the next store persists nothing.
    @Test
    public void testResetToBase() throws ReadOnlyException, ValidatorException, IOException {
        addPref(basePrefs, "key", false, new String[] { "default" });
        //Set a modified value
        portletPreferences.setValues("key", new String[] { "modified" });
        //Initial store, check that correct stored map is created
        portletPreferences.store();
        //Actually "store" the stored prefs
        this.targetPrefs = new LinkedHashMap<String, IPortletPreference>(this.storedPrefs);
        assertEquals(1, this.storedPrefs.size());
        IPortletPreference pref = this.storedPrefs.get("key");
        assertNotNull(pref);
        assertEquals("key", pref.getName());
        assertArrayEquals(new String[] { "modified" }, pref.getValues());
        assertFalse(pref.isReadOnly());
        //Get the current value
        String[] values = portletPreferences.getValues("key", null);
        assertArrayEquals(new String[] { "modified" }, values);
        //Reset it
        portletPreferences.reset("key");
        //Get the default value
        values = portletPreferences.getValues("key", null);
        assertArrayEquals(new String[] { "default" }, values);
        //Do another store to verify nothing gets stored
        portletPreferences.store();
        //Actually "store" the stored prefs
        this.targetPrefs = new LinkedHashMap<String, IPortletPreference>(this.storedPrefs);
        assertEquals(0, this.storedPrefs.size());
    }
}
| |
/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hyracks.dataflow.common.comm.io.largeobject;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.junit.Before;
import org.junit.Test;
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
import org.apache.hyracks.api.comm.IFrameTupleAppender;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.comm.VSizeFrame;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.control.nc.resources.memory.FrameManager;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.FrameFixedFieldAppender;
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
/**
 * Tests for {@link FrameFixedFieldAppender}: tuples are appended one field at
 * a time into small output frames and the flushed frames must reassemble
 * byte-identical to the input, including fields/records larger than a frame.
 */
public class FrameFixedFieldTupleAppenderTest {
    static final int INPUT_BUFFER_SIZE = 4096;
    // Deliberately small so appends overflow and force flushes.
    static final int TEST_FRAME_SIZE = 256;
    FrameFixedFieldAppender appender;
    static ISerializerDeserializer[] fields = new ISerializerDeserializer[] {
            IntegerSerializerDeserializer.INSTANCE,
            UTF8StringSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE,
            UTF8StringSerializerDeserializer.INSTANCE,
    };
    static RecordDescriptor recordDescriptor = new RecordDescriptor(fields);
    static ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(recordDescriptor.getFieldCount());
    /**
     * IFrameWriter that compares every tuple it receives, in order and
     * byte-for-byte, against the tuples of the expected accessor.
     */
    class SequetialDataVerifier implements IFrameWriter {
        private final IFrameTupleAccessor accessor;
        private IFrameTupleAccessor innerAccessor;
        private int tid; // index of the next expected tuple
        public SequetialDataVerifier(IFrameTupleAccessor accessor) {
            this.accessor = accessor;
            this.innerAccessor = new FrameTupleAccessor(recordDescriptor);
        }
        @Override
        public void open() throws HyracksDataException {
            this.tid = 0;
        }
        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            innerAccessor.reset(buffer);
            for (int i = 0; i < innerAccessor.getTupleCount(); ++i) {
                validate(innerAccessor, i);
            }
        }
        // Compares received tuple i against expected tuple tid, byte-for-byte.
        private void validate(IFrameTupleAccessor innerAccessor, int i) {
            assertTrue(tid < accessor.getTupleCount());
            assertEquals(accessor.getTupleLength(tid), innerAccessor.getTupleLength(i));
            assertArrayEquals(Arrays.copyOfRange(accessor.getBuffer().array(), accessor.getTupleStartOffset(tid),
                    accessor.getTupleEndOffset(tid)),
                    Arrays.copyOfRange(innerAccessor.getBuffer().array(), innerAccessor.getTupleStartOffset(i),
                            innerAccessor.getTupleEndOffset(i)));
            tid++;
        }
        @Override
        public void fail() throws HyracksDataException {
            assert false;
        }
        @Override
        public void close() throws HyracksDataException {
            // All expected tuples must have been received.
            assertEquals(accessor.getTupleCount(), tid);
        }
    }
    @Before
    public void createAppender() throws HyracksDataException {
        appender = new FrameFixedFieldAppender(fields.length);
        FrameManager manager = new FrameManager(TEST_FRAME_SIZE);
        IFrame frame = new VSizeFrame(manager);
        appender.reset(frame, true);
    }
    /**
     * Pushes every field of every tuple through the appender, flushing to the
     * verifying writer whenever a field does not fit.
     */
    private void testProcess(IFrameTupleAccessor accessor) throws HyracksDataException {
        IFrameWriter writer = prepareValidator(accessor);
        writer.open();
        for (int tid = 0; tid < accessor.getTupleCount(); tid++) {
            for (int fid = 0; fid < fields.length; fid++) {
                if (!appender.appendField(accessor, tid, fid)) {
                    appender.flush(writer, true);
                    if (!appender.appendField(accessor, tid, fid)) {
                        // FIX: this failure was previously swallowed by an empty
                        // if-body. A field must fit into a freshly flushed frame;
                        // failing twice means the appender cannot make progress.
                        throw new HyracksDataException(
                                "appendField failed on an empty frame (tuple " + tid + ", field " + fid + ")");
                    }
                }
            }
        }
        appender.flush(writer, true);
        writer.close();
    }
    @Test
    public void testAppendFieldShouldSucceed() throws HyracksDataException {
        IFrameTupleAccessor accessor = prepareData(DATA_TYPE.NORMAL_RECORD);
        testProcess(accessor);
    }
    @Test
    public void testResetShouldWork() throws HyracksDataException {
        testAppendFieldShouldSucceed();
        appender.reset(new VSizeFrame(new FrameManager(TEST_FRAME_SIZE)), true);
        testAppendFieldShouldSucceed();
    }
    private IFrameWriter prepareValidator(IFrameTupleAccessor accessor) throws HyracksDataException {
        return new SequetialDataVerifier(accessor);
    }
    // Shapes of generated input data.
    enum DATA_TYPE {
        NORMAL_RECORD,
        ONE_FIELD_LONG,
        ONE_RECORD_LONG,
    }
    /** Fills an input frame with generated tuples of the given shape until it is full. */
    private IFrameTupleAccessor prepareData(DATA_TYPE type) throws HyracksDataException {
        IFrameTupleAccessor accessor = new FrameTupleAccessor(recordDescriptor);
        IFrameTupleAppender appender = new FrameTupleAppender(
                new VSizeFrame(new FrameManager(INPUT_BUFFER_SIZE)), true);
        int i = 0;
        do {
            switch (type) {
                case NORMAL_RECORD:
                    makeATuple(tupleBuilder, i++);
                    break;
                case ONE_FIELD_LONG:
                    makeASizeUpTuple(tupleBuilder, i++);
                    break;
                case ONE_RECORD_LONG:
                    makeABigObjectTuple(tupleBuilder, i++);
                    break;
            }
        } while (appender
                .append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0, tupleBuilder.getSize()));
        accessor.reset(appender.getBuffer());
        return accessor;
    }
    /** Small tuple: (i, "i", -i, "-i"). */
    private void makeATuple(ArrayTupleBuilder tupleBuilder, int i) throws HyracksDataException {
        tupleBuilder.reset();
        tupleBuilder.addField(fields[0], i);
        tupleBuilder.addField(fields[1], String.valueOf(i));
        tupleBuilder.addField(fields[2], -i);
        tupleBuilder.addField(fields[3], String.valueOf(-i));
    }
    private String makeALongString(int length, char ch) {
        char[] array = new char[length];
        Arrays.fill(array, ch);
        return new String(array);
    }
    /** Tuple whose second field grows exponentially (capped at the input buffer size). */
    private void makeASizeUpTuple(ArrayTupleBuilder tupleBuilder, int i) throws HyracksDataException {
        tupleBuilder.reset();
        tupleBuilder.addField(fields[0], i);
        tupleBuilder.addField(fields[1], makeALongString(Math.min(Math.abs(1 << i), INPUT_BUFFER_SIZE), (char) i));
        tupleBuilder.addField(fields[2], -i);
        tupleBuilder.addField(fields[3], String.valueOf(-i));
    }
    /** Tuple whose individual fields are small but whose total size can exceed a frame. */
    private void makeABigObjectTuple(ArrayTupleBuilder tupleBuilder, int i) throws HyracksDataException {
        tupleBuilder.reset();
        tupleBuilder.addField(fields[0], i);
        tupleBuilder.addField(fields[1], makeALongString(Math.min(i * 20, TEST_FRAME_SIZE), (char) i));
        tupleBuilder.addField(fields[2], -i);
        tupleBuilder.addField(fields[3], makeALongString(Math.min(i * 20, TEST_FRAME_SIZE), (char) i));
    }
    @Test
    public void testAppendLargeFieldShouldSucceed() throws HyracksDataException {
        IFrameTupleAccessor accessor = prepareData(DATA_TYPE.ONE_FIELD_LONG);
        testProcess(accessor);
    }
    @Test
    public void testAppendSmallFieldButLargeObjectWithShouldSucceed() throws HyracksDataException {
        IFrameTupleAccessor accessor = prepareData(DATA_TYPE.ONE_RECORD_LONG);
        testProcess(accessor);
    }
}
| |
package edu.mit.cgao.game2048;
import android.app.DialogFragment;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.TableLayout;
import android.widget.TextView;
import java.util.Random;
/**
 * Main (and only) activity of a 4x4 2048 clone. Holds the board state in
 * {@link #matrix}, mirrors it into a TableLayout of {@code CellTextView}s, and
 * reacts to swipe gestures via {@link ActivitySwipeDetector#onSwipe} callbacks.
 *
 * Fix in this revision: {@code gameEngine} failed to mark a tile merged when
 * arr[1] merged directly into arr[3], allowing a second merge into the same
 * tile within one move (e.g. row [4,2,0,2] swiped right became [0,0,0,8]
 * instead of [0,0,4,4]). The missing {@code changed[3] = true;} is added, and
 * {@code changed[2]} is set in the parallel branch for consistency.
 */
public class MainActivity extends ActionBarActivity {
    // Resource IDs of the 16 cell views, indexed [row][col].
    private int[][] cellIDs = new int[4][4];
    // Board values (0 = empty cell).
    private int[][] matrix = new int[4][4];
    // Set by gameEngine when a move changed the board; a new tile spawns only then.
    private boolean newSeed = false;
    private int score = 0;
    // Count of empty cells; maintained by setMatrixValue.
    private int zeroCounts = 16;
    private ActivitySwipeDetector swipeDetector;
    private TableLayout myTable;
    // Text/background color per tile rank (index = log2 of the tile value).
    private int[] textColors = new int[12];
    private int[] bgColors = new int[12];
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Tile colors: dark text on light tiles for small values, white on
        // saturated tiles for large ones.
        textColors[0] = Color.BLACK;
        textColors[1] = Color.BLACK;
        textColors[2] = Color.BLACK;
        textColors[3] = Color.BLACK;
        textColors[4] = Color.BLACK;
        textColors[5] = Color.WHITE;
        textColors[6] = Color.WHITE;
        textColors[7] = Color.WHITE;
        textColors[8] = Color.WHITE;
        textColors[9] = Color.WHITE;
        textColors[10] = Color.WHITE;
        textColors[11] = Color.WHITE;
        bgColors[0] = Color.rgb(250,235,215);
        bgColors[1] = Color.rgb(255,235,205);
        bgColors[2] = Color.rgb(255,228,196);
        bgColors[3] = Color.rgb(245,222,179);
        bgColors[4] = Color.rgb(255,192,203);
        bgColors[5] = Color.rgb(255,105,180);
        bgColors[6] = Color.rgb(255,20,147);
        bgColors[7] = Color.rgb(199,21,133);
        bgColors[8] = Color.rgb(128,0,128);
        bgColors[9] = Color.rgb(205,92,92);
        bgColors[10] = Color.rgb(255,51,51);
        bgColors[11] = Color.rgb(255,0,0);
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        initCellIDs();
        initMatrix();
        writeTextView();
        swipeDetector = new ActivitySwipeDetector(this);
        myTable = (TableLayout)findViewById(R.id.tableLayout);
        myTable.setOnTouchListener(swipeDetector);
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_refresh){
            //showResetDialog();
            showGameOverDialog();
        }
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
    /** Returns a uniformly random ordinal in [1, zeroCounts] used to pick an empty cell. */
    private int random(){
        int max = zeroCounts;
        int min = 1;
        Random rand = new Random();
        int randomNum = rand.nextInt((max - min) + 1) + min;
        return randomNum;
    }
    /**
     * Finds the rd-th empty cell in row-major order.
     *
     * @param rd 1-based ordinal among the empty cells
     * @return {row, col} of that cell, or {-1, -1} if fewer than rd cells are empty
     */
    private int[] findRandom(int rd){
        int count = 0;
        for (int i = 0; i < 4; ++i){
            for (int j = 0; j < 4; ++j){
                if (matrix[i][j] == 0){
                    ++count;
                    if(count == rd){
                        int[] result = {i,j};
                        return result;
                    }
                }
            }
        }
        int[] result = {-1,-1};
        return result;
    }
    /** Clears the board, spawns the first "2" tile in a random cell, resets the score. */
    private void initMatrix() {
        for (int i = 0; i < 4; ++i) {
            for (int j = 0; j < 4; ++j) {
                setMatrixValue(0, i, j);
            }
        }
        int rd = random();
        int[] index = findRandom(rd);
        setMatrixValue(2,index[0],index[1]);
        newSeed = false;
        setScore(0);
    }
    public void resetGame(View view){
        initMatrix();
    }
    /** Resolves the R.id of each cell view ("cell00".."cell33") into cellIDs. */
    private void initCellIDs(){
        for (int i = 0; i < 4; ++i)
            for (int j = 0; j < 4; ++j) {
                String cellId = "cell" + i + j;
                cellIDs[i][j] = getResources().getIdentifier(cellId, "id", getPackageName());
            }
    }
    /** Repaints every cell's text from the matrix (empty cells show blank). */
    private void writeTextView() {
        CellTextView textViewToChange;
        for(int i = 0; i<4; ++i){
            for(int j = 0; j < 4; ++j) {
                textViewToChange = (CellTextView) findViewById(cellIDs[i][j]);
                if(textViewToChange != null){
                    if (matrix[i][j] != 0){
                        textViewToChange.setText(String.valueOf(matrix[i][j]));
                    }
                    else{
                        textViewToChange.setText("");
                    }
                }
            }
        }
    }
    /**
     * Writes a cell value, keeps zeroCounts consistent, and updates the cell
     * view's text and colors (color index = log2 of the value).
     * NOTE(review): unlike writeTextView, this does not null-check the view.
     */
    private void setMatrixValue(int value, int i, int j){
        if(matrix[i][j] == 0 && value != 0) --zeroCounts;
        if(matrix[i][j] != 0 && value == 0) ++zeroCounts;
        matrix[i][j] = value;
        CellTextView textViewToChange = (CellTextView) findViewById(cellIDs[i][j]);
        if (matrix[i][j] != 0){
            textViewToChange.setText(String.valueOf(matrix[i][j]));
        }
        else{
            textViewToChange.setText("");
        }
        int colorIndex = 0;
        if (value != 0){
            colorIndex = (int) (Math.log(value)/Math.log(2));
        }
        textViewToChange.setTextColor(textColors[colorIndex]);
        textViewToChange.setBackgroundColor(bgColors[colorIndex]);
    }
    private void setScore(int score_){
        score = score_;
        TextView textViewToChange = (TextView) findViewById(R.id.score);
        textViewToChange.setText("Score: \n " + score);
    }
    /**
     * Collapses one line of four cells toward index 3 (the swipe direction),
     * applying 2048 rules: tiles slide over empties and equal neighbors merge,
     * but each tile may merge at most once per move. changed[k] records that
     * arr[k] was produced by a merge this move; setScore is called on every
     * merge and newSeed is set whenever the line changes.
     */
    private void gameEngine(int[] arr){
        boolean[] changed = {false,false,false,false};
        //arr[3]: no changes
        //arr[2]
        if (arr[2] != 0){
            if (arr[3] == 0){
                arr[3] = arr[2];
                arr[2] = 0;
                newSeed = true;
            }
            else{
                if (arr[3] == arr[2]){
                    arr[3] *= 2;
                    setScore(score + arr[3]);
                    arr[2] = 0;
                    changed[3] = true;
                    newSeed = true;
                }
            }
        }
        //arr[1]
        if (arr[1] != 0){
            if (arr[2] == 0 && arr[3] == 0){
                arr[3] = arr[1];
                arr[1] = 0;
                newSeed = true;
            }
            else if (arr[2] == 0){ //arr[3]!=0
                if (arr[3] == arr[1] && !changed[3]){
                    arr[3] *= 2;
                    setScore(score + arr[3]);
                    // FIX: mark the merge so arr[0] cannot merge into this tile
                    // again in the same move (e.g. [4,2,0,2] must not become 8).
                    changed[3] = true;
                }
                else{
                    arr[2] = arr[1];
                }
                arr[1] = 0;
                newSeed = true;
            }
            else{// arr[3]!=0, arr[2]!=0
                if (arr[2] == arr[1]){
                    arr[2] *= 2;
                    setScore(score + arr[2]);
                    changed[2] = true;
                    arr[1] = 0;
                    newSeed = true;
                }
            }
        }
        //arr[0]
        if (arr[0]!=0){
            if (arr[1] == 0 && arr[2] == 0 && arr[3] == 0){
                arr[3] = arr[0];
                arr[0] = 0;
                newSeed = true;
            }
            else if(arr[1] == 0 && arr[2] ==0){ //arr[3]!=0
                if (arr[3] == arr[0] && !changed[3]){
                    arr[3] *= 2;
                    setScore(score + arr[3]);
                    changed[3] = true;
                }
                else{
                    arr[2] = arr[0];
                }
                arr[0] = 0;
                newSeed = true;
            }
            else if(arr[1] == 0 ){ //arr[2]!=0, arr[3]!=0
                if(arr[2] == arr[0] && !changed[2]){
                    arr[2] *= 2;
                    setScore(score + arr[2]);
                    changed[2] = true; // consistency; no later tile reads this
                }
                else{
                    arr[1] = arr[0];
                }
                arr[0] = 0;
                newSeed = true;
            }
            else { //arr[1]!=0, arr[2]!=0, arr[3]!=0
                if(arr[1] == arr[0]){
                    arr[1] *= 2;
                    setScore(score + arr[1]);
                    //changed[1] = true; //not necessary
                    arr[0] = 0;
                    newSeed = true;
                }
            }
        }
    }
    /** Game is over when no cell is empty and no two neighbors (in any row or column) are equal. */
    private boolean isGameOver(){
        if (zeroCounts != 0) return false;
        for (int i = 1; i < 4; ++i){
            for (int j = 1; j < 4; ++j){
                if (matrix[i][j] == matrix[i][j-1]) return false;
            }
        }
        for (int i = 1; i < 4; ++i){
            for (int j = 0 ; j < 4; ++j){
                if (matrix[i][j] == matrix[i-1][j]) return false;
            }
        }
        return true;
    }
    /**
     * Handles a swipe: indicator 1=up, 2=down, 3=left, 4=right (each case
     * feeds the line into gameEngine oriented toward the swipe direction).
     * A new "2" tile spawns only if the move changed the board.
     * NOTE(review): the game-over check runs before the move is applied.
     */
    public void onSwipe(int indicator){
        if (isGameOver()){
            showGameOverDialog();
        }
        switch(indicator){
            case 1:
                newSeed = false;
                for (int j = 0; j < 4; ++j ){
                    int[] arr = {matrix[3][j],matrix[2][j],matrix[1][j],matrix[0][j]};
                    gameEngine(arr);
                    setMatrixValue(arr[0],3,j);
                    setMatrixValue(arr[1],2,j);
                    setMatrixValue(arr[2],1,j);
                    setMatrixValue(arr[3],0,j);
                }
                if(newSeed) {
                    int rd = random();
                    int[] index = findRandom(rd);
                    setMatrixValue(2, index[0], index[1]);
                    newSeed = false;
                }
                break;
            case 2:
                newSeed = false;
                for (int j = 0; j < 4; ++j ){
                    int[] arr = {matrix[0][j],matrix[1][j],matrix[2][j],matrix[3][j]};
                    gameEngine(arr);
                    setMatrixValue(arr[0],0,j);
                    setMatrixValue(arr[1],1,j);
                    setMatrixValue(arr[2],2,j);
                    setMatrixValue(arr[3],3,j);
                }
                if(newSeed){
                    int rd = random();
                    int[] index = findRandom(rd);
                    setMatrixValue(2,index[0],index[1]);
                    newSeed = false;
                }
                break;
            case 3:
                newSeed = false;
                for (int i = 0; i < 4; ++i ){
                    int[] arr = {matrix[i][3],matrix[i][2],matrix[i][1],matrix[i][0]};
                    gameEngine(arr);
                    setMatrixValue(arr[0],i,3);
                    setMatrixValue(arr[1],i,2);
                    setMatrixValue(arr[2],i,1);
                    setMatrixValue(arr[3],i,0);
                }
                if(newSeed){
                    int rd = random();
                    int[] index = findRandom(rd);
                    setMatrixValue(2,index[0],index[1]);
                    newSeed = false;
                }
                break;
            case 4:
                newSeed = false;
                for (int i = 0; i < 4; ++i ){
                    int[] arr = {matrix[i][0],matrix[i][1],matrix[i][2],matrix[i][3]};
                    gameEngine(arr);
                    setMatrixValue(arr[0],i,0);
                    setMatrixValue(arr[1],i,1);
                    setMatrixValue(arr[2],i,2);
                    setMatrixValue(arr[3],i,3);
                }
                if(newSeed){
                    int rd = random();
                    int[] index = findRandom(rd);
                    setMatrixValue(2,index[0],index[1]);
                    newSeed = false;
                }
                break;
            default:
                break;
        }
    }
    void showResetDialog() {
        String reset_title = getString(R.string.reset_title);
        String reset_msg = getString(R.string.reset_msg);
        DialogFragment newFragment = MainDialogFragment.newInstance
                (reset_title, reset_msg);
        newFragment.show(getFragmentManager(), "dialogReset");
    }
    void showGameOverDialog(){
        String gameover_title = getString(R.string.gameover_title);
        String gameover_msg = getString(R.string.gameover_msg,score);
        DialogFragment newFragment = MainDialogFragment.newInstance(gameover_title, gameover_msg);
        newFragment.show(getFragmentManager(), "dialogGameover");
    }
    /** Dialog "OK" callback: restart the game. */
    public void doOkClick(){
        initMatrix();
    }
    /** Dialog "Cancel" callback: intentionally a no-op. */
    public void doCancelClick(){
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.component;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Collection;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.*;
import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.QueryValueSource;
import org.apache.lucene.queries.function.valuesource.FieldCacheSource;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.StatsParams;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.request.SolrQueryRequest; // jdocs
import org.apache.solr.request.DocValuesStats;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.QParser;
import org.apache.solr.search.QParserPlugin;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SyntaxError;
/**
* Models all of the information associated with a single {@link StatsParams#STATS_FIELD}
* instance.
*
* @see StatsComponent
*/
public class StatsField {
  private final SolrIndexSearcher searcher;
  private final ResponseBuilder rb;
  private final String originalParam; // for error messages
  private final SolrParams localParams;
  private final ValueSource valueSource; // may be null if simple field stats
  private final SchemaField schemaField; // may be null if function/query stats
  private final String key; // response key for this stats instance (may differ from the field name)
  private final boolean calcDistinct; // TODO: put this inside localParams ? SOLR-6349 ?
  private final String[] facets; // facet fields to subdivide stats by; never null, may be empty
  private final List<String> excludeTagList; // fq tags to exclude when computing the base DocSet
  /**
   * Parses the stats param -- either a bare field name, or a localparams-prefixed
   * query/function -- and initializes exactly one of {@link #schemaField} /
   * {@link #valueSource}.
   *
   * @param rb the current request/response
   * @param statsParam the raw {@link StatsParams#STATS_FIELD} string
   */
  public StatsField(ResponseBuilder rb, String statsParam) {
    this.rb = rb;
    this.searcher = rb.req.getSearcher();
    this.originalParam = statsParam;
    SolrParams params = rb.req.getParams();
    try {
      SolrParams localParams = QueryParsing.getLocalParams(originalParam, params);
      if (null == localParams) {
        // simplest possible input: bare string (field name)
        ModifiableSolrParams customParams = new ModifiableSolrParams();
        customParams.add(QueryParsing.V, originalParam);
        localParams = customParams;
      }
      this.localParams = localParams;
      String parserName = localParams.get(QueryParsing.TYPE);
      SchemaField sf = null;
      ValueSource vs = null;
      if ( StringUtils.isBlank(parserName) ) {
        // basic request for field stats
        sf = searcher.getSchema().getField(localParams.get(QueryParsing.V));
      } else {
        // we have a non trivial request to compute stats over a query (or function)
        // NOTE we could use QParser.getParser(...) here, but that would redundantly
        // reparse everything. ( TODO: refactor a common method in QParser ?)
        QParserPlugin qplug = rb.req.getCore().getQueryPlugin(parserName);
        QParser qp = qplug.createParser(localParams.get(QueryParsing.V),
                                        localParams, params, rb.req);
        // figure out what type of query we are dealing with, get the most direct ValueSource
        vs = extractValueSource(qp.parse());
        // if this ValueSource directly corresponds to a SchemaField, act as if
        // we were asked to compute stats on it directly
        // ie: "stats.field={!func key=foo}field(foo)" == "stats.field=foo"
        sf = extractSchemaField(vs, searcher.getSchema());
        if (null != sf) {
          vs = null;
        }
      }
      assert ( (null == vs) ^ (null == sf) ) : "exactly one of vs & sf must be null";
      this.schemaField = sf;
      this.valueSource = vs;
    } catch (SyntaxError e) {
      throw new SolrException(ErrorCode.BAD_REQUEST, "Unable to parse " +
          StatsParams.STATS_FIELD + ": " + originalParam + " due to: "
          + e.getMessage(), e);
    }
    // allow explicit setting of the response key via localparams...
    this.key = localParams.get(CommonParams.OUTPUT_KEY,
                               // default to the main param value...
                               localParams.get(CommonParams.VALUE,
                                               // default to entire original param str.
                                               originalParam));
    this.calcDistinct = null == schemaField
        ? params.getBool(StatsParams.STATS_CALC_DISTINCT, false)
        : params.getFieldBool(schemaField.getName(), StatsParams.STATS_CALC_DISTINCT, false);
    String[] facets = params.getFieldParams(key, StatsParams.STATS_FACET);
    this.facets = (null == facets) ? new String[0] : facets;
    // figure out if we need a special base DocSet
    String excludeStr = localParams.get(CommonParams.EXCLUDE);
    this.excludeTagList = (null == excludeStr)
        ? Collections.<String>emptyList()
        : StrUtils.splitSmart(excludeStr,',');
    assert ( (null == this.valueSource) ^ (null == this.schemaField) )
        : "exactly one of valueSource & schemaField must be null";
  }
  /**
   * Inspects a {@link Query} to see if it directly maps to a {@link ValueSource},
   * and if so returns it -- otherwise wraps it as needed.
   *
   * @param q Query whose scores we have been asked to compute stats of
   * @return a ValueSource to use for computing the stats
   */
  private static ValueSource extractValueSource(Query q) {
    return (q instanceof FunctionQuery) ?
        // Common case: we're wrapping a func, so we can directly pull out ValueSource
        ((FunctionQuery) q).getValueSource() :
        // asked to compute stats over a query, wrap it up as a ValueSource
        new QueryValueSource(q, 0.0F);
  }
  /**
   * Inspects a {@link ValueSource} to see if it directly maps to a {@link SchemaField},
   * and if so returns it.
   *
   * @param vs ValueSource we've been asked to compute stats of
   * @param schema The Schema to use
   * @return Corresponding {@link SchemaField} or null if the ValueSource is more complex
   * @see FieldCacheSource
   */
  private static SchemaField extractSchemaField(ValueSource vs, IndexSchema schema) {
    if (vs instanceof FieldCacheSource) {
      String fieldName = ((FieldCacheSource)vs).getField();
      return schema.getField(fieldName);
    }
    return null;
  }
  /**
   * The key to be used when referring to this {@link StatsField} instance in the
   * response to clients.
   */
  public String getOutputKey() {
    return key;
  }
  /**
   * Computes a base {@link DocSet} for the current request to be used
   * when computing global stats for the local index.
   *
   * This is typically the same as the main DocSet for the {@link ResponseBuilder}
   * unless {@link CommonParams#TAG tag}ged filter queries have been excluded using
   * the {@link CommonParams#EXCLUDE ex} local param
   */
  public DocSet computeBaseDocSet() throws IOException {
    DocSet docs = rb.getResults().docSet;
    Map<?,?> tagMap = (Map<?,?>) rb.req.getContext().get("tags");
    if (excludeTagList.isEmpty() || null == tagMap) {
      // either the exclude list is empty, or there
      // aren't any tagged filters to exclude anyway.
      return docs;
    }
    // identity map: the excluded Query instances themselves are the keys
    IdentityHashMap<Query,Boolean> excludeSet = new IdentityHashMap<Query,Boolean>();
    for (String excludeTag : excludeTagList) {
      Object olst = tagMap.get(excludeTag);
      // tagMap has entries of List<String,List<QParser>>, but subject to change in the future
      if (!(olst instanceof Collection)) continue;
      for (Object o : (Collection<?>)olst) {
        if (!(o instanceof QParser)) continue;
        QParser qp = (QParser)o;
        try {
          excludeSet.put(qp.getQuery(), Boolean.TRUE);
        } catch (SyntaxError e) {
          // this shouldn't be possible since the request should have already
          // failed when attempting to execute the query, but just in case...
          throw new SolrException(ErrorCode.BAD_REQUEST, "Excluded query can't be parsed: " +
              originalParam + " due to: " + e.getMessage(), e);
        }
      }
    }
    if (excludeSet.size() == 0) return docs;
    // rebuild the doc set from scratch, skipping the excluded queries
    List<Query> qlist = new ArrayList<Query>();
    // add the base query
    if (!excludeSet.containsKey(rb.getQuery())) {
      qlist.add(rb.getQuery());
    }
    // add the filters
    if (rb.getFilters() != null) {
      for (Query q : rb.getFilters()) {
        if (!excludeSet.containsKey(q)) {
          qlist.add(q);
        }
      }
    }
    // get the new base docset for this facet
    return searcher.getDocSet(qlist);
  }
  /**
   * Computes the {@link StatsValues} for this {@link StatsField} relative to the
   * specified {@link DocSet}
   * @see #computeBaseDocSet
   */
  public StatsValues computeLocalStatsValues(DocSet base) throws IOException {
    if (null != schemaField
        && (schemaField.multiValued() || schemaField.getType().multiValuedFieldCache())) {
      // TODO: should this also be used for single-valued string fields? (should work fine)
      return DocValuesStats.getCounts(searcher, this, base, facets);
    } else {
      // either a single valued field we pull from FieldCache, or an explicit
      // function ValueSource
      return computeLocalValueSourceStats(base);
    }
  }
  /**
   * Accumulates stats (and per-facet stats) by iterating the base DocSet in
   * doc-id order, advancing the leaf reader context as needed.
   */
  private StatsValues computeLocalValueSourceStats(DocSet base) throws IOException {
    IndexSchema schema = searcher.getSchema();
    final StatsValues allstats = StatsValuesFactory.createStatsValues(this);
    List<FieldFacetStats> facetStats = new ArrayList<>();
    for( String facetField : facets ) {
      SchemaField fsf = schema.getField(facetField);
      if ( fsf.multiValued()) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
            "Stats can only facet on single-valued fields, not: " + facetField );
      }
      facetStats.add(new FieldFacetStats(searcher, fsf, this));
    }
    final Iterator<LeafReaderContext> ctxIt = searcher.getIndexReader().leaves().iterator();
    LeafReaderContext ctx = null;
    for (DocIterator docsIt = base.iterator(); docsIt.hasNext(); ) {
      final int doc = docsIt.nextDoc();
      if (ctx == null || doc >= ctx.docBase + ctx.reader().maxDoc()) {
        // advance to the leaf segment containing this doc id
        do {
          ctx = ctxIt.next();
        } while (ctx == null || doc >= ctx.docBase + ctx.reader().maxDoc());
        assert doc >= ctx.docBase;
        // propagate the context among accumulators.
        allstats.setNextReader(ctx);
        for (FieldFacetStats f : facetStats) {
          f.setNextReader(ctx);
        }
      }
      // accumulate (doc ids are segment-relative below this point)
      allstats.accumulate(doc - ctx.docBase);
      for (FieldFacetStats f : facetStats) {
        f.facet(doc - ctx.docBase);
      }
    }
    for (FieldFacetStats f : facetStats) {
      allstats.addFacet(f.name, f.facetStatsValues);
    }
    return allstats;
  }
  /**
   * The searcher that should be used for processing local stats
   * @see SolrQueryRequest#getSearcher
   */
  public SolrIndexSearcher getSearcher() {
    // see AbstractStatsValues.setNextReader
    return searcher;
  }
  /**
   * The {@link SchemaField} whose results these stats are computed over, may be null
   * if the stats are computed over the results of a function or query
   *
   * @see #getValueSource
   */
  public SchemaField getSchemaField() {
    return schemaField;
  }
  /**
   * The {@link ValueSource} of a function or query whose results these stats are computed
   * over, may be null if the stats are directly over a {@link SchemaField}
   *
   * @see #getSchemaField
   */
  public ValueSource getValueSource() {
    return valueSource;
  }
  /**
   * Whether or not the effective value of the {@link StatsParams#STATS_CALC_DISTINCT} param
   * is true or false for this StatsField
   */
  public boolean getCalcDistinct() {
    return calcDistinct;
  }
  public String toString() {
    return "StatsField<" + originalParam + ">";
  }
}
| |
package nl.esciencecenter.aether.impl.stacking.lrmc;
import java.io.IOException;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import nl.esciencecenter.aether.ConfigurationException;
import nl.esciencecenter.aether.MessageUpcall;
import nl.esciencecenter.aether.NoSuchPropertyException;
import nl.esciencecenter.aether.PortType;
import nl.esciencecenter.aether.ReadMessage;
import nl.esciencecenter.aether.ReceivePortIdentifier;
import nl.esciencecenter.aether.ReceiveTimedOutException;
import nl.esciencecenter.aether.SendPortIdentifier;
import nl.esciencecenter.aether.util.ThreadPool;
/**
 * ReceivePort implementation for the LRMC multicast stack. A dedicated thread
 * (started in the constructor) pulls messages from the {@link Multicaster} and
 * either dispatches them to the configured {@link MessageUpcall}, or queues a
 * single message at a time for a blocking downcall {@code receive()}.
 *
 * Thread-safety: mutable state ({@code message}, {@code closed},
 * {@code messageIsAvailable}, {@code upcallsEnabled}) is guarded by the port's
 * own monitor, using wait/notifyAll for hand-off between the receive thread
 * and downcall callers.
 */
public class LRMCReceivePort implements nl.esciencecenter.aether.ReceivePort, Runnable {
    private final LRMCReceivePortIdentifier identifier;
    // Optional upcall handler; when non-null, downcall receive() is forbidden.
    private final MessageUpcall upcall;
    private final Multicaster om;
    // Message currently being delivered; cleared again in doFinish().
    private LRMCReadMessage message = null;
    private boolean closed = false;
    private boolean messageIsAvailable = false;
    private boolean upcallsEnabled = false;
    /**
     * Creates the port and immediately starts its receive thread.
     *
     * @throws ConfigurationException if an upcall is supplied but the port type
     *         does not have the {@link PortType#RECEIVE_AUTO_UPCALLS} capability
     */
    public LRMCReceivePort(Multicaster om, LRMCAether ibis, MessageUpcall upcall,
            Properties properties) throws IOException {
        this.om = om;
        identifier = new LRMCReceivePortIdentifier(ibis.identifier(), om.name);
        this.upcall = upcall;
        if (upcall != null
                && !om.portType.hasCapability(PortType.RECEIVE_AUTO_UPCALLS)) {
            throw new ConfigurationException(
                    "no connection upcalls requested for this port type");
        }
        ThreadPool.createNew(this, "ReceivePort");
    }
    /** Closes the port and wakes up any thread blocked in receive(). */
    public synchronized void close() throws IOException {
        closed = true;
        om.removeReceivePort();
        notifyAll();
    }
    // The timeout is ignored: close() does not block here.
    public void close(long timeoutMillis) throws IOException {
        close();
    }
    public SendPortIdentifier[] connectedTo() {
        throw new ConfigurationException("connection downcalls not supported");
    }
    public void disableConnections() {
        // connection upcalls are not supported; deliberately a no-op
        // throw new IbisConfigurationException("connection upcalls not supported");
    }
    public synchronized void disableMessageUpcalls() {
        upcallsEnabled = false;
    }
    public void enableConnections() {
        // connection upcalls are not supported; deliberately a no-op
        // throw new IbisConfigurationException("connection upcalls not supported");
    }
    /** Enables upcalls and wakes the receive thread waiting in doUpcall(). */
    public synchronized void enableMessageUpcalls() {
        upcallsEnabled = true;
        notifyAll();
    }
    public PortType getPortType() {
        return om.portType;
    }
    public ReceivePortIdentifier identifier() {
        return identifier;
    }
    public SendPortIdentifier[] lostConnections() {
        throw new ConfigurationException("connection downcalls not supported");
    }
    public String name() {
        return identifier.name;
    }
    public SendPortIdentifier[] newConnections() {
        throw new ConfigurationException("connection downcalls not supported");
    }
    /** Non-blocking receive: returns the pending message, or null if none. */
    public synchronized ReadMessage poll() throws IOException {
        if (closed) {
            throw new IOException("port is closed");
        }
        if (messageIsAvailable) {
            messageIsAvailable = false;
            return message;
        }
        return null;
    }
    /** Blocking receive without a timeout. */
    public ReadMessage receive() throws IOException {
        return receive(0);
    }
    /**
     * Blocking receive with an optional timeout (0 means wait forever).
     *
     * @throws ReceiveTimedOutException when the timeout expires without a message
     * @throws IOException for a negative timeout or a closed port
     */
    public ReadMessage receive(long timeout) throws IOException {
        if (upcall != null) {
            throw new ConfigurationException(
                    "Configured Receiveport for upcalls, downcall not allowed");
        }
        boolean hasTimeout = false;
        if (timeout < 0) {
            throw new IOException("timeout must be a non-negative number");
        }
        if (timeout > 0 && !om.portType.hasCapability(PortType.RECEIVE_TIMEOUT)) {
            throw new ConfigurationException(
                    "This port is not configured for receive() with timeout");
        }
        synchronized(this) {
            while (! messageIsAvailable ) {
                if (closed) {
                    throw new IOException("port is closed");
                }
                if (timeout > 0) {
                    // timed wait: subtract the elapsed time so spurious
                    // wakeups don't extend the overall deadline
                    hasTimeout = true;
                    long tm = System.currentTimeMillis();
                    try {
                        wait(timeout);
                    } catch(Throwable e) {
                        // ignored
                    }
                    long tm1 = System.currentTimeMillis();
                    timeout -= (tm1 - tm);
                } else if (hasTimeout) {
                    // timeout expired (remaining budget dropped to <= 0)
                    throw new ReceiveTimedOutException();
                } else {
                    try {
                        wait();
                    } catch(Throwable e) {
                        // ignored
                    }
                }
            }
            messageIsAvailable = false;
            return message;
        }
    }
    public String getManagementProperty(String key)
            throws NoSuchPropertyException {
        throw new NoSuchPropertyException("No properties in LRMCReceivePort");
    }
    public Map<String, String> managementProperties() {
        return new HashMap<String, String>();
    }
    public void printManagementProperties(PrintStream stream) {
        // no management properties to print
    }
    public void setManagementProperties(Map<String, String> properties)
            throws NoSuchPropertyException {
        throw new NoSuchPropertyException("No properties in LRMCReceivePort");
    }
    public void setManagementProperty(String key, String value)
            throws NoSuchPropertyException {
        throw new NoSuchPropertyException("No properties in LRMCReceivePort");
    }
    /** Called when the current message is finished; unblocks the receive loop. */
    synchronized void doFinish() {
        message = null;
        notifyAll();
    }
    /**
     * Delivers one message to the upcall handler.
     *
     * @return true when the handler itself finished the message — presumably
     *         finish() inside an upcall hands the receive loop to a fresh
     *         thread (TODO confirm against LRMCReadMessage), so the caller
     *         should let this thread terminate; false otherwise.
     */
    private boolean doUpcall(LRMCReadMessage msg) {
        synchronized(this) {
            // Wait until upcalls are enabled.
            while (! upcallsEnabled) {
                try {
                    wait();
                } catch(InterruptedException e) {
                    // ignored
                }
            }
        }
        try {
            msg.setInUpcall(true);
            upcall.upcall(msg);
        } catch(IOException e) {
            if (! msg.isFinished) {
                msg.finish(e);
                return false;
            }
        } catch(ClassNotFoundException e) {
            if (! msg.isFinished) {
                IOException ioex =
                        new IOException("Got ClassNotFoundException: "
                                + e.getMessage());
                ioex.initCause(e);
                msg.finish(ioex);
                return false;
            }
            return true;
        } catch(Throwable e) {
            // NOTE(review): terminates the whole JVM without logging the error;
            // consider reporting the Throwable before exiting.
            System.exit(1);
        } finally {
            msg.setInUpcall(false);
        }
        if (! msg.isFinished) {
            try {
                msg.finish();
            } catch(IOException e) {
                msg.finish(e);
            }
        return false;
        }
        return true;
    }
    /** Receive loop: pulls messages from the multicaster until closed. */
    public void run() {
        while (true) {
            if (closed) {
                return;
            }
            LRMCReadMessage m = om.receive();
            if (m == null) {
                return;
            }
            synchronized(this) {
                // wait until the previous message has been finished
                while (message != null) {
                    try {
                        wait();
                    } catch(Throwable e) {
                        // ignored
                    }
                }
                messageIsAvailable = true;
                message = m;
                if (upcall == null) {
                    notifyAll();
                }
            }
            if (upcall != null) {
                if (doUpcall(m)) {
                    // The upcall method called finish.
                    // return this thread.
                    return;
                }
            }
        }
    }
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.application.options;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.openapi.options.UnnamedConfigurable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Splitter;
import com.intellij.platform.ModuleAttachProcessor;
import com.intellij.ui.CollectionListModel;
import com.intellij.ui.ListSpeedSearch;
import com.intellij.ui.components.JBList;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author yole
*/
/**
 * Base class for configurables that show one settings pane per module (plus an
 * optional project-wide pane). With several modules, a master-detail UI is
 * built: a module list on the left, a card panel of per-module panes on the
 * right.
 */
public abstract class ModuleAwareProjectConfigurable<T extends UnnamedConfigurable> implements SearchableConfigurable,
                                                                                              Configurable.NoScroll {
  @NotNull
  private final Project myProject;
  private final String myDisplayName;
  private final String myHelpTopic;
  // Populated in createComponent(); a null key holds the project-wide configurable.
  private final Map<Module, T> myModuleConfigurables = new HashMap<>();
  // Card-layout key for the project-wide entry (module cards are keyed by module name).
  private final static String PROJECT_ITEM_KEY = "thisisnotthemoduleyouarelookingfor";
  public ModuleAwareProjectConfigurable(@NotNull Project project, String displayName, @NonNls String helpTopic) {
    myProject = project;
    myDisplayName = displayName;
    myHelpTopic = helpTopic;
  }
  @Nls
  @Override
  public String getDisplayName() {
    return myDisplayName;
  }
  @Override
  public String getHelpTopic() {
    return myHelpTopic;
  }
  /** Override to hide specific modules from the list; default shows every module. */
  protected boolean isSuitableForModule(@NotNull Module module) {
    return true;
  }
  @Override
  public JComponent createComponent() {
    // Default (template) project: only a single project-level pane makes sense.
    if (myProject.isDefault()) {
      T configurable = createDefaultProjectConfigurable();
      if (configurable != null) {
        myModuleConfigurables.put(null, configurable);
        return configurable.createComponent();
      }
    }
    final List<Module> modules = ContainerUtil.filter(ModuleAttachProcessor.getSortedModules(myProject),
                                                     module -> isSuitableForModule(module));
    final T projectConfigurable = createProjectConfigurable();
    // Exactly one module and no project-wide pane: skip the master-detail UI.
    if (modules.size() == 1 && projectConfigurable == null) {
      Module module = modules.get(0);
      final T configurable = createModuleConfigurable(module);
      myModuleConfigurables.put(module, configurable);
      return configurable.createComponent();
    }
    // Master-detail layout: module list (left) drives a card panel (right).
    final Splitter splitter = new Splitter(false, 0.25f);
    CollectionListModel<Module> listDataModel = new CollectionListModel<>(modules);
    final JBList<Module> moduleList = new JBList<>(listDataModel);
    new ListSpeedSearch<>(moduleList, (Function<Object, String>)o -> {
      // null list element represents the project-wide entry
      if (o == null) {
        return getProjectConfigurableItemName();
      }
      else if (o instanceof Module) {
        return ((Module)o).getName();
      }
      return null;
    });
    moduleList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    moduleList.setCellRenderer(new ModuleListCellRenderer() {
      @Override
      public void customize(@NotNull JList<? extends Module> list, Module module, int index, boolean selected, boolean hasFocus) {
        if (module == null) {
          setText(getProjectConfigurableItemName());
          setIcon(getProjectConfigurableItemIcon());
        }
        else {
          super.customize(list, module, index, selected, hasFocus);
        }
      }
    });
    splitter.setFirstComponent(new JBScrollPane(moduleList));
    final CardLayout layout = new CardLayout();
    final JPanel cardPanel = new JPanel(layout);
    splitter.setSecondComponent(cardPanel);
    if (projectConfigurable != null) {
      myModuleConfigurables.put(null, projectConfigurable);
      final JComponent component = projectConfigurable.createComponent();
      cardPanel.add(component, PROJECT_ITEM_KEY);
      // project-wide entry goes first in the list (as a null element)
      listDataModel.add(0, null);
    }
    for (Module module : modules) {
      final T configurable = createModuleConfigurable(module);
      myModuleConfigurables.put(module, configurable);
      final JComponent component = configurable.createComponent();
      cardPanel.add(component, module.getName());
    }
    moduleList.addListSelectionListener(new ListSelectionListener() {
      @Override
      public void valueChanged(ListSelectionEvent e) {
        final Module value = moduleList.getSelectedValue();
        layout.show(cardPanel, value == null ? PROJECT_ITEM_KEY : value.getName());
      }
    });
    // Pre-select the first entry so the right-hand card is never blank.
    if (moduleList.getItemsCount() > 0) {
      moduleList.setSelectedIndex(0);
      Module module = listDataModel.getElementAt(0);
      layout.show(cardPanel, module == null ? PROJECT_ITEM_KEY : module.getName());
    }
    return splitter;
  }
  /** Configurable shown for the default (template) project, or null if none. */
  @Nullable
  protected T createDefaultProjectConfigurable() {
    return null;
  }
  /**
   * This configurable is for project-wide settings
   *
   * @return configurable or null if none
   */
  @Nullable
  protected T createProjectConfigurable() {
    return null;
  }
  /**
   * @return Name for project-wide settings in modules list
   */
  @NotNull
  protected String getProjectConfigurableItemName() {
    return myProject.getName();
  }
  /**
   * @return Icon for project-wide settings in modules list
   */
  @Nullable
  protected Icon getProjectConfigurableItemIcon() {
    return AllIcons.Nodes.Project;
  }
  /** Creates the settings pane for a single module. */
  @NotNull
  protected abstract T createModuleConfigurable(Module module);
  @Override
  public boolean isModified() {
    for (T configurable : myModuleConfigurables.values()) {
      if (configurable.isModified()) return true;
    }
    return false;
  }
  @Override
  public void apply() throws ConfigurationException {
    for (T configurable : myModuleConfigurables.values()) {
      configurable.apply();
    }
  }
  @Override
  public void reset() {
    for (T configurable : myModuleConfigurables.values()) {
      configurable.reset();
    }
  }
  @Override
  public void disposeUIResources() {
    for (T configurable : myModuleConfigurables.values()) {
      configurable.disposeUIResources();
    }
    myModuleConfigurables.clear();
  }
  @NotNull
  @Override
  public String getId() {
    return getClass().getName();
  }
  @NotNull
  protected final Project getProject() {
    return myProject;
  }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import com.facebook.buck.cli.BuckConfig;
import com.facebook.buck.config.ConfigView;
import com.facebook.buck.model.Either;
import com.facebook.buck.util.HumanReadableException;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.io.File;
import java.nio.file.Path;
import java.util.Map;
import java.util.Optional;
/**
* A java-specific "view" of BuckConfig.
*/
/**
 * A java-specific "view" of BuckConfig, reading the [java] (and some [tools])
 * sections of .buckconfig.
 */
public class JavaBuckConfig implements ConfigView<BuckConfig> {
  public static final String SECTION = "java";
  public static final String PROPERTY_COMPILE_AGAINST_ABIS = "compile_against_abis";
  private final BuckConfig delegate;
  // Interface for reflection-based ConfigView to instantiate this class.
  public static JavaBuckConfig of(BuckConfig delegate) {
    return new JavaBuckConfig(delegate);
  }
  private JavaBuckConfig(BuckConfig delegate) {
    this.delegate = delegate;
  }
  @Override
  public BuckConfig getDelegate() {
    return delegate;
  }
  /** Default JVM options, using the "tools/java" executable when configured. */
  public JavaOptions getDefaultJavaOptions() {
    return JavaOptions
        .builder()
        .setJavaPath(getPathToExecutable("java"))
        .build();
  }
  /**
   * JVM options for running tests; falls back to {@link #getDefaultJavaOptions()}
   * when "tools/java_for_tests" is not configured.
   */
  public JavaOptions getDefaultJavaOptionsForTests() {
    Optional<Path> javaTestPath = getPathToExecutable("java_for_tests");
    if (javaTestPath.isPresent()) {
      return JavaOptions
          .builder()
          .setJavaPath(javaTestPath)
          .build();
    }
    return getDefaultJavaOptions();
  }
  /** Builds the default javac options from the [java] section of .buckconfig. */
  public JavacOptions getDefaultJavacOptions() {
    JavacOptions.Builder builder = JavacOptions.builderForUseInJavaBuckConfig();
    Optional<String> sourceLevel = delegate.getValue(SECTION, "source_level");
    if (sourceLevel.isPresent()) {
      builder.setSourceLevel(sourceLevel.get());
    }
    Optional<String> targetLevel = delegate.getValue(SECTION, "target_level");
    if (targetLevel.isPresent()) {
      builder.setTargetLevel(targetLevel.get());
    }
    ImmutableList<String> extraArguments = delegate.getListWithoutComments(
        SECTION,
        "extra_arguments");
    ImmutableList<String> safeAnnotationProcessors = delegate.getListWithoutComments(
        SECTION,
        "safe_annotation_processors");
    Optional<AbstractJavacOptions.SpoolMode> spoolMode = delegate
        .getEnum(SECTION, "jar_spool_mode", AbstractJavacOptions.SpoolMode.class);
    if (spoolMode.isPresent()) {
      builder.setSpoolMode(spoolMode.get());
    }
    builder.setTrackClassUsage(trackClassUsage());
    // map the ABI generation mode onto the compilation mode (see AbiGenerationMode)
    AbiGenerationMode abiGenerationMode = getAbiGenerationMode();
    switch (abiGenerationMode) {
      case CLASS:
      case SOURCE_WITH_DEPS:
        builder.setCompilationMode(Javac.CompilationMode.FULL);
        break;
      case MIGRATING_TO_SOURCE:
        builder.setCompilationMode(Javac.CompilationMode.FULL_CHECKING_REFERENCES);
        break;
      case SOURCE:
        builder.setCompilationMode(Javac.CompilationMode.FULL_ENFORCING_REFERENCES);
        break;
    }
    // "bootclasspath-<level>" entries map a source level to its bootclasspath
    ImmutableMap<String, String> allEntries = delegate.getEntriesForSection(SECTION);
    ImmutableMap.Builder<String, String> bootclasspaths = ImmutableMap.builder();
    for (Map.Entry<String, String> entry : allEntries.entrySet()) {
      if (entry.getKey().startsWith("bootclasspath-")) {
        bootclasspaths.put(entry.getKey().substring("bootclasspath-".length()), entry.getValue());
      }
    }
    return builder
        .putAllSourceToBootclasspath(bootclasspaths.build())
        .addAllExtraArguments(extraArguments)
        .setSafeAnnotationProcessors(safeAnnotationProcessors)
        .build();
  }
  /** ABI generation mode from "java/abi_generation_mode"; defaults to CLASS. */
  public AbiGenerationMode getAbiGenerationMode() {
    return delegate.getEnum(SECTION, "abi_generation_mode", AbiGenerationMode.class)
        .orElse(AbiGenerationMode.CLASS);
  }
  public ImmutableSet<String> getSrcRoots() {
    return ImmutableSet.copyOf(delegate.getListWithoutComments(SECTION, "src_roots"));
  }
  public DefaultJavaPackageFinder createDefaultJavaPackageFinder() {
    return DefaultJavaPackageFinder.createDefaultJavaPackageFinder(getSrcRoots());
  }
  public boolean trackClassUsage() {
    // This is just to make it possible to turn off dep-based rulekeys in case anything goes wrong
    // and can be removed when we're sure class usage tracking and dep-based keys for Java
    // work fine.
    Optional<Boolean> trackClassUsage = delegate.getBoolean(SECTION, "track_class_usage");
    if (trackClassUsage.isPresent() && !trackClassUsage.get()) {
      return false;
    }
    final Javac.Source javacSource = getJavacSpec().getJavacSource();
    return (javacSource == Javac.Source.JAR || javacSource == Javac.Source.JDK);
  }
  /** Assembles the javac spec from the [java] and [tools] sections. */
  public JavacSpec getJavacSpec() {
    // Hoisted so the "tools/javac" path lookup (and its canExecute() filesystem
    // check in getPathToExecutable) runs once instead of twice.
    Optional<Path> javacPath = getJavacPath();
    return JavacSpec.builder()
        .setJavacPath(
            javacPath.isPresent()
                ? Optional.of(Either.ofLeft(javacPath.get()))
                : Optional.empty())
        .setJavacJarPath(delegate.getSourcePath("tools", "javac_jar"))
        .setJavacLocation(
            delegate.getEnum(SECTION, "location", Javac.Location.class)
                .orElse(Javac.Location.IN_PROCESS))
        .setCompilerClassName(delegate.getValue("tools", "compiler_class_name"))
        .build();
  }
  @VisibleForTesting
  Optional<Path> getJavacPath() {
    return getPathToExecutable("javac");
  }
  /**
   * Looks up "tools/<executableName>" as a path.
   *
   * @throws HumanReadableException if the configured file is not executable
   */
  private Optional<Path> getPathToExecutable(String executableName) {
    Optional<Path> path = delegate.getPath("tools", executableName);
    if (path.isPresent()) {
      File file = path.get().toFile();
      if (!file.canExecute()) {
        throw new HumanReadableException(executableName + " is not executable: " + file.getPath());
      }
      return Optional.of(file.toPath());
    }
    return Optional.empty();
  }
  public boolean shouldCacheBinaries() {
    return delegate.getBooleanValue(SECTION, "cache_binaries", true);
  }
  public Optional<Integer> getDxThreadCount() {
    return delegate.getInteger(SECTION, "dx_threads");
  }
  /**
   * Enables a special validation mode that generates ABIs both from source and from class files
   * and diffs them. This is a test hook for use during development of the source ABI feature.
   */
  public boolean shouldValidateAbisGeneratedFromSource() {
    return delegate.getBooleanValue(SECTION, "validate_abis_from_source", false);
  }
  public boolean shouldCompileAgainstAbis() {
    return delegate.getBooleanValue(SECTION, PROPERTY_COMPILE_AGAINST_ABIS, false);
  }
  public enum AbiGenerationMode {
    /** Generate ABIs by stripping .class files */
    CLASS,
    /** Generate ABIs by parsing .java files with dependency ABIs available */
    SOURCE_WITH_DEPS,
    /**
     * Output warnings for things that aren't legal when generating ABIs from source without
     * dependency ABIs
     */
    MIGRATING_TO_SOURCE,
    /**
     * Generate ABIs by parsing .java files without dependency ABIs available (has some limitations)
     */
    SOURCE,
  }
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.v2beta1;
import static com.google.cloud.dialogflow.v2beta1.DocumentsClient.ListDocumentsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.ClientSettings;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.cloud.dialogflow.v2beta1.stub.DocumentsStubSettings;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link DocumentsClient}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (dialogflow.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the total timeout of getDocument to 30 seconds:
*
* <pre>{@code
* DocumentsSettings.Builder documentsSettingsBuilder = DocumentsSettings.newBuilder();
* documentsSettingsBuilder
* .getDocumentSettings()
* .setRetrySettings(
* documentsSettingsBuilder
* .getDocumentSettings()
* .getRetrySettings()
* .toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30))
* .build());
* DocumentsSettings documentsSettings = documentsSettingsBuilder.build();
* }</pre>
*/
@BetaApi
@Generated("by gapic-generator-java")
// NOTE(review): This class is generated by gapic-generator-java. Do not hand-edit the
// delegation logic; change the generator/stub settings instead. Comments below are
// review annotations only.
public class DocumentsSettings extends ClientSettings<DocumentsSettings> {
  /** Returns the object with the settings used for calls to listDocuments. */
  public PagedCallSettings<ListDocumentsRequest, ListDocumentsResponse, ListDocumentsPagedResponse>
      listDocumentsSettings() {
    return ((DocumentsStubSettings) getStubSettings()).listDocumentsSettings();
  }
  /** Returns the object with the settings used for calls to getDocument. */
  public UnaryCallSettings<GetDocumentRequest, Document> getDocumentSettings() {
    return ((DocumentsStubSettings) getStubSettings()).getDocumentSettings();
  }
  /** Returns the object with the settings used for calls to createDocument. */
  public UnaryCallSettings<CreateDocumentRequest, Operation> createDocumentSettings() {
    return ((DocumentsStubSettings) getStubSettings()).createDocumentSettings();
  }
  /** Returns the object with the settings used for calls to createDocument. */
  public OperationCallSettings<CreateDocumentRequest, Document, KnowledgeOperationMetadata>
      createDocumentOperationSettings() {
    return ((DocumentsStubSettings) getStubSettings()).createDocumentOperationSettings();
  }
  /** Returns the object with the settings used for calls to importDocuments. */
  public UnaryCallSettings<ImportDocumentsRequest, Operation> importDocumentsSettings() {
    return ((DocumentsStubSettings) getStubSettings()).importDocumentsSettings();
  }
  /** Returns the object with the settings used for calls to importDocuments. */
  public OperationCallSettings<
          ImportDocumentsRequest, ImportDocumentsResponse, KnowledgeOperationMetadata>
      importDocumentsOperationSettings() {
    return ((DocumentsStubSettings) getStubSettings()).importDocumentsOperationSettings();
  }
  /** Returns the object with the settings used for calls to deleteDocument. */
  public UnaryCallSettings<DeleteDocumentRequest, Operation> deleteDocumentSettings() {
    return ((DocumentsStubSettings) getStubSettings()).deleteDocumentSettings();
  }
  /** Returns the object with the settings used for calls to deleteDocument. */
  public OperationCallSettings<DeleteDocumentRequest, Empty, KnowledgeOperationMetadata>
      deleteDocumentOperationSettings() {
    return ((DocumentsStubSettings) getStubSettings()).deleteDocumentOperationSettings();
  }
  /** Returns the object with the settings used for calls to updateDocument. */
  public UnaryCallSettings<UpdateDocumentRequest, Operation> updateDocumentSettings() {
    return ((DocumentsStubSettings) getStubSettings()).updateDocumentSettings();
  }
  /** Returns the object with the settings used for calls to updateDocument. */
  public OperationCallSettings<UpdateDocumentRequest, Document, KnowledgeOperationMetadata>
      updateDocumentOperationSettings() {
    return ((DocumentsStubSettings) getStubSettings()).updateDocumentOperationSettings();
  }
  /** Returns the object with the settings used for calls to reloadDocument. */
  public UnaryCallSettings<ReloadDocumentRequest, Operation> reloadDocumentSettings() {
    return ((DocumentsStubSettings) getStubSettings()).reloadDocumentSettings();
  }
  /** Returns the object with the settings used for calls to reloadDocument. */
  public OperationCallSettings<ReloadDocumentRequest, Document, KnowledgeOperationMetadata>
      reloadDocumentOperationSettings() {
    return ((DocumentsStubSettings) getStubSettings()).reloadDocumentOperationSettings();
  }
  /** Constructs a DocumentsSettings instance wrapping a copy of the given stub settings. */
  public static final DocumentsSettings create(DocumentsStubSettings stub) throws IOException {
    return new DocumentsSettings.Builder(stub.toBuilder()).build();
  }
  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return DocumentsStubSettings.defaultExecutorProviderBuilder();
  }
  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return DocumentsStubSettings.getDefaultEndpoint();
  }
  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DocumentsStubSettings.getDefaultServiceScopes();
  }
  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return DocumentsStubSettings.defaultCredentialsProviderBuilder();
  }
  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return DocumentsStubSettings.defaultGrpcTransportProviderBuilder();
  }
  /** Returns the default transport channel provider for this service (gRPC). */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return DocumentsStubSettings.defaultTransportChannelProvider();
  }
  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return DocumentsStubSettings.defaultApiClientHeaderProviderBuilder();
  }
  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }
  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }
  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  protected DocumentsSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
  }
  /** Builder for DocumentsSettings. */
  public static class Builder extends ClientSettings.Builder<DocumentsSettings, Builder> {
    protected Builder() throws IOException {
      // Null ClientContext means the stub settings builder supplies all defaults.
      this(((ClientContext) null));
    }
    protected Builder(ClientContext clientContext) {
      super(DocumentsStubSettings.newBuilder(clientContext));
    }
    protected Builder(DocumentsSettings settings) {
      super(settings.getStubSettings().toBuilder());
    }
    protected Builder(DocumentsStubSettings.Builder stubSettings) {
      super(stubSettings);
    }
    private static Builder createDefault() {
      return new Builder(DocumentsStubSettings.newBuilder());
    }
    public DocumentsStubSettings.Builder getStubSettingsBuilder() {
      return ((DocumentsStubSettings.Builder) getStubSettings());
    }
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(
          getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
      return this;
    }
    /** Returns the builder for the settings used for calls to listDocuments. */
    public PagedCallSettings.Builder<
            ListDocumentsRequest, ListDocumentsResponse, ListDocumentsPagedResponse>
        listDocumentsSettings() {
      return getStubSettingsBuilder().listDocumentsSettings();
    }
    /** Returns the builder for the settings used for calls to getDocument. */
    public UnaryCallSettings.Builder<GetDocumentRequest, Document> getDocumentSettings() {
      return getStubSettingsBuilder().getDocumentSettings();
    }
    /** Returns the builder for the settings used for calls to createDocument. */
    public UnaryCallSettings.Builder<CreateDocumentRequest, Operation> createDocumentSettings() {
      return getStubSettingsBuilder().createDocumentSettings();
    }
    /** Returns the builder for the settings used for calls to createDocument. */
    public OperationCallSettings.Builder<
            CreateDocumentRequest, Document, KnowledgeOperationMetadata>
        createDocumentOperationSettings() {
      return getStubSettingsBuilder().createDocumentOperationSettings();
    }
    /** Returns the builder for the settings used for calls to importDocuments. */
    public UnaryCallSettings.Builder<ImportDocumentsRequest, Operation> importDocumentsSettings() {
      return getStubSettingsBuilder().importDocumentsSettings();
    }
    /** Returns the builder for the settings used for calls to importDocuments. */
    public OperationCallSettings.Builder<
            ImportDocumentsRequest, ImportDocumentsResponse, KnowledgeOperationMetadata>
        importDocumentsOperationSettings() {
      return getStubSettingsBuilder().importDocumentsOperationSettings();
    }
    /** Returns the builder for the settings used for calls to deleteDocument. */
    public UnaryCallSettings.Builder<DeleteDocumentRequest, Operation> deleteDocumentSettings() {
      return getStubSettingsBuilder().deleteDocumentSettings();
    }
    /** Returns the builder for the settings used for calls to deleteDocument. */
    public OperationCallSettings.Builder<DeleteDocumentRequest, Empty, KnowledgeOperationMetadata>
        deleteDocumentOperationSettings() {
      return getStubSettingsBuilder().deleteDocumentOperationSettings();
    }
    /** Returns the builder for the settings used for calls to updateDocument. */
    public UnaryCallSettings.Builder<UpdateDocumentRequest, Operation> updateDocumentSettings() {
      return getStubSettingsBuilder().updateDocumentSettings();
    }
    /** Returns the builder for the settings used for calls to updateDocument. */
    public OperationCallSettings.Builder<
            UpdateDocumentRequest, Document, KnowledgeOperationMetadata>
        updateDocumentOperationSettings() {
      return getStubSettingsBuilder().updateDocumentOperationSettings();
    }
    /** Returns the builder for the settings used for calls to reloadDocument. */
    public UnaryCallSettings.Builder<ReloadDocumentRequest, Operation> reloadDocumentSettings() {
      return getStubSettingsBuilder().reloadDocumentSettings();
    }
    /** Returns the builder for the settings used for calls to reloadDocument. */
    public OperationCallSettings.Builder<
            ReloadDocumentRequest, Document, KnowledgeOperationMetadata>
        reloadDocumentOperationSettings() {
      return getStubSettingsBuilder().reloadDocumentOperationSettings();
    }
    @Override
    public DocumentsSettings build() throws IOException {
      return new DocumentsSettings(this);
    }
  }
}
| |
/*
* The MIT License
* Copyright (c) 2012 Microsoft Corporation
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package microsoft.exchange.webservices.data.core.service.schema;
import microsoft.exchange.webservices.data.attribute.Schema;
import microsoft.exchange.webservices.data.core.XmlElementNames;
import microsoft.exchange.webservices.data.core.enumeration.service.ContactSource;
import microsoft.exchange.webservices.data.core.enumeration.misc.ExchangeVersion;
import microsoft.exchange.webservices.data.core.enumeration.service.FileAsMapping;
import microsoft.exchange.webservices.data.core.enumeration.property.PhysicalAddressIndex;
import microsoft.exchange.webservices.data.core.enumeration.property.PropertyDefinitionFlags;
import microsoft.exchange.webservices.data.property.complex.ByteArrayArray;
import microsoft.exchange.webservices.data.property.complex.CompleteName;
import microsoft.exchange.webservices.data.property.complex.EmailAddress;
import microsoft.exchange.webservices.data.property.complex.EmailAddressCollection;
import microsoft.exchange.webservices.data.property.complex.EmailAddressDictionary;
import microsoft.exchange.webservices.data.property.complex.ICreateComplexPropertyDelegate;
import microsoft.exchange.webservices.data.property.complex.ImAddressDictionary;
import microsoft.exchange.webservices.data.property.complex.PhoneNumberDictionary;
import microsoft.exchange.webservices.data.property.complex.PhysicalAddressDictionary;
import microsoft.exchange.webservices.data.property.complex.StringList;
import microsoft.exchange.webservices.data.property.definition.BoolPropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.ByteArrayPropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.ComplexPropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.ContainedPropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.DateTimePropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.GenericPropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.IndexedPropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.PropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.StringPropertyDefinition;
import java.util.EnumSet;
/**
* Represents the schema for contacts.
*/
@Schema
public class ContactSchema extends ItemSchema {
/**
* FieldURIs for contacts.
*/
private interface FieldUris {
/**
* The File as.
*/
String FileAs = "contacts:FileAs";
/**
* The File as mapping.
*/
String FileAsMapping = "contacts:FileAsMapping";
/**
* The Display name.
*/
String DisplayName = "contacts:DisplayName";
/**
* The Given name.
*/
String GivenName = "contacts:GivenName";
/**
* The Initials.
*/
String Initials = "contacts:Initials";
/**
* The Middle name.
*/
String MiddleName = "contacts:MiddleName";
/**
* The Nick name.
*/
String NickName = "contacts:Nickname";
/**
* The Complete name.
*/
String CompleteName = "contacts:CompleteName";
/**
* The Company name.
*/
String CompanyName = "contacts:CompanyName";
/**
* The Email address.
*/
String EmailAddress = "contacts:EmailAddress";
/**
* The Email addresses.
*/
String EmailAddresses = "contacts:EmailAddresses";
/**
* The Physical addresses.
*/
String PhysicalAddresses = "contacts:PhysicalAddresses";
/**
* The Phone number.
*/
String PhoneNumber = "contacts:PhoneNumber";
/**
* The Phone numbers.
*/
String PhoneNumbers = "contacts:PhoneNumbers";
/**
* The Assistant name.
*/
String AssistantName = "contacts:AssistantName";
/**
* The Birthday.
*/
String Birthday = "contacts:Birthday";
/**
* The Business home page.
*/
String BusinessHomePage = "contacts:BusinessHomePage";
/**
* The Children.
*/
String Children = "contacts:Children";
/**
* The Companies.
*/
String Companies = "contacts:Companies";
/**
* The Contact source.
*/
String ContactSource = "contacts:ContactSource";
/**
* The Department.
*/
String Department = "contacts:Department";
/**
* The Generation.
*/
String Generation = "contacts:Generation";
/**
* The Im address.
*/
String ImAddress = "contacts:ImAddress";
/**
* The Im addresses.
*/
String ImAddresses = "contacts:ImAddresses";
/**
* The Job title.
*/
String JobTitle = "contacts:JobTitle";
/**
* The Manager.
*/
String Manager = "contacts:Manager";
/**
* The Mileage.
*/
String Mileage = "contacts:Mileage";
/**
* The Office location.
*/
String OfficeLocation = "contacts:OfficeLocation";
/**
* The Physical address city.
*/
String PhysicalAddressCity = "contacts:PhysicalAddress:City";
/**
* The Physical address country or region.
*/
String PhysicalAddressCountryOrRegion =
"contacts:PhysicalAddress:CountryOrRegion";
/**
* The Physical address state.
*/
String PhysicalAddressState = "contacts:PhysicalAddress:State";
/**
* The Physical address street.
*/
String PhysicalAddressStreet = "contacts:PhysicalAddress:Street";
/**
* The Physical address postal code.
*/
String PhysicalAddressPostalCode =
"contacts:PhysicalAddress:PostalCode";
/**
* The Postal address index.
*/
String PostalAddressIndex = "contacts:PostalAddressIndex";
/**
* The Profession.
*/
String Profession = "contacts:Profession";
/**
* The Spouse name.
*/
String SpouseName = "contacts:SpouseName";
/**
* The Surname.
*/
String Surname = "contacts:Surname";
/**
* The Wedding anniversary.
*/
String WeddingAnniversary = "contacts:WeddingAnniversary";
/**
* The Has picture.
*/
String HasPicture = "contacts:HasPicture";
/**
* The PhoneticFullName.
*/
String PhoneticFullName = "contacts:PhoneticFullName";
/**
* The PhoneticFirstName.
*/
String PhoneticFirstName = "contacts:PhonetiFirstName";
/**
* The PhoneticFirstName.
*/
String PhoneticLastName = "contacts:PhonetiLastName";
/**
* The Aias.
*/
String Alias = "contacts:Alias";
/**
* The Notes.
*/
String Notes = "contacts:Notes";
/**
* The Photo.
*/
String Photo = "contacts:Photo";
/**
* The UserSMIMECertificate.
*/
String UserSMIMECertificate = "contacts:UserSMIMECertificate";
/**
* The MSExchangeCertificate.
*/
String MSExchangeCertificate = "contacts:MSExchangeCertificate";
/**
* The DirectoryId.
*/
String DirectoryId = "contacts:DirectoryId";
/**
* The ManagerMailbox.
*/
String ManagerMailbox = "contacts:ManagerMailbox";
/**
* The DirectReports.
*/
String DirectReports = "contacts:DirectReports";
}
/**
* Defines the FileAs property.
*/
public static final PropertyDefinition FileAs =
new StringPropertyDefinition(
XmlElementNames.FileAs, FieldUris.FileAs, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the FileAsMapping property.
*/
public static final PropertyDefinition FileAsMapping =
new GenericPropertyDefinition<FileAsMapping>(
FileAsMapping.class,
XmlElementNames.FileAsMapping, FieldUris.FileAsMapping, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the DisplayName property.
*/
public static final PropertyDefinition DisplayName =
new StringPropertyDefinition(
XmlElementNames.DisplayName, FieldUris.DisplayName, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the GivenName property.
*/
public static final PropertyDefinition GivenName =
new StringPropertyDefinition(
XmlElementNames.GivenName, FieldUris.GivenName, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the Initials property.
*/
public static final PropertyDefinition Initials =
new StringPropertyDefinition(
XmlElementNames.Initials, FieldUris.Initials, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the MiddleName property.
*/
public static final PropertyDefinition MiddleName =
new StringPropertyDefinition(
XmlElementNames.MiddleName, FieldUris.MiddleName, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the NickName property.
*/
public static final PropertyDefinition NickName =
new StringPropertyDefinition(
XmlElementNames.NickName, FieldUris.NickName, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the CompleteName property.
*/
public static final PropertyDefinition CompleteName =
new ComplexPropertyDefinition<microsoft.exchange.webservices.data.property.complex.CompleteName>(
CompleteName.class,
XmlElementNames.CompleteName, FieldUris.CompleteName, EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1,
new ICreateComplexPropertyDelegate<CompleteName>() {
@Override
public CompleteName createComplexProperty() {
return new CompleteName();
}
});
/**
* Defines the CompanyName property.
*/
public static final PropertyDefinition CompanyName =
new StringPropertyDefinition(
XmlElementNames.CompanyName, FieldUris.CompanyName, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the EmailAddresses property.
*/
public static final PropertyDefinition EmailAddresses =
new ComplexPropertyDefinition<EmailAddressDictionary>(
EmailAddressDictionary.class,
XmlElementNames.EmailAddresses,
FieldUris.EmailAddresses,
EnumSet.of(PropertyDefinitionFlags.AutoInstantiateOnRead,
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate),
ExchangeVersion.Exchange2007_SP1,
new ICreateComplexPropertyDelegate
<EmailAddressDictionary>() {
@Override
public EmailAddressDictionary createComplexProperty() {
return new EmailAddressDictionary();
}
});
/**
* Defines the PhysicalAddresses property.
*/
public static final PropertyDefinition PhysicalAddresses =
new ComplexPropertyDefinition<PhysicalAddressDictionary>(
PhysicalAddressDictionary.class,
XmlElementNames.PhysicalAddresses,
FieldUris.PhysicalAddresses,
EnumSet.of(PropertyDefinitionFlags.AutoInstantiateOnRead,
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate),
ExchangeVersion.Exchange2007_SP1,
new ICreateComplexPropertyDelegate
<PhysicalAddressDictionary>() {
@Override
public PhysicalAddressDictionary createComplexProperty() {
return new PhysicalAddressDictionary();
}
});
/**
* Defines the PhoneNumbers property.
*/
public static final PropertyDefinition PhoneNumbers =
new ComplexPropertyDefinition<PhoneNumberDictionary>(
PhoneNumberDictionary.class,
XmlElementNames.PhoneNumbers,
FieldUris.PhoneNumbers,
EnumSet.of(PropertyDefinitionFlags.AutoInstantiateOnRead,
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate),
ExchangeVersion.Exchange2007_SP1,
new ICreateComplexPropertyDelegate
<PhoneNumberDictionary>() {
@Override
public PhoneNumberDictionary createComplexProperty() {
return new PhoneNumberDictionary();
}
});
/**
* Defines the AssistantName property.
*/
public static final PropertyDefinition AssistantName =
new StringPropertyDefinition(
XmlElementNames.AssistantName, FieldUris.AssistantName, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the Birthday property.
*/
public static final PropertyDefinition Birthday =
new DateTimePropertyDefinition(
XmlElementNames.Birthday, FieldUris.Birthday, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the BusinessHomePage property.
* <p/>
* Defined as anyURI in the EWS schema. String is fine here.
*/
public static final PropertyDefinition BusinessHomePage =
new StringPropertyDefinition(
XmlElementNames.BusinessHomePage, FieldUris.BusinessHomePage,
EnumSet.of(PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the Children property.
*/
public static final PropertyDefinition Children =
new ComplexPropertyDefinition<StringList>(
StringList.class,
XmlElementNames.Children, FieldUris.Children, EnumSet.of(
PropertyDefinitionFlags.AutoInstantiateOnRead,
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1,
new ICreateComplexPropertyDelegate<StringList>() {
@Override
public StringList createComplexProperty() {
return new StringList();
}
});
/**
* Defines the Companies property.
*/
public static final PropertyDefinition Companies =
new ComplexPropertyDefinition<StringList>(
StringList.class,
XmlElementNames.Companies, FieldUris.Companies, EnumSet.of(
PropertyDefinitionFlags.AutoInstantiateOnRead,
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1,
new ICreateComplexPropertyDelegate<StringList>() {
@Override
public StringList createComplexProperty() {
return new StringList();
}
});
/**
* Defines the ContactSource property.
*/
public static final PropertyDefinition ContactSource =
new GenericPropertyDefinition<ContactSource>(
ContactSource.class,
XmlElementNames.ContactSource, FieldUris.ContactSource, EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the Department property.
*/
public static final PropertyDefinition Department =
new StringPropertyDefinition(
XmlElementNames.Department, FieldUris.Department, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the Generation property.
*/
public static final PropertyDefinition Generation =
new StringPropertyDefinition(
XmlElementNames.Generation, FieldUris.Generation, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the ImAddresses property.
*/
public static final PropertyDefinition ImAddresses =
new ComplexPropertyDefinition<ImAddressDictionary>(
ImAddressDictionary.class,
XmlElementNames.ImAddresses, FieldUris.ImAddresses, EnumSet.of(
PropertyDefinitionFlags.AutoInstantiateOnRead,
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate),
ExchangeVersion.Exchange2007_SP1,
new ICreateComplexPropertyDelegate<ImAddressDictionary>() {
@Override
public ImAddressDictionary createComplexProperty() {
return new ImAddressDictionary();
}
});
/**
* Defines the JobTitle property.
*/
public static final PropertyDefinition JobTitle =
new StringPropertyDefinition(
XmlElementNames.JobTitle, FieldUris.JobTitle, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the Manager property.
*/
public static final PropertyDefinition Manager =
new StringPropertyDefinition(
XmlElementNames.Manager, FieldUris.Manager, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the Mileage property.
*/
public static final PropertyDefinition Mileage =
new StringPropertyDefinition(
XmlElementNames.Mileage, FieldUris.Mileage, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the OfficeLocation property.
*/
public static final PropertyDefinition OfficeLocation =
new StringPropertyDefinition(
XmlElementNames.OfficeLocation, FieldUris.OfficeLocation, EnumSet
.of(PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the PostalAddressIndex property.
*/
public static final PropertyDefinition PostalAddressIndex =
new GenericPropertyDefinition<PhysicalAddressIndex>(
PhysicalAddressIndex.class,
XmlElementNames.PostalAddressIndex, FieldUris.PostalAddressIndex,
EnumSet.of(PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the Profession property.
*/
public static final PropertyDefinition Profession =
new StringPropertyDefinition(
XmlElementNames.Profession, FieldUris.Profession, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the SpouseName property.
*/
public static final PropertyDefinition SpouseName =
new StringPropertyDefinition(
XmlElementNames.SpouseName, FieldUris.SpouseName, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the Surname property.
*/
public static final PropertyDefinition Surname =
new StringPropertyDefinition(
XmlElementNames.Surname, FieldUris.Surname, EnumSet.of(
PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the WeddingAnniversary property.
*/
public static final PropertyDefinition WeddingAnniversary =
new DateTimePropertyDefinition(
XmlElementNames.WeddingAnniversary, FieldUris.WeddingAnniversary,
EnumSet.of(PropertyDefinitionFlags.CanSet,
PropertyDefinitionFlags.CanUpdate,
PropertyDefinitionFlags.CanDelete,
PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2007_SP1);
/**
* Defines the HasPicture property.
*/
public static final PropertyDefinition HasPicture =
new BoolPropertyDefinition(
XmlElementNames.HasPicture, FieldUris.HasPicture, EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010);
/**
* Defines PhoeniticFullName property **
*/
public static final PropertyDefinition PhoneticFullName =
new StringPropertyDefinition(
XmlElementNames.PhoneticFullName,
FieldUris.PhoneticFullName,
EnumSet.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines PhoenticFirstName property **
*/
public static final PropertyDefinition PhoneticFirstName =
new StringPropertyDefinition(
XmlElementNames.PhoneticFirstName,
FieldUris.PhoneticFirstName,
EnumSet.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
 * Defines the PhoneticLastName property.
 */
public static final PropertyDefinition PhoneticLastName =
new StringPropertyDefinition(
XmlElementNames.PhoneticLastName,
FieldUris.PhoneticLastName,
EnumSet.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
 * Defines the Alias property.
 */
public static final PropertyDefinition Alias =
new StringPropertyDefinition(
XmlElementNames.Alias,
FieldUris.Alias,
EnumSet.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
 * Defines the Notes property.
 */
public static final PropertyDefinition Notes =
new StringPropertyDefinition(
XmlElementNames.Notes,
FieldUris.Notes,
EnumSet.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
 * Defines the Photo property (raw photo bytes).
 */
public static final PropertyDefinition Photo =
new ByteArrayPropertyDefinition(
XmlElementNames.Photo,
FieldUris.Photo,
EnumSet.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
 * Defines the UserSMIMECertificate property.
 */
public static final PropertyDefinition UserSMIMECertificate =
new ComplexPropertyDefinition<ByteArrayArray>(
ByteArrayArray.class,
XmlElementNames.UserSMIMECertificate,
FieldUris.UserSMIMECertificate,
EnumSet.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<ByteArrayArray>() {
@Override
public ByteArrayArray createComplexProperty() {
return new ByteArrayArray();
}
});
/**
 * Defines the MSExchangeCertificate property.
 */
public static final PropertyDefinition MSExchangeCertificate =
new ComplexPropertyDefinition<ByteArrayArray>(
ByteArrayArray.class,
XmlElementNames.MSExchangeCertificate,
FieldUris.MSExchangeCertificate,
EnumSet.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<ByteArrayArray>() {
@Override
public ByteArrayArray createComplexProperty() {
return new ByteArrayArray();
}
});
/**
 * Defines the DirectoryId property.
 */
public static final PropertyDefinition DirectoryId =
new StringPropertyDefinition(
XmlElementNames.DirectoryId,
FieldUris.DirectoryId,
EnumSet.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
 * Defines the ManagerMailbox property.
 */
public static final PropertyDefinition ManagerMailbox =
new ContainedPropertyDefinition<EmailAddress>(
EmailAddress.class,
XmlElementNames.ManagerMailbox,
FieldUris.ManagerMailbox,
XmlElementNames.Mailbox,
EnumSet.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<EmailAddress>() {
@Override
public EmailAddress createComplexProperty() {
return new EmailAddress();
}
});
/**
 * Defines the DirectReports property.
 */
public static final PropertyDefinition DirectReports =
new ComplexPropertyDefinition<EmailAddressCollection>(
EmailAddressCollection.class,
XmlElementNames.DirectReports,
FieldUris.DirectReports,
EnumSet.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<EmailAddressCollection>() {
@Override
public EmailAddressCollection createComplexProperty()
{
return new EmailAddressCollection();
}
});
/**
 * Defines the EmailAddress1 property.
 */
public static final IndexedPropertyDefinition EmailAddress1 =
new IndexedPropertyDefinition(
FieldUris.EmailAddress, "EmailAddress1");
/**
 * Defines the EmailAddress2 property.
 */
public static final IndexedPropertyDefinition EmailAddress2 =
new IndexedPropertyDefinition(
FieldUris.EmailAddress, "EmailAddress2");
/**
 * Defines the EmailAddress3 property.
 */
public static final IndexedPropertyDefinition EmailAddress3 =
new IndexedPropertyDefinition(
FieldUris.EmailAddress, "EmailAddress3");
/**
 * Defines the ImAddress1 property.
 */
public static final IndexedPropertyDefinition ImAddress1 =
new IndexedPropertyDefinition(
FieldUris.ImAddress, "ImAddress1");
/**
 * Defines the ImAddress2 property.
 */
public static final IndexedPropertyDefinition ImAddress2 =
new IndexedPropertyDefinition(
FieldUris.ImAddress, "ImAddress2");
/**
 * Defines the ImAddress3 property.
 */
public static final IndexedPropertyDefinition ImAddress3 =
new IndexedPropertyDefinition(
FieldUris.ImAddress, "ImAddress3");
/**
 * Defines the AssistantPhone property.
 */
public static final IndexedPropertyDefinition AssistantPhone =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "AssistantPhone");
/**
 * Defines the BusinessFax property.
 */
public static final IndexedPropertyDefinition BusinessFax =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "BusinessFax");
/**
 * Defines the BusinessPhone property.
 */
public static final IndexedPropertyDefinition BusinessPhone =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "BusinessPhone");
/**
 * Defines the BusinessPhone2 property.
 */
public static final IndexedPropertyDefinition BusinessPhone2 =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "BusinessPhone2");
/**
 * Defines the Callback property.
 */
public static final IndexedPropertyDefinition Callback =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "Callback");
/**
 * Defines the CarPhone property.
 */
public static final IndexedPropertyDefinition CarPhone =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "CarPhone");
/**
 * Defines the CompanyMainPhone property.
 */
public static final IndexedPropertyDefinition CompanyMainPhone =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "CompanyMainPhone");
/**
 * Defines the HomeFax property.
 */
public static final IndexedPropertyDefinition HomeFax =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "HomeFax");
/**
 * Defines the HomePhone property.
 */
public static final IndexedPropertyDefinition HomePhone =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "HomePhone");
/**
 * Defines the HomePhone2 property.
 */
public static final IndexedPropertyDefinition HomePhone2 =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "HomePhone2");
/**
 * Defines the Isdn property.
 */
public static final IndexedPropertyDefinition Isdn =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "Isdn");
/**
 * Defines the MobilePhone property.
 */
public static final IndexedPropertyDefinition MobilePhone =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "MobilePhone");
/**
 * Defines the OtherFax property.
 */
public static final IndexedPropertyDefinition OtherFax =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "OtherFax");
/**
 * Defines the OtherTelephone property.
 */
public static final IndexedPropertyDefinition OtherTelephone =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "OtherTelephone");
/**
 * Defines the Pager property.
 */
public static final IndexedPropertyDefinition Pager =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "Pager");
/**
 * Defines the PrimaryPhone property.
 */
public static final IndexedPropertyDefinition PrimaryPhone =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "PrimaryPhone");
/**
 * Defines the RadioPhone property.
 */
public static final IndexedPropertyDefinition RadioPhone =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "RadioPhone");
/**
 * Defines the Telex property.
 */
public static final IndexedPropertyDefinition Telex =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "Telex");
/**
 * Defines the TtyTddPhone property.
 */
public static final IndexedPropertyDefinition TtyTddPhone =
new IndexedPropertyDefinition(
FieldUris.PhoneNumber, "TtyTddPhone");
/**
 * Defines the BusinessAddressStreet property.
 */
public static final IndexedPropertyDefinition BusinessAddressStreet =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressStreet, "Business");
/**
 * Defines the BusinessAddressCity property.
 */
public static final IndexedPropertyDefinition BusinessAddressCity =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressCity, "Business");
/**
 * Defines the BusinessAddressState property.
 */
public static final IndexedPropertyDefinition BusinessAddressState =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressState, "Business");
/**
 * Defines the BusinessAddressCountryOrRegion property.
 */
public static final IndexedPropertyDefinition
BusinessAddressCountryOrRegion =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressCountryOrRegion, "Business");
/**
 * Defines the BusinessAddressPostalCode property.
 */
public static final IndexedPropertyDefinition BusinessAddressPostalCode =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressPostalCode, "Business");
/**
 * Defines the HomeAddressStreet property.
 */
public static final IndexedPropertyDefinition HomeAddressStreet =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressStreet, "Home");
/**
 * Defines the HomeAddressCity property.
 */
public static final IndexedPropertyDefinition HomeAddressCity =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressCity, "Home");
/**
 * Defines the HomeAddressState property.
 */
public static final IndexedPropertyDefinition HomeAddressState =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressState, "Home");
/**
 * Defines the HomeAddressCountryOrRegion property.
 */
public static final IndexedPropertyDefinition HomeAddressCountryOrRegion =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressCountryOrRegion, "Home");
/**
 * Defines the HomeAddressPostalCode property.
 */
public static final IndexedPropertyDefinition HomeAddressPostalCode =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressPostalCode, "Home");
/**
 * Defines the OtherAddressStreet property.
 */
public static final IndexedPropertyDefinition OtherAddressStreet =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressStreet, "Other");
/**
 * Defines the OtherAddressCity property.
 */
public static final IndexedPropertyDefinition OtherAddressCity =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressCity, "Other");
/**
 * Defines the OtherAddressState property.
 */
public static final IndexedPropertyDefinition OtherAddressState =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressState, "Other");
/**
 * Defines the OtherAddressCountryOrRegion property.
 */
public static final IndexedPropertyDefinition OtherAddressCountryOrRegion =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressCountryOrRegion, "Other");
/**
 * Defines the OtherAddressPostalCode property.
 */
public static final IndexedPropertyDefinition OtherAddressPostalCode =
new IndexedPropertyDefinition(
FieldUris.PhysicalAddressPostalCode, "Other");
// This must be declared after the property definitions so that the static
// initializers of the properties above have already run when the schema
// instance is constructed.
/**
 * The shared {@link ContactSchema} instance.
 */
public static final ContactSchema Instance = new ContactSchema();
/**
 * Registers this schema's properties.
 * <p>
 * IMPORTANT NOTE: PROPERTIES MUST BE REGISTERED IN SCHEMA ORDER (i.e. the
 * same order as they are defined in types.xsd). Do not reorder the calls
 * below.
 */
@Override
protected void registerProperties() {
super.registerProperties();
// First-class (non-indexed) properties, in types.xsd order.
this.registerProperty(FileAs);
this.registerProperty(FileAsMapping);
this.registerProperty(DisplayName);
this.registerProperty(GivenName);
this.registerProperty(Initials);
this.registerProperty(MiddleName);
this.registerProperty(NickName);
this.registerProperty(CompleteName);
this.registerProperty(CompanyName);
this.registerProperty(EmailAddresses);
this.registerProperty(PhysicalAddresses);
this.registerProperty(PhoneNumbers);
this.registerProperty(AssistantName);
this.registerProperty(Birthday);
this.registerProperty(BusinessHomePage);
this.registerProperty(Children);
this.registerProperty(Companies);
this.registerProperty(ContactSource);
this.registerProperty(Department);
this.registerProperty(Generation);
this.registerProperty(ImAddresses);
this.registerProperty(JobTitle);
this.registerProperty(Manager);
this.registerProperty(Mileage);
this.registerProperty(OfficeLocation);
this.registerProperty(PostalAddressIndex);
this.registerProperty(Profession);
this.registerProperty(SpouseName);
this.registerProperty(Surname);
this.registerProperty(WeddingAnniversary);
this.registerProperty(HasPicture);
this.registerProperty(PhoneticFullName);
this.registerProperty(PhoneticFirstName);
this.registerProperty(PhoneticLastName);
this.registerProperty(Alias);
this.registerProperty(Notes);
this.registerProperty(Photo);
this.registerProperty(UserSMIMECertificate);
this.registerProperty(MSExchangeCertificate);
this.registerProperty(DirectoryId);
this.registerProperty(ManagerMailbox);
this.registerProperty(DirectReports);
// Indexed (multi-valued) properties: e-mail/IM slots, phone numbers and
// the Business/Home/Other physical-address components.
this.registerIndexedProperty(EmailAddress1);
this.registerIndexedProperty(EmailAddress2);
this.registerIndexedProperty(EmailAddress3);
this.registerIndexedProperty(ImAddress1);
this.registerIndexedProperty(ImAddress2);
this.registerIndexedProperty(ImAddress3);
this.registerIndexedProperty(AssistantPhone);
this.registerIndexedProperty(BusinessFax);
this.registerIndexedProperty(BusinessPhone);
this.registerIndexedProperty(BusinessPhone2);
this.registerIndexedProperty(Callback);
this.registerIndexedProperty(CarPhone);
this.registerIndexedProperty(CompanyMainPhone);
this.registerIndexedProperty(HomeFax);
this.registerIndexedProperty(HomePhone);
this.registerIndexedProperty(HomePhone2);
this.registerIndexedProperty(Isdn);
this.registerIndexedProperty(MobilePhone);
this.registerIndexedProperty(OtherFax);
this.registerIndexedProperty(OtherTelephone);
this.registerIndexedProperty(Pager);
this.registerIndexedProperty(PrimaryPhone);
this.registerIndexedProperty(RadioPhone);
this.registerIndexedProperty(Telex);
this.registerIndexedProperty(TtyTddPhone);
this.registerIndexedProperty(BusinessAddressStreet);
this.registerIndexedProperty(BusinessAddressCity);
this.registerIndexedProperty(BusinessAddressState);
this.registerIndexedProperty(BusinessAddressCountryOrRegion);
this.registerIndexedProperty(BusinessAddressPostalCode);
this.registerIndexedProperty(HomeAddressStreet);
this.registerIndexedProperty(HomeAddressCity);
this.registerIndexedProperty(HomeAddressState);
this.registerIndexedProperty(HomeAddressCountryOrRegion);
this.registerIndexedProperty(HomeAddressPostalCode);
this.registerIndexedProperty(OtherAddressStreet);
this.registerIndexedProperty(OtherAddressCity);
this.registerIndexedProperty(OtherAddressState);
this.registerIndexedProperty(OtherAddressCountryOrRegion);
this.registerIndexedProperty(OtherAddressPostalCode);
}
/**
 * Instantiates a new contact schema. Package-private by design: callers
 * should use the shared {@link #Instance} instead of creating their own.
 */
ContactSchema() {
super();
}
}
| |
/*
* Copyright 2014 Aran Hakki
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package concurrency.messaging.latencytests;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.junit.BeforeClass;
import org.junit.Test;
import concurrency.messaging.ConcurrentLinkedPollOfferQueue;
public class ConcurrentPollOfferQueueTest {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
}
// Written by the producer/consumer worker threads and read here; volatile
// so the timestamps are visible across threads.
private volatile long start = 0;
private volatile long end = 0;
/**
 * Offers the integers 1..100 from one thread and polls them from another,
 * asserting that every offered element is eventually consumed.
 * <p>
 * Fixes over the original version: {@code queue.poll()} returns
 * {@code null} when the queue is empty, and the old code unboxed it
 * straight into an {@code int}, so the consumer thread died with a
 * NullPointerException; the result list is now synchronized so the test
 * thread reads it safely; and the test finishes as soon as all 100
 * elements arrive instead of always sleeping 15 seconds.
 */
@Test
public void test() throws InterruptedException {
final ConcurrentLinkedPollOfferQueue<Integer> queue = new ConcurrentLinkedPollOfferQueue<Integer>(5);
Runnable producer = new Runnable() {
public void run() {
for (int count = 1; count <= 100; count++) {
queue.offer(count);
}
}
};
// Synchronized wrapper: the consumer thread adds while this thread polls size().
final List<Integer> integers = Collections.synchronizedList(new ArrayList<Integer>());
Runnable consumer = new Runnable() {
public void run() {
while (integers.size() < 100 && !Thread.currentThread().isInterrupted()) {
Integer polled = queue.poll();
if (polled != null) { // null = queue momentarily empty; keep spinning
integers.add(polled);
}
}
}
};
ExecutorService es = Executors.newCachedThreadPool();
try {
es.execute(producer);
es.execute(consumer);
// Wait up to 15 s (the original upper bound) for all elements to arrive.
long deadline = System.currentTimeMillis() + 15000L;
while (integers.size() < 100 && System.currentTimeMillis() < deadline) {
Thread.sleep(10);
}
} finally {
es.shutdownNow(); // interrupt workers and release the pool threads
}
assertEquals(100, integers.size());
}
/**
 * Measures the offer-to-poll latency of a single element and asserts it is
 * below 100 microseconds. Uses blocking {@code Future.get()} instead of the
 * original busy-wait on {@code isDone()}, and shuts the executor down.
 */
@Test
public void testLatency() throws InterruptedException, ExecutionException {
final ConcurrentLinkedPollOfferQueue<String> queue = new ConcurrentLinkedPollOfferQueue<String>(10);
final Runnable producer = new Runnable() {
public void run() {
start = System.nanoTime();
queue.offer("1");
}
};
final ExecutorService es = Executors.newCachedThreadPool();
try {
Callable<Long> consumer = new Callable<Long>() {
public Long call() throws Exception {
es.execute(producer);
while (true) {
String polled = queue.poll();
if (polled != null && polled.equals("1")) {
end = System.nanoTime();
return end - start;
}
}
}
};
long nano = es.submit(consumer).get(); // get() blocks until done
System.out.println("nano: " + nano);
System.out.println("micro: " + (nano / 1000));
assertTrue((nano / 1000) < 100);
} finally {
es.shutdownNow();
}
}
/**
 * Repeats the single-element latency measurement 1000 times and asserts
 * the trimmed average (5% of samples dropped from each end) is below
 * 13000 ns. The original's dead {@code sum} computation and busy-wait
 * loops were removed; the executor is now shut down.
 */
@Test
public void testAverageLatency() throws InterruptedException, ExecutionException {
final ConcurrentLinkedPollOfferQueue<String> queue = new ConcurrentLinkedPollOfferQueue<String>(10);
final Callable<Long> producer = new Callable<Long>() {
public Long call() throws Exception {
long startNano = System.nanoTime();
queue.offer("1");
return startNano;
}
};
int size = 1000;
final ExecutorService es = Executors.newFixedThreadPool(size);
try {
Callable<Long> consumer = new Callable<Long>() {
public Long call() throws Exception {
Future<Long> future = es.submit(producer);
while (true) {
String polled = queue.poll();
if (polled != null && polled.equals("1")) {
long endNano = System.nanoTime();
// get() blocks until the producer recorded its start time.
return endNano - future.get();
}
}
}
};
List<Long> results = new ArrayList<Long>();
for (int i = 0; i < size; i++) {
results.add(es.submit(consumer).get());
}
System.out.println("min nano: " + Collections.min(results));
System.out.println("max nano: " + Collections.max(results));
System.out.println("results: " + results);
int endsToRemove = (int) (results.size() * 0.05);
double average = averageNanoMinusAnomalies(results, endsToRemove);
System.out.println("average nano, minus anomalies: " + average);
assertTrue(average < 13000);
} finally {
es.shutdownNow();
}
}
/**
 * Returns the average of {@code results} after dropping the
 * {@code noOfEndsToRemove} smallest and largest samples (outlier trimming).
 * The input list is not modified. Returns 0 when trimming would leave no
 * samples.
 */
private double averageNanoMinusAnomalies(List<Long> results, int noOfEndsToRemove) {
List<Long> sorted = new ArrayList<Long>(results);
Collections.sort(sorted);
double sum = 0;
for (int i = noOfEndsToRemove; i < sorted.size() - noOfEndsToRemove; i++) {
sum += sorted.get(i);
}
int remaining = sorted.size() - 2 * noOfEndsToRemove;
return remaining > 0 ? sum / remaining : 0;
}
private volatile Object volatileObject = new Object();
private volatile Object objectA = new Object();
private volatile Object objectB = new Object();
/** Rough single-sample measurement of one volatile read; prints only. */
@Test
public void testVolatileReadLatency() {
long start = System.nanoTime();
Object r = volatileObject;
long end = System.nanoTime();
System.out.println("testVolatileReadLatency: " + (end - start));
}
/** Rough single-sample measurement of one volatile write; prints only. */
@Test
public void testVolatileWriteLatency() {
long start = System.nanoTime();
volatileObject = objectA;
long end = System.nanoTime();
System.out.println("testVolatileWriteLatency: " + (end - start));
}
/** Rough single-sample measurement of one plain (volatile-source) read; prints only. */
@Test
public void testVariableReadLatency() {
long start = System.nanoTime();
Object object = objectA;
long end = System.nanoTime();
System.out.println("testVariableReadLatency: " + (end - start));
}
/** Rough single-sample measurement of one plain (volatile-target) write; prints only. */
@Test
public void testVariableWriteLatency() {
long start = System.nanoTime();
objectA = objectB;
long end = System.nanoTime();
System.out.println("testVariableWriteLatency: " + (end - start));
}
}
| |
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved.
*
* Oracle and Java are registered trademarks of Oracle and/or its affiliates.
* Other names may be trademarks of their respective owners.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common
* Development and Distribution License("CDDL") (collectively, the
* "License"). You may not use this file except in compliance with the
* License. You can obtain a copy of the License at
* http://www.netbeans.org/cddl-gplv2.html
* or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the
* specific language governing permissions and limitations under the
* License. When distributing the software, include this License Header
* Notice in each file and include the License file at
* nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the
* License Header, with the fields enclosed by brackets [] replaced by
* your own identifying information:
* "Portions Copyrighted [year] [name of copyright owner]"
*
* Contributor(s):
*
* The Original Software is NetBeans. The Initial Developer of the Original
* Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun
* Microsystems, Inc. All Rights Reserved.
*
* If you wish your version of this file to be governed by only the CDDL
* or only the GPL Version 2, indicate your decision by adding
* "[Contributor] elects to include this software in this distribution
* under the [CDDL or GPL Version 2] license." If you do not indicate a
* single choice of license, a recipient has the option to distribute
* your version of this file under either the CDDL, the GPL Version 2 or
* to extend the choice of license to its licensees as provided above.
* However, if you add GPL Version 2 code and therefore, elected the GPL
* Version 2 license, then the option applies only if the new code is
* made subject to such option by the copyright holder.
*/
package com.bearsoft.org.netbeans.modules.form.editors;
import com.bearsoft.org.netbeans.modules.form.NamedPropertyEditor;
import java.beans.*;
import org.openide.util.NbBundle;
/**
* Editor for mnemonic property
*
* @author Josef Kozak
*/
public class MnemonicEditor extends PropertyEditorSupport implements NamedPropertyEditor {

    /**
     * Converts the stored mnemonic (kept internally as an {@code Integer},
     * see {@link #setValue}) to text: printable ASCII is returned as-is,
     * control characters as their Java escape ({@code \b \t \n \f \r \\}),
     * everything else as a {@code \\uXXXX} escape. A zero mnemonic yields
     * the empty string.
     */
    @Override
    public String getAsText() {
        Object ovalue = getValue();
        char value = (char) 0;
        if (ovalue instanceof Character) {
            value = ((Character) ovalue).charValue();
        } else if (ovalue instanceof Integer) {
            value = (char) ((Integer) ovalue).intValue();
        }
        if (value == 0) {
            return "";
        }
        // StringBuilder instead of StringBuffer: no cross-thread sharing here.
        final StringBuilder buf = new StringBuilder(6);
        switch (value) {
            case '\b':
                buf.append("\\b");
                break; // NOI18N
            case '\t':
                buf.append("\\t");
                break; // NOI18N
            case '\n':
                buf.append("\\n");
                break; // NOI18N
            case '\f':
                buf.append("\\f");
                break; // NOI18N
            case '\r':
                buf.append("\\r");
                break; // NOI18N
            case '\\':
                buf.append("\\\\");
                break; // NOI18N
            default:
                if (value >= 0x0020 && value <= 0x007f) {
                    // Printable ASCII: emit the character itself.
                    buf.append(value);
                } else {
                    // Anything else: zero-padded 4-digit unicode escape.
                    buf.append("\\u"); // NOI18N
                    String hex = Integer.toHexString(value);
                    for (int j = 0; j < 4 - hex.length(); j++) {
                        buf.append('0');
                    }
                    buf.append(hex);
                }
        }
        return buf.toString();
    }

    /**
     * Set the property value by parsing the given String. Accepts a single
     * character, a Java escape sequence ({@code \b \t \n \f \r \\ \\uXXXX}),
     * or a decimal integer; anything else is stored as the string itself.
     * Unparsable escape sequences are silently ignored (value unchanged),
     * matching the original behavior.
     *
     * @param text the string to be parsed
     */
    @Override
    public void setAsText(String text) throws IllegalArgumentException {
        if (text.length() < 1) {
            setValue(Integer.valueOf(0)); // empty text clears the mnemonic
        } else if (text.length() == 1 && text.charAt(0) != '\\') {
            setValue(Character.valueOf(text.charAt(0)));
        } else if (text.charAt(0) == '\\') {
            // backslash means an escape sequence
            char value = 0;
            char ch = text.length() >= 2 ? text.charAt(1) : '\\';
            switch (ch) {
                case 'b':
                    value = '\b';
                    break;
                case 't':
                    value = '\t';
                    break;
                case 'n':
                    value = '\n';
                    break;
                case 'f':
                    value = '\f';
                    break;
                case 'r':
                    value = '\r';
                    break;
                case '\\':
                    value = '\\';
                    break;
                case 'u':
                    String num = text.substring(2);
                    if (num.length() > 4) {
                        // ignore longer strings
                        return;
                    }
                    try {
                        value = (char) Integer.parseInt(num, 16);
                        break;
                    } catch (NumberFormatException nfe) {
                        // ignore non parsable strings
                        return;
                    }
                default:
                    // ignore non-chars after backslash
                    return;
            }
            setValue(Character.valueOf(value));
        } else {
            try {
                setValue(Integer.valueOf(text));
            } catch (NumberFormatException e) {
                setValue(text);
            }
        }
    }

    /**
     * Accepts Integer, Character and String values; the value is normalized
     * to an {@code Integer} code point. If the argument is a String the
     * first character is taken as the new value.
     *
     * @param newValue new value
     * @throws IllegalArgumentException if the value is none of the accepted
     *         types or an empty String
     */
    @Override
    public void setValue(Object newValue) throws IllegalArgumentException {
        if (newValue instanceof Integer) {
            super.setValue(newValue);
            return;
        }
        if (newValue instanceof Character) {
            super.setValue(Integer.valueOf(((Character) newValue).charValue()));
            return;
        }
        if (newValue instanceof String) {
            String text = (String) newValue;
            if (text.length() >= 1) {
                super.setValue(Integer.valueOf(text.charAt(0)));
                return;
            }
        }
        throw new IllegalArgumentException();
    }

    /**
     * This method is intended for use when generating Java code to set the
     * value of the property. It should return a fragment of Java code that can
     * be used to initialize a variable with the current property value. <p>
     * Example results are "'a'", "'\\n'", "'\\u00e9'".
     *
     * @return A fragment of Java code representing an initializer for the
     * current value.
     */
    @Override
    public String getJavaInitializationString() {
        String text = getAsText();
        if (text.isEmpty()) {
            // A zero mnemonic used to render as the invalid char literal ''.
            return "0"; // NOI18N
        }
        return "'" + text + "'"; // NOI18N
    }

    // NamedPropertyEditor implementation
    @Override
    public String getDisplayName() {
        return NbBundle.getMessage(MnemonicEditor.class, "CTL_MnemonicsEditor_DisplayName"); // NOI18N
    }
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.groovy.lang.psi.impl.statements.typedef;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiElement;
import com.intellij.psi.StubBasedPsiElement;
import com.intellij.psi.stubs.EmptyStub;
import com.intellij.psi.stubs.IStubElementType;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.lexer.GroovyTokenTypes;
import org.jetbrains.plugins.groovy.lang.lexer.TokenSets;
import org.jetbrains.plugins.groovy.lang.parser.GroovyEmptyStubElementTypes;
import org.jetbrains.plugins.groovy.lang.parser.GroovyStubElementTypes;
import org.jetbrains.plugins.groovy.lang.psi.GroovyElementVisitor;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.*;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrEnumDefinitionBody;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinitionBody;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrEnumConstant;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrEnumConstantList;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMembersDeclaration;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.api.toplevel.GrTopStatement;
import org.jetbrains.plugins.groovy.lang.psi.impl.GrStubElementBase;
import org.jetbrains.plugins.groovy.lang.psi.impl.PsiImplUtil;
import java.util.ArrayList;
import java.util.List;
/**
 * Base PSI implementation for the body of a Groovy type definition — the
 * region between the braces of a class, interface or enum. Backed either by
 * an AST node or by an {@link EmptyStub}.
 *
 * @author Dmitry.Krasilschikov, ilyas
 */
public abstract class GrTypeDefinitionBodyBase extends GrStubElementBase<EmptyStub> implements GrTypeDefinitionBody {
// AST-backed constructor.
public GrTypeDefinitionBodyBase(@NotNull ASTNode node) {
super(node);
}
// Stub-backed constructor; classBody identifies the concrete stub element type.
public GrTypeDefinitionBodyBase(EmptyStub stub, final IStubElementType classBody) {
super(stub, classBody);
}
@Override
public abstract void accept(@NotNull GroovyElementVisitor visitor);
@Override
public String toString() {
return "Type definition body";
}
// Collects every GrField declared by the body's variable declarations.
// Works on stubs when available (getStubOrPsiChildren), so it does not
// force AST loading.
@Override
public GrField @NotNull [] getFields() {
GrVariableDeclaration[] declarations = getStubOrPsiChildren(GroovyStubElementTypes.VARIABLE_DECLARATION, GrVariableDeclaration.ARRAY_FACTORY);
List<GrField> result = new ArrayList<>();
for (GrVariableDeclaration declaration : declarations) {
GrVariable[] variables = declaration.getVariables();
for (GrVariable variable : variables) {
// A declaration can mix fields and non-field variables; keep fields only.
if (variable instanceof GrField) {
result.add((GrField)variable);
}
}
}
return result.toArray(GrField.EMPTY_ARRAY);
}
@Override
public GrMethod @NotNull [] getMethods() {
return getStubOrPsiChildren(TokenSets.METHOD_DEFS, GrMethod.ARRAY_FACTORY);
}
@Override
public GrMembersDeclaration @NotNull [] getMemberDeclarations() {
return findChildrenByClass(GrMembersDeclaration.class);
}
// Opening curly brace of the body, or null if absent (incomplete code).
@Override
@Nullable
public PsiElement getLBrace() {
return findChildByType(GroovyTokenTypes.mLCURLY);
}
// Closing curly brace of the body, or null if absent (incomplete code).
@Override
@Nullable
public PsiElement getRBrace() {
return findChildByType(GroovyTokenTypes.mRCURLY);
}
@Override
public GrClassInitializer @NotNull [] getInitializers() {
return findChildrenByClass(GrClassInitializer.class);
}
@Override
public GrTypeDefinition @NotNull [] getInnerClasses() {
return getStubOrPsiChildren(TokenSets.TYPE_DEFINITIONS, GrTypeDefinition.ARRAY_FACTORY);
}
@Override
public void removeVariable(GrVariable variable) {
PsiImplUtil.removeVariable(variable);
}
// Inserts a variable declaration before the given anchor statement, or
// before the closing brace when anchor is null. Throws when neither an
// anchor (belonging to this body) nor a closing brace exists.
@Override
public GrVariableDeclaration addVariableDeclarationBefore(GrVariableDeclaration declaration, GrStatement anchor) throws IncorrectOperationException {
PsiElement rBrace = getRBrace();
if (anchor == null && rBrace == null) {
throw new IncorrectOperationException();
}
if (anchor != null && !this.equals(anchor.getParent())) {
throw new IncorrectOperationException();
}
ASTNode elemNode = declaration.getNode();
final ASTNode anchorNode = anchor != null ? anchor.getNode() : rBrace.getNode();
getNode().addChild(elemNode, anchorNode);
// Keep the new declaration on its own line.
getNode().addLeaf(GroovyTokenTypes.mNLS, "\n", anchorNode);
return (GrVariableDeclaration) elemNode.getPsi();
}
// When a top-level statement is deleted, also remove its trailing
// separators/whitespace so the body stays well-formed.
@Override
public void deleteChildInternal(@NotNull ASTNode child) {
final PsiElement element = child.getPsi();
if (element instanceof GrTopStatement) {
PsiImplUtil.deleteStatementTail(this, element);
}
super.deleteChildInternal(child);
}
@Override
public void deleteChildRange(PsiElement first, PsiElement last) throws IncorrectOperationException {
if (last instanceof GrTopStatement) {
PsiImplUtil.deleteStatementTail(this, last);
}
super.deleteChildRange(first, last);
}
/** Body of an ordinary class/interface definition. */
public static class GrClassBody extends GrTypeDefinitionBodyBase implements StubBasedPsiElement<EmptyStub> {
public GrClassBody(@NotNull ASTNode node) {
super(node);
}
public GrClassBody(EmptyStub stub) {
super(stub, GroovyEmptyStubElementTypes.CLASS_BODY);
}
@Override
public void accept(@NotNull GroovyElementVisitor visitor) {
visitor.visitTypeDefinitionBody(this);
}
}
/** Body of an enum definition; additionally exposes the enum constants. */
public static class GrEnumBody extends GrTypeDefinitionBodyBase implements GrEnumDefinitionBody, StubBasedPsiElement<EmptyStub> {
public GrEnumBody(@NotNull ASTNode node) {
super(node);
}
public GrEnumBody(EmptyStub stub) {
super(stub, GroovyEmptyStubElementTypes.ENUM_BODY);
}
@Override
@Nullable
public GrEnumConstantList getEnumConstantList() {
return getStubOrPsiChild(GroovyEmptyStubElementTypes.ENUM_CONSTANTS);
}
@Override
public GrEnumConstant @NotNull [] getEnumConstants() {
GrEnumConstantList list = getEnumConstantList();
if (list != null) return list.getEnumConstants();
return GrEnumConstant.EMPTY_ARRAY;
}
// Enum fields = regular body fields plus the enum constants.
@Override
public GrField @NotNull [] getFields() {
GrField[] bodyFields = super.getFields();
GrEnumConstant[] enumConstants = getEnumConstants();
if (bodyFields.length == 0) return enumConstants;
if (enumConstants.length == 0) return bodyFields;
return ArrayUtil.mergeArrays(bodyFields, enumConstants);
}
@Override
public void accept(@NotNull GroovyElementVisitor visitor) {
visitor.visitEnumDefinitionBody(this);
}
}
// When constructors are added to the body, rename their identifier to the
// enclosing class's name (e.g. after a class rename or a copy-paste).
// NOTE(review): assumes every CONSTRUCTOR child has an mIDENT child;
// oldIdentifier would be null otherwise — confirm upstream guarantees this.
@Override
public ASTNode addInternal(ASTNode first, ASTNode last, ASTNode anchor, Boolean before) {
ASTNode afterLast = last.getTreeNext();
ASTNode next;
for (ASTNode child = first; child != afterLast; child = next) {
next = child.getTreeNext();
if (child.getElementType() == GroovyStubElementTypes.CONSTRUCTOR) {
ASTNode oldIdentifier = child.findChildByType(GroovyTokenTypes.mIDENT);
ASTNode newIdentifier = ((GrTypeDefinition)getParent()).getNameIdentifierGroovy().getNode().copyElement();
child.replaceChild(oldIdentifier, newIdentifier);
}
}
return super.addInternal(first, last, anchor, before);
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/containeranalysis/v1beta1/provenance/provenance.proto
package io.grafeas.v1beta1.provenance;
/**
 * Read-only accessor interface for the {@code grafeas.v1beta1.provenance.BuildProvenance}
 * message, implemented by both the generated message class and its builder.
 *
 * <p>Generated by the protocol buffer compiler from
 * {@code google/devtools/containeranalysis/v1beta1/provenance/provenance.proto}; do not edit.
 */
public interface BuildProvenanceOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:grafeas.v1beta1.provenance.BuildProvenance)
    com.google.protobuf.MessageOrBuilder {

  /** Unique identifier of the build. Proto field: <code>string id = 1;</code> */
  java.lang.String getId();

  /** Bytes view of {@code id}. Proto field: <code>string id = 1;</code> */
  com.google.protobuf.ByteString getIdBytes();

  /** ID of the project. Proto field: <code>string project_id = 2;</code> */
  java.lang.String getProjectId();

  /** Bytes view of {@code project_id}. Proto field: <code>string project_id = 2;</code> */
  com.google.protobuf.ByteString getProjectIdBytes();

  /** Commands requested by the build. Proto field: <code>repeated .grafeas.v1beta1.provenance.Command commands = 3;</code> */
  java.util.List<io.grafeas.v1beta1.provenance.Command> getCommandsList();

  /** Command at the given index. Proto field: <code>repeated .grafeas.v1beta1.provenance.Command commands = 3;</code> */
  io.grafeas.v1beta1.provenance.Command getCommands(int index);

  /** Number of commands requested by the build. Proto field: <code>repeated .grafeas.v1beta1.provenance.Command commands = 3;</code> */
  int getCommandsCount();

  /** Commands requested by the build, as message-or-builder views. Proto field: <code>repeated .grafeas.v1beta1.provenance.Command commands = 3;</code> */
  java.util.List<? extends io.grafeas.v1beta1.provenance.CommandOrBuilder>
      getCommandsOrBuilderList();

  /** Message-or-builder view of the command at the given index. Proto field: <code>repeated .grafeas.v1beta1.provenance.Command commands = 3;</code> */
  io.grafeas.v1beta1.provenance.CommandOrBuilder getCommandsOrBuilder(int index);

  /** Output of the build. Proto field: <code>repeated .grafeas.v1beta1.provenance.Artifact built_artifacts = 4;</code> */
  java.util.List<io.grafeas.v1beta1.provenance.Artifact> getBuiltArtifactsList();

  /** Built artifact at the given index. Proto field: <code>repeated .grafeas.v1beta1.provenance.Artifact built_artifacts = 4;</code> */
  io.grafeas.v1beta1.provenance.Artifact getBuiltArtifacts(int index);

  /** Number of artifacts output by the build. Proto field: <code>repeated .grafeas.v1beta1.provenance.Artifact built_artifacts = 4;</code> */
  int getBuiltArtifactsCount();

  /** Output of the build, as message-or-builder views. Proto field: <code>repeated .grafeas.v1beta1.provenance.Artifact built_artifacts = 4;</code> */
  java.util.List<? extends io.grafeas.v1beta1.provenance.ArtifactOrBuilder>
      getBuiltArtifactsOrBuilderList();

  /** Message-or-builder view of the artifact at the given index. Proto field: <code>repeated .grafeas.v1beta1.provenance.Artifact built_artifacts = 4;</code> */
  io.grafeas.v1beta1.provenance.ArtifactOrBuilder getBuiltArtifactsOrBuilder(int index);

  /** Whether {@code create_time} is set. Proto field: <code>.google.protobuf.Timestamp create_time = 5;</code> */
  boolean hasCreateTime();

  /** Time at which the build was created. Proto field: <code>.google.protobuf.Timestamp create_time = 5;</code> */
  com.google.protobuf.Timestamp getCreateTime();

  /** Message-or-builder view of {@code create_time}. Proto field: <code>.google.protobuf.Timestamp create_time = 5;</code> */
  com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder();

  /** Whether {@code start_time} is set. Proto field: <code>.google.protobuf.Timestamp start_time = 6;</code> */
  boolean hasStartTime();

  /** Time at which execution of the build was started. Proto field: <code>.google.protobuf.Timestamp start_time = 6;</code> */
  com.google.protobuf.Timestamp getStartTime();

  /** Message-or-builder view of {@code start_time}. Proto field: <code>.google.protobuf.Timestamp start_time = 6;</code> */
  com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder();

  /** Whether {@code end_time} is set. Proto field: <code>.google.protobuf.Timestamp end_time = 7;</code> */
  boolean hasEndTime();

  /** Time at which execution of the build was finished. Proto field: <code>.google.protobuf.Timestamp end_time = 7;</code> */
  com.google.protobuf.Timestamp getEndTime();

  /** Message-or-builder view of {@code end_time}. Proto field: <code>.google.protobuf.Timestamp end_time = 7;</code> */
  com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder();

  /**
   * E-mail address of the user who initiated this build. Note that this was the
   * user's e-mail address at the time the build was initiated; this address may
   * not represent the same end-user for all time.
   *
   * <p>Proto field: <code>string creator = 8;</code>
   */
  java.lang.String getCreator();

  /** Bytes view of {@code creator}. Proto field: <code>string creator = 8;</code> */
  com.google.protobuf.ByteString getCreatorBytes();

  /** URI where any logs for this provenance were written. Proto field: <code>string logs_uri = 9;</code> */
  java.lang.String getLogsUri();

  /** Bytes view of {@code logs_uri}. Proto field: <code>string logs_uri = 9;</code> */
  com.google.protobuf.ByteString getLogsUriBytes();

  /** Whether {@code source_provenance} is set. Proto field: <code>.grafeas.v1beta1.provenance.Source source_provenance = 10;</code> */
  boolean hasSourceProvenance();

  /** Details of the Source input to the build. Proto field: <code>.grafeas.v1beta1.provenance.Source source_provenance = 10;</code> */
  io.grafeas.v1beta1.provenance.Source getSourceProvenance();

  /** Message-or-builder view of {@code source_provenance}. Proto field: <code>.grafeas.v1beta1.provenance.Source source_provenance = 10;</code> */
  io.grafeas.v1beta1.provenance.SourceOrBuilder getSourceProvenanceOrBuilder();

  /** Trigger identifier if the build was triggered automatically; empty if not. Proto field: <code>string trigger_id = 11;</code> */
  java.lang.String getTriggerId();

  /** Bytes view of {@code trigger_id}. Proto field: <code>string trigger_id = 11;</code> */
  com.google.protobuf.ByteString getTriggerIdBytes();

  /**
   * Number of entries in the build options map. Special options applied to this
   * build; a catch-all field where build providers can enter any desired
   * additional details. Proto field: <code>map<string, string> build_options = 12;</code>
   */
  int getBuildOptionsCount();

  /** Whether the build options map contains the given key. Proto field: <code>map<string, string> build_options = 12;</code> */
  boolean containsBuildOptions(java.lang.String key);

  /** Use {@link #getBuildOptionsMap()} instead. */
  @java.lang.Deprecated
  java.util.Map<java.lang.String, java.lang.String> getBuildOptions();

  /** Special options applied to this build; a catch-all map where build providers can enter any desired additional details. Proto field: <code>map<string, string> build_options = 12;</code> */
  java.util.Map<java.lang.String, java.lang.String> getBuildOptionsMap();

  /** Build option for {@code key}, or {@code defaultValue} if absent. Proto field: <code>map<string, string> build_options = 12;</code> */
  java.lang.String getBuildOptionsOrDefault(java.lang.String key, java.lang.String defaultValue);

  /** Build option for {@code key}; behavior for an absent key follows the protobuf generated-code contract. Proto field: <code>map<string, string> build_options = 12;</code> */
  java.lang.String getBuildOptionsOrThrow(java.lang.String key);

  /** Version string of the builder at the time this build was executed. Proto field: <code>string builder_version = 13;</code> */
  java.lang.String getBuilderVersion();

  /** Bytes view of {@code builder_version}. Proto field: <code>string builder_version = 13;</code> */
  com.google.protobuf.ByteString getBuilderVersionBytes();
}
| |
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
/**
 * <p>
 * A software license that can be associated with an Amazon EC2 instance when launched (ex. a Microsoft Windows license).
 * </p>
 */
public class License {

    /** The unique ID identifying the license. */
    private String licenseId;

    /** The license type (ex. "Microsoft/Windows/Standard"). */
    private String type;

    /** The name of the pool in which the license is kept. */
    private String pool;

    /**
     * The capacities available for this license, indicating how many licenses
     * are in use, how many are available, how many Amazon EC2 instances can be
     * supported, etc.
     */
    private java.util.List<LicenseCapacity> capacities;

    /** A list of tags for the License. */
    private java.util.List<Tag> tags;

    /**
     * Returns the unique ID identifying the license.
     *
     * @return The unique ID identifying the license.
     */
    public String getLicenseId() {
        return licenseId;
    }

    /**
     * Sets the unique ID identifying the license.
     *
     * @param licenseId The unique ID identifying the license.
     */
    public void setLicenseId(String licenseId) {
        this.licenseId = licenseId;
    }

    /**
     * Sets the unique ID identifying the license.
     *
     * @param licenseId The unique ID identifying the license.
     * @return A reference to this updated object so that method calls can be chained together.
     */
    public License withLicenseId(String licenseId) {
        this.licenseId = licenseId;
        return this;
    }

    /**
     * Returns the license type (ex. "Microsoft/Windows/Standard").
     *
     * @return The license type.
     */
    public String getType() {
        return type;
    }

    /**
     * Sets the license type (ex. "Microsoft/Windows/Standard").
     *
     * @param type The license type.
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * Sets the license type (ex. "Microsoft/Windows/Standard").
     *
     * @param type The license type.
     * @return A reference to this updated object so that method calls can be chained together.
     */
    public License withType(String type) {
        this.type = type;
        return this;
    }

    /**
     * Returns the name of the pool in which the license is kept.
     *
     * @return The pool name.
     */
    public String getPool() {
        return pool;
    }

    /**
     * Sets the name of the pool in which the license is kept.
     *
     * @param pool The pool name.
     */
    public void setPool(String pool) {
        this.pool = pool;
    }

    /**
     * Sets the name of the pool in which the license is kept.
     *
     * @param pool The pool name.
     * @return A reference to this updated object so that method calls can be chained together.
     */
    public License withPool(String pool) {
        this.pool = pool;
        return this;
    }

    /**
     * Returns the capacities available for this license. The backing list is
     * created lazily, so this method never returns {@code null}; the returned
     * list is the live internal list.
     *
     * @return The capacities available for this license.
     */
    public java.util.List<LicenseCapacity> getCapacities() {
        if (capacities == null) {
            capacities = new java.util.ArrayList<LicenseCapacity>();
        }
        return capacities;
    }

    /**
     * Replaces the capacities with a copy of the given collection.
     *
     * @param capacities The new capacities, or {@code null} to clear them.
     */
    public void setCapacities(java.util.Collection<LicenseCapacity> capacities) {
        if (capacities == null) {
            this.capacities = null;
            return;
        }
        // Defensive copy so later changes to the caller's collection do not
        // affect this object.
        this.capacities = new java.util.ArrayList<LicenseCapacity>(capacities);
    }

    /**
     * Appends the given capacities to this license.
     *
     * @param capacities The capacities to add.
     * @return A reference to this updated object so that method calls can be chained together.
     */
    public License withCapacities(LicenseCapacity... capacities) {
        // Check the field, not getCapacities(): the getter lazily creates the
        // list and never returns null, so the previous null check made the
        // presizing branch unreachable dead code.
        if (this.capacities == null) {
            this.capacities = new java.util.ArrayList<LicenseCapacity>(capacities.length);
        }
        for (LicenseCapacity value : capacities) {
            this.capacities.add(value);
        }
        return this;
    }

    /**
     * Replaces the capacities with a copy of the given collection.
     *
     * @param capacities The new capacities, or {@code null} to clear them.
     * @return A reference to this updated object so that method calls can be chained together.
     */
    public License withCapacities(java.util.Collection<LicenseCapacity> capacities) {
        setCapacities(capacities);
        return this;
    }

    /**
     * Returns the list of tags for the License. The backing list is created
     * lazily, so this method never returns {@code null}; the returned list is
     * the live internal list.
     *
     * @return A list of tags for the License.
     */
    public java.util.List<Tag> getTags() {
        if (tags == null) {
            tags = new java.util.ArrayList<Tag>();
        }
        return tags;
    }

    /**
     * Replaces the tags with a copy of the given collection.
     *
     * @param tags The new tags, or {@code null} to clear them.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        if (tags == null) {
            this.tags = null;
            return;
        }
        // Defensive copy, same rationale as setCapacities.
        this.tags = new java.util.ArrayList<Tag>(tags);
    }

    /**
     * Appends the given tags to this license.
     *
     * @param tags The tags to add.
     * @return A reference to this updated object so that method calls can be chained together.
     */
    public License withTags(Tag... tags) {
        // See withCapacities: test the field directly so the presizing branch
        // can actually run on first use.
        if (this.tags == null) {
            this.tags = new java.util.ArrayList<Tag>(tags.length);
        }
        for (Tag value : tags) {
            this.tags.add(value);
        }
        return this;
    }

    /**
     * Replaces the tags with a copy of the given collection.
     *
     * @param tags The new tags, or {@code null} to clear them.
     * @return A reference to this updated object so that method calls can be chained together.
     */
    public License withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (licenseId != null) sb.append("LicenseId: " + licenseId + ", ");
        if (type != null) sb.append("Type: " + type + ", ");
        if (pool != null) sb.append("Pool: " + pool + ", ");
        if (capacities != null) sb.append("Capacities: " + capacities + ", ");
        if (tags != null) sb.append("Tags: " + tags + ", ");
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getLicenseId() == null) ? 0 : getLicenseId().hashCode());
        hashCode = prime * hashCode + ((getType() == null) ? 0 : getType().hashCode());
        hashCode = prime * hashCode + ((getPool() == null) ? 0 : getPool().hashCode());
        hashCode = prime * hashCode + ((getCapacities() == null) ? 0 : getCapacities().hashCode());
        hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (!(obj instanceof License)) return false;
        License other = (License) obj;
        // Field-by-field comparison via the getters (note that the list getters
        // lazily initialize, so the list comparisons never see null here).
        if (other.getLicenseId() == null ^ this.getLicenseId() == null) return false;
        if (other.getLicenseId() != null && !other.getLicenseId().equals(this.getLicenseId())) return false;
        if (other.getType() == null ^ this.getType() == null) return false;
        if (other.getType() != null && !other.getType().equals(this.getType())) return false;
        if (other.getPool() == null ^ this.getPool() == null) return false;
        if (other.getPool() != null && !other.getPool().equals(this.getPool())) return false;
        if (other.getCapacities() == null ^ this.getCapacities() == null) return false;
        if (other.getCapacities() != null && !other.getCapacities().equals(this.getCapacities())) return false;
        if (other.getTags() == null ^ this.getTags() == null) return false;
        if (other.getTags() != null && !other.getTags().equals(this.getTags())) return false;
        return true;
    }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.iot.model;
import java.io.Serializable;
/**
 * <p>
 * Describes a rule: the SQL statement that selects messages from a topic, the
 * rule's description, the actions to run, whether it is disabled, and the SQL
 * rules engine version.
 * </p>
 */
public class TopicRulePayload implements Serializable, Cloneable {

    /**
     * The SQL statement used to query the topic. For more information, see the
     * <a href="http://docs.aws.amazon.com/iot/latest/developerguide/iot-rules.html#aws-iot-sql-reference">AWS
     * IoT SQL Reference</a> in the <i>AWS IoT Developer Guide</i>.
     */
    private String sql;

    /** The description of the rule. */
    private String description;

    /** The actions associated with the rule. */
    private java.util.List<Action> actions;

    /** Specifies whether the rule is disabled. */
    private Boolean ruleDisabled;

    /** The version of the SQL rules engine to use when evaluating the rule. */
    private String awsIotSqlVersion;

    /**
     * Sets the SQL statement used to query the topic.
     *
     * @param sql the SQL statement (see the AWS IoT SQL Reference).
     */
    public void setSql(String sql) {
        this.sql = sql;
    }

    /**
     * Returns the SQL statement used to query the topic.
     *
     * @return the SQL statement (see the AWS IoT SQL Reference).
     */
    public String getSql() {
        return this.sql;
    }

    /**
     * Fluent variant of {@link #setSql(String)}.
     *
     * @param sql the SQL statement used to query the topic.
     * @return this object, so that method calls can be chained together.
     */
    public TopicRulePayload withSql(String sql) {
        setSql(sql);
        return this;
    }

    /**
     * Sets the description of the rule.
     *
     * @param description the description of the rule.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * Returns the description of the rule.
     *
     * @return the description of the rule.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Fluent variant of {@link #setDescription(String)}.
     *
     * @param description the description of the rule.
     * @return this object, so that method calls can be chained together.
     */
    public TopicRulePayload withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * Returns the actions associated with the rule (may be {@code null}).
     *
     * @return the actions associated with the rule.
     */
    public java.util.List<Action> getActions() {
        return actions;
    }

    /**
     * Replaces the actions associated with the rule with a copy of the given
     * collection.
     *
     * @param actions the actions, or {@code null} to clear them.
     */
    public void setActions(java.util.Collection<Action> actions) {
        this.actions = (actions == null) ? null : new java.util.ArrayList<Action>(actions);
    }

    /**
     * Appends the given actions to the rule.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setActions(java.util.Collection)} or
     * {@link #withActions(java.util.Collection)} if you want to override the
     * existing values.
     *
     * @param actions the actions to append.
     * @return this object, so that method calls can be chained together.
     */
    public TopicRulePayload withActions(Action... actions) {
        if (this.actions == null) {
            setActions(new java.util.ArrayList<Action>(actions.length));
        }
        java.util.Collections.addAll(this.actions, actions);
        return this;
    }

    /**
     * Replaces the actions associated with the rule.
     *
     * @param actions the actions, or {@code null} to clear them.
     * @return this object, so that method calls can be chained together.
     */
    public TopicRulePayload withActions(java.util.Collection<Action> actions) {
        setActions(actions);
        return this;
    }

    /**
     * Sets whether the rule is disabled.
     *
     * @param ruleDisabled whether the rule is disabled.
     */
    public void setRuleDisabled(Boolean ruleDisabled) {
        this.ruleDisabled = ruleDisabled;
    }

    /**
     * Returns whether the rule is disabled.
     *
     * @return whether the rule is disabled.
     */
    public Boolean getRuleDisabled() {
        return this.ruleDisabled;
    }

    /**
     * Fluent variant of {@link #setRuleDisabled(Boolean)}.
     *
     * @param ruleDisabled whether the rule is disabled.
     * @return this object, so that method calls can be chained together.
     */
    public TopicRulePayload withRuleDisabled(Boolean ruleDisabled) {
        setRuleDisabled(ruleDisabled);
        return this;
    }

    /**
     * Returns whether the rule is disabled.
     *
     * @return whether the rule is disabled.
     */
    public Boolean isRuleDisabled() {
        return this.ruleDisabled;
    }

    /**
     * Sets the version of the SQL rules engine to use when evaluating the rule.
     *
     * @param awsIotSqlVersion the SQL rules engine version.
     */
    public void setAwsIotSqlVersion(String awsIotSqlVersion) {
        this.awsIotSqlVersion = awsIotSqlVersion;
    }

    /**
     * Returns the version of the SQL rules engine to use when evaluating the
     * rule.
     *
     * @return the SQL rules engine version.
     */
    public String getAwsIotSqlVersion() {
        return this.awsIotSqlVersion;
    }

    /**
     * Fluent variant of {@link #setAwsIotSqlVersion(String)}.
     *
     * @param awsIotSqlVersion the SQL rules engine version.
     * @return this object, so that method calls can be chained together.
     */
    public TopicRulePayload withAwsIotSqlVersion(String awsIotSqlVersion) {
        setAwsIotSqlVersion(awsIotSqlVersion);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getSql() != null) {
            buf.append("Sql: " + getSql() + ",");
        }
        if (getDescription() != null) {
            buf.append("Description: " + getDescription() + ",");
        }
        if (getActions() != null) {
            buf.append("Actions: " + getActions() + ",");
        }
        if (getRuleDisabled() != null) {
            buf.append("RuleDisabled: " + getRuleDisabled() + ",");
        }
        if (getAwsIotSqlVersion() != null) {
            buf.append("AwsIotSqlVersion: " + getAwsIotSqlVersion());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof TopicRulePayload)) {
            return false;
        }
        TopicRulePayload that = (TopicRulePayload) obj;
        // Objects.equals reproduces the null-xor / equals pattern exactly.
        return java.util.Objects.equals(that.getSql(), this.getSql())
                && java.util.Objects.equals(that.getDescription(), this.getDescription())
                && java.util.Objects.equals(that.getActions(), this.getActions())
                && java.util.Objects.equals(that.getRuleDisabled(), this.getRuleDisabled())
                && java.util.Objects.equals(that.getAwsIotSqlVersion(), this.getAwsIotSqlVersion());
    }

    @Override
    public int hashCode() {
        // Objects.hashCode(x) is (x == null) ? 0 : x.hashCode(), so the value
        // produced here is identical to the hand-rolled form.
        final int prime = 31;
        int result = 1;
        result = prime * result + java.util.Objects.hashCode(getSql());
        result = prime * result + java.util.Objects.hashCode(getDescription());
        result = prime * result + java.util.Objects.hashCode(getActions());
        result = prime * result + java.util.Objects.hashCode(getRuleDisabled());
        result = prime * result + java.util.Objects.hashCode(getAwsIotSqlVersion());
        return result;
    }

    @Override
    public TopicRulePayload clone() {
        try {
            return (TopicRulePayload) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright (c) 2008-2018 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.haulmont.cuba.gui.actions.list;
import com.haulmont.chile.core.model.MetaClass;
import com.haulmont.cuba.client.ClientConfig;
import com.haulmont.cuba.core.entity.Entity;
import com.haulmont.cuba.core.global.Configuration;
import com.haulmont.cuba.core.global.Messages;
import com.haulmont.cuba.core.global.Security;
import com.haulmont.cuba.gui.ScreenBuilders;
import com.haulmont.cuba.gui.builders.EditorBuilder;
import com.haulmont.cuba.gui.components.Action;
import com.haulmont.cuba.gui.components.ActionType;
import com.haulmont.cuba.gui.components.Component;
import com.haulmont.cuba.gui.components.actions.ListAction;
import com.haulmont.cuba.gui.components.data.meta.EntityDataUnit;
import com.haulmont.cuba.gui.icons.CubaIcon;
import com.haulmont.cuba.gui.icons.Icons;
import com.haulmont.cuba.gui.meta.*;
import com.haulmont.cuba.gui.screen.*;
import com.haulmont.cuba.gui.sys.ActionScreenInitializer;
import com.haulmont.cuba.security.entity.EntityOp;
import javax.annotation.Nullable;
import javax.inject.Inject;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import static com.haulmont.cuba.gui.screen.FrameOwner.WINDOW_COMMIT_AND_CLOSE_ACTION;
/**
* Standard action for creating an entity instance using its editor screen.
* <p>
* Should be defined for a list component ({@code Table}, {@code DataGrid}, etc.) in a screen XML descriptor.
* <p>
* The action instance can be parameterized using the nested {@code properties} XML element or programmatically in the
* screen controller.
*
* @param <E> type of entity
*/
@StudioAction(category = "List Actions", description = "Creates an entity instance using its editor screen")
@ActionType(CreateAction.ID)
public class CreateAction<E extends Entity> extends ListAction implements Action.DisabledWhenScreenReadOnly,
Action.ExecutableAction {
public static final String ID = "create";
@Inject
protected ScreenBuilders screenBuilders;
@Inject
protected Security security;
protected ActionScreenInitializer screenInitializer = new ActionScreenInitializer();
protected Supplier<E> newEntitySupplier;
protected Consumer<E> initializer;
protected Consumer<E> afterCommitHandler;
protected Function<E, E> transformation;
/** Creates the action with the standard {@link #ID}. */
public CreateAction() {
    this(ID);
}
/**
 * Creates the action with the given id.
 *
 * @param id action identifier
 */
public CreateAction(String id) {
    super(id);
    // Mark this action as primary.
    this.primary = true;
}
/**
 * Returns the editor screen open mode if it was set by {@link #setOpenMode(OpenMode)} or in the screen XML.
 *
 * @return the open mode, or {@code null} if it was not set
 */
@Nullable
public OpenMode getOpenMode() {
    return screenInitializer.getOpenMode();
}
/**
 * Sets the editor screen open mode.
 *
 * @param openMode the open mode to use when the editor screen is opened
 */
@StudioPropertiesItem
public void setOpenMode(OpenMode openMode) {
    screenInitializer.setOpenMode(openMode);
}
/**
 * Returns the editor screen id if it was set by {@link #setScreenId(String)} or in the screen XML.
 *
 * @return the editor screen id, or {@code null} if it was not set
 */
@Nullable
public String getScreenId() {
    return screenInitializer.getScreenId();
}
/**
 * Sets the editor screen id.
 *
 * @param screenId id of the editor screen to be opened by this action
 */
@StudioPropertiesItem
public void setScreenId(String screenId) {
    screenInitializer.setScreenId(screenId);
}
/**
 * Returns the editor screen class if it was set by {@link #setScreenClass(Class)} or in the screen XML.
 *
 * @return the editor screen class, or {@code null} if it was not set
 */
@Nullable
public Class getScreenClass() {
    return screenInitializer.getScreenClass();
}
/**
* Sets the editor screen class.
*/
@StudioPropertiesItem
public void setScreenClass(Class screenClass) {
screenInitializer.setScreenClass(screenClass);
}
/**
* Sets the editor screen options supplier. The supplier provides {@code ScreenOptions} to the
* opened screen.
* <p>
* The preferred way to set the supplier is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "screenOptionsSupplier")
* protected ScreenOptions petsTableCreateScreenOptionsSupplier() {
* return new MapScreenOptions(ParamsMap.of("someParameter", 10));
* }
* </pre>
*/
public void setScreenOptionsSupplier(Supplier<ScreenOptions> screenOptionsSupplier) {
screenInitializer.setScreenOptionsSupplier(screenOptionsSupplier);
}
/**
* Sets the editor screen configurer. Use the configurer if you need to provide parameters to the
* opened screen through setters.
* <p>
* The preferred way to set the configurer is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "screenConfigurer")
* protected void petsTableCreateScreenConfigurer(Screen editorScreen) {
* ((PetEdit) editorScreen).setSomeParameter(someValue);
* }
* </pre>
*/
public void setScreenConfigurer(Consumer<Screen> screenConfigurer) {
screenInitializer.setScreenConfigurer(screenConfigurer);
}
/**
* Sets the handler to be invoked when the editor screen closes.
* <p>
* The preferred way to set the handler is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "afterCloseHandler")
* protected void petsTableCreateAfterCloseHandler(AfterCloseEvent event) {
* if (event.closedWith(StandardOutcome.COMMIT)) {
* System.out.println("Committed");
* }
* }
* </pre>
*/
public void setAfterCloseHandler(Consumer<Screen.AfterCloseEvent> afterCloseHandler) {
screenInitializer.setAfterCloseHandler(afterCloseHandler);
}
/**
* Sets the new entity supplier. The supplier should return a new entity instance.
* <p>
* The preferred way to set the supplier is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "newEntitySupplier")
* protected Pet petsTableCreateNewEntitySupplier() {
* Pet pet = metadata.create(Pet.class);
* pet.setName("a cat");
* return pet;
* }
* </pre>
*/
public void setNewEntitySupplier(Supplier<E> newEntitySupplier) {
this.newEntitySupplier = newEntitySupplier;
}
/**
* Sets the new entity initializer. The initializer accepts the new entity instance and can perform its
* initialization.
* <p>
* The preferred way to set the initializer is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "initializer")
* protected void petsTableCreateInitializer(Pet entity) {
* entity.setName("a cat");
* }
* </pre>
*/
public void setInitializer(Consumer<E> initializer) {
this.initializer = initializer;
}
/**
* Sets the handler to be invoked when the editor screen commits the new entity.
* <p>
* The preferred way to set the handler is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "afterCommitHandler")
* protected void petsTableCreateAfterCommitHandler(Pet entity) {
* System.out.println("Created " + entity);
* }
* </pre>
*/
public void setAfterCommitHandler(Consumer<E> afterCommitHandler) {
this.afterCommitHandler = afterCommitHandler;
}
/**
* Sets the function to transform the committed in the editor screen entity before setting it to the target data container.
* <p>
* The preferred way to set the function is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "transformation")
* protected Pet petsTableCreateTransformation(Pet entity) {
* return doTransform(entity);
* }
* </pre>
*/
public void setTransformation(Function<E, E> transformation) {
this.transformation = transformation;
}
@Inject
protected void setMessages(Messages messages) {
this.caption = messages.getMainMessage("actions.Create");
}
@Inject
protected void setIcons(Icons icons) {
this.icon = icons.get(CubaIcon.CREATE_ACTION);
}
@Inject
protected void setConfiguration(Configuration configuration) {
ClientConfig clientConfig = configuration.getConfig(ClientConfig.class);
setShortcut(clientConfig.getTableInsertShortcut());
}
@Override
protected boolean isPermitted() {
if (target == null || !(target.getItems() instanceof EntityDataUnit)) {
return false;
}
MetaClass metaClass = ((EntityDataUnit) target.getItems()).getEntityMetaClass();
if (metaClass == null) {
return true;
}
boolean createPermitted = security.isEntityOpPermitted(metaClass, EntityOp.CREATE);
if (!createPermitted) {
return false;
}
return super.isPermitted();
}
@Override
public void actionPerform(Component component) {
// if standard behaviour
if (!hasSubscriptions(ActionPerformedEvent.class)) {
execute();
} else {
super.actionPerform(component);
}
}
/**
* Executes the action.
*/
@SuppressWarnings("unchecked")
@Override
public void execute() {
if (target == null) {
throw new IllegalStateException("CreateAction target is not set");
}
if (!(target.getItems() instanceof EntityDataUnit)) {
throw new IllegalStateException("CreateAction target items is null or does not implement EntityDataUnit");
}
MetaClass metaClass = ((EntityDataUnit) target.getItems()).getEntityMetaClass();
if (metaClass == null) {
throw new IllegalStateException("Target is not bound to entity");
}
EditorBuilder builder = screenBuilders.editor(target);
if (newEntitySupplier != null) {
E entity = newEntitySupplier.get();
builder = builder.newEntity(entity);
} else {
builder = builder.newEntity();
}
if (initializer != null) {
builder = builder.withInitializer(initializer);
}
builder = screenInitializer.initBuilder(builder);
if (transformation != null) {
builder.withTransformation(transformation);
}
Screen editor = builder.build();
if (afterCommitHandler != null) {
editor.addAfterCloseListener(afterCloseEvent -> {
CloseAction closeAction = afterCloseEvent.getCloseAction();
if (closeAction.equals(WINDOW_COMMIT_AND_CLOSE_ACTION)) {
Entity committedEntity = ((EditorScreen) editor).getEditedEntity();
afterCommitHandler.accept((E) committedEntity);
}
});
}
screenInitializer.initScreen(editor);
editor.show();
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.appservice.implementation;
import com.azure.core.annotation.BodyParam;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.Post;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.appservice.fluent.TopLevelDomainsClient;
import com.azure.resourcemanager.appservice.fluent.models.TldLegalAgreementInner;
import com.azure.resourcemanager.appservice.fluent.models.TopLevelDomainInner;
import com.azure.resourcemanager.appservice.models.DefaultErrorResponseErrorException;
import com.azure.resourcemanager.appservice.models.TldLegalAgreementCollection;
import com.azure.resourcemanager.appservice.models.TopLevelDomainAgreementOption;
import com.azure.resourcemanager.appservice.models.TopLevelDomainCollection;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in TopLevelDomainsClient. */
public final class TopLevelDomainsClientImpl implements TopLevelDomainsClient {
    // NOTE(review): not referenced anywhere else in this class; retained by the code generator.
    private final ClientLogger logger = new ClientLogger(TopLevelDomainsClientImpl.class);

    /** The proxy service used to perform REST calls. */
    private final TopLevelDomainsService service;

    /** The service client containing this operation class. */
    private final WebSiteManagementClientImpl client;

    /**
     * Initializes an instance of TopLevelDomainsClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    TopLevelDomainsClientImpl(WebSiteManagementClientImpl client) {
        this.service =
            RestProxy.create(TopLevelDomainsService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for WebSiteManagementClientTopLevelDomains to be used by the proxy
     * service to perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "WebSiteManagementCli")
    private interface TopLevelDomainsService {
        @Headers({"Content-Type: application/json"})
        @Get("/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/topLevelDomains")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(DefaultErrorResponseErrorException.class)
        Mono<Response<TopLevelDomainCollection>> list(
            @HostParam("$host") String endpoint,
            @PathParam("subscriptionId") String subscriptionId,
            @QueryParam("api-version") String apiVersion,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get("/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/topLevelDomains/{name}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(DefaultErrorResponseErrorException.class)
        Mono<Response<TopLevelDomainInner>> get(
            @HostParam("$host") String endpoint,
            @PathParam("name") String name,
            @PathParam("subscriptionId") String subscriptionId,
            @QueryParam("api-version") String apiVersion,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Post(
            "/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/topLevelDomains/{name}"
                + "/listAgreements")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(DefaultErrorResponseErrorException.class)
        Mono<Response<TldLegalAgreementCollection>> listAgreements(
            @HostParam("$host") String endpoint,
            @PathParam("name") String name,
            @PathParam("subscriptionId") String subscriptionId,
            @QueryParam("api-version") String apiVersion,
            @BodyParam("application/json") TopLevelDomainAgreementOption agreementOption,
            @HeaderParam("Accept") String accept,
            Context context);

        // "nextLink" is the absolute continuation URL returned by the previous page; it is used
        // verbatim (encoded = true), bypassing the host/path templating above.
        @Headers({"Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(DefaultErrorResponseErrorException.class)
        Mono<Response<TopLevelDomainCollection>> listNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink,
            @HostParam("$host") String endpoint,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(DefaultErrorResponseErrorException.class)
        Mono<Response<TldLegalAgreementCollection>> listAgreementsNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink,
            @HostParam("$host") String endpoint,
            @HeaderParam("Accept") String accept,
            Context context);
    }

    /**
     * Get all top-level domains supported for registration.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all top-level domains supported for registration.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<TopLevelDomainInner>> listSinglePageAsync() {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .list(
                            this.client.getEndpoint(),
                            this.client.getSubscriptionId(),
                            this.client.getApiVersion(),
                            accept,
                            context))
            .<PagedResponse<TopLevelDomainInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Propagate the client's shared context into the Reactor subscriber context.
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Get all top-level domains supported for registration.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all top-level domains supported for registration.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<TopLevelDomainInner>> listSinglePageAsync(Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Merge the caller-supplied context with the client's default context.
        context = this.client.mergeContext(context);
        return service
            .list(
                this.client.getEndpoint(),
                this.client.getSubscriptionId(),
                this.client.getApiVersion(),
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }

    /**
     * Get all top-level domains supported for registration.
     *
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all top-level domains supported for registration.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<TopLevelDomainInner> listAsync() {
        return new PagedFlux<>(() -> listSinglePageAsync(), nextLink -> listNextSinglePageAsync(nextLink));
    }

    /**
     * Get all top-level domains supported for registration.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all top-level domains supported for registration.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<TopLevelDomainInner> listAsync(Context context) {
        return new PagedFlux<>(
            () -> listSinglePageAsync(context), nextLink -> listNextSinglePageAsync(nextLink, context));
    }

    /**
     * Get all top-level domains supported for registration.
     *
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all top-level domains supported for registration.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<TopLevelDomainInner> list() {
        return new PagedIterable<>(listAsync());
    }

    /**
     * Get all top-level domains supported for registration.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all top-level domains supported for registration.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<TopLevelDomainInner> list(Context context) {
        return new PagedIterable<>(listAsync(context));
    }

    /**
     * Get details of a top-level domain.
     *
     * @param name Name of the top-level domain.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return details of a top-level domain.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<TopLevelDomainInner>> getWithResponseAsync(String name) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (name == null) {
            return Mono.error(new IllegalArgumentException("Parameter name is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .get(
                            this.client.getEndpoint(),
                            name,
                            this.client.getSubscriptionId(),
                            this.client.getApiVersion(),
                            accept,
                            context))
            // Propagate the client's shared context into the Reactor subscriber context.
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Get details of a top-level domain.
     *
     * @param name Name of the top-level domain.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return details of a top-level domain.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<TopLevelDomainInner>> getWithResponseAsync(String name, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (name == null) {
            return Mono.error(new IllegalArgumentException("Parameter name is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .get(
                this.client.getEndpoint(),
                name,
                this.client.getSubscriptionId(),
                this.client.getApiVersion(),
                accept,
                context);
    }

    /**
     * Get details of a top-level domain.
     *
     * @param name Name of the top-level domain.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return details of a top-level domain.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<TopLevelDomainInner> getAsync(String name) {
        // Unwrap the Response; an absent body completes the Mono empty rather than erroring.
        return getWithResponseAsync(name)
            .flatMap(
                (Response<TopLevelDomainInner> res) -> {
                    if (res.getValue() != null) {
                        return Mono.just(res.getValue());
                    } else {
                        return Mono.empty();
                    }
                });
    }

    /**
     * Get details of a top-level domain.
     *
     * @param name Name of the top-level domain.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return details of a top-level domain.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public TopLevelDomainInner get(String name) {
        return getAsync(name).block();
    }

    /**
     * Get details of a top-level domain.
     *
     * @param name Name of the top-level domain.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return details of a top-level domain.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<TopLevelDomainInner> getWithResponse(String name, Context context) {
        return getWithResponseAsync(name, context).block();
    }

    /**
     * Gets all legal agreements that user needs to accept before purchasing a domain.
     *
     * @param name Name of the top-level domain.
     * @param agreementOption Domain agreement options.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all legal agreements that user needs to accept before purchasing a domain.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<TldLegalAgreementInner>> listAgreementsSinglePageAsync(
        String name, TopLevelDomainAgreementOption agreementOption) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (name == null) {
            return Mono.error(new IllegalArgumentException("Parameter name is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (agreementOption == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter agreementOption is required and cannot be null."));
        } else {
            agreementOption.validate();
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .listAgreements(
                            this.client.getEndpoint(),
                            name,
                            this.client.getSubscriptionId(),
                            this.client.getApiVersion(),
                            agreementOption,
                            accept,
                            context))
            .<PagedResponse<TldLegalAgreementInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Propagate the client's shared context into the Reactor subscriber context.
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Gets all legal agreements that user needs to accept before purchasing a domain.
     *
     * @param name Name of the top-level domain.
     * @param agreementOption Domain agreement options.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all legal agreements that user needs to accept before purchasing a domain.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<TldLegalAgreementInner>> listAgreementsSinglePageAsync(
        String name, TopLevelDomainAgreementOption agreementOption, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (name == null) {
            return Mono.error(new IllegalArgumentException("Parameter name is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (agreementOption == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter agreementOption is required and cannot be null."));
        } else {
            agreementOption.validate();
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .listAgreements(
                this.client.getEndpoint(),
                name,
                this.client.getSubscriptionId(),
                this.client.getApiVersion(),
                agreementOption,
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }

    /**
     * Gets all legal agreements that user needs to accept before purchasing a domain.
     *
     * @param name Name of the top-level domain.
     * @param agreementOption Domain agreement options.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all legal agreements that user needs to accept before purchasing a domain.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<TldLegalAgreementInner> listAgreementsAsync(
        String name, TopLevelDomainAgreementOption agreementOption) {
        return new PagedFlux<>(
            () -> listAgreementsSinglePageAsync(name, agreementOption),
            nextLink -> listAgreementsNextSinglePageAsync(nextLink));
    }

    /**
     * Gets all legal agreements that user needs to accept before purchasing a domain.
     *
     * @param name Name of the top-level domain.
     * @param agreementOption Domain agreement options.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all legal agreements that user needs to accept before purchasing a domain.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<TldLegalAgreementInner> listAgreementsAsync(
        String name, TopLevelDomainAgreementOption agreementOption, Context context) {
        return new PagedFlux<>(
            () -> listAgreementsSinglePageAsync(name, agreementOption, context),
            nextLink -> listAgreementsNextSinglePageAsync(nextLink, context));
    }

    /**
     * Gets all legal agreements that user needs to accept before purchasing a domain.
     *
     * @param name Name of the top-level domain.
     * @param agreementOption Domain agreement options.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all legal agreements that user needs to accept before purchasing a domain.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<TldLegalAgreementInner> listAgreements(
        String name, TopLevelDomainAgreementOption agreementOption) {
        return new PagedIterable<>(listAgreementsAsync(name, agreementOption));
    }

    /**
     * Gets all legal agreements that user needs to accept before purchasing a domain.
     *
     * @param name Name of the top-level domain.
     * @param agreementOption Domain agreement options.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all legal agreements that user needs to accept before purchasing a domain.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<TldLegalAgreementInner> listAgreements(
        String name, TopLevelDomainAgreementOption agreementOption, Context context) {
        return new PagedIterable<>(listAgreementsAsync(name, agreementOption, context));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return collection of Top-level domains.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<TopLevelDomainInner>> listNextSinglePageAsync(String nextLink) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context))
            .<PagedResponse<TopLevelDomainInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Propagate the client's shared context into the Reactor subscriber context.
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return collection of Top-level domains.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<TopLevelDomainInner>> listNextSinglePageAsync(String nextLink, Context context) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .listNext(nextLink, this.client.getEndpoint(), accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return collection of top-level domain legal agreements.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<TldLegalAgreementInner>> listAgreementsNextSinglePageAsync(String nextLink) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(context -> service.listAgreementsNext(nextLink, this.client.getEndpoint(), accept, context))
            .<PagedResponse<TldLegalAgreementInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Propagate the client's shared context into the Reactor subscriber context.
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return collection of top-level domain legal agreements.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<TldLegalAgreementInner>> listAgreementsNextSinglePageAsync(
        String nextLink, Context context) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .listAgreementsNext(nextLink, this.client.getEndpoint(), accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
}
| |
/* $This file is distributed under the terms of the license in LICENSE$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelChangedListener;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeListener;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
import edu.cornell.mannlib.vitro.webapp.utils.logging.ToString;
import org.apache.jena.rdf.model.RDFNode;
/**
 * An RDFServiceFactory that always returns the same RDFService object.
 *
 * <p>The service handed out is wrapped in an {@link UnclosableRDFService} whose
 * {@code close()} is a no-op, so that short-term consumers cannot close the
 * shared instance out from under other users.</p>
 *
 * @author bjl23
 *
 */
public class RDFServiceFactorySingle implements RDFServiceFactory {

    /** The single shared (unclosable) service returned by every getter. */
    private final RDFService rdfService;

    public RDFServiceFactorySingle(RDFService rdfService) {
        this.rdfService = new UnclosableRDFService(rdfService);
    }

    @Override
    public RDFService getRDFService() {
        return this.rdfService;
    }

    @Override
    public RDFService getShortTermRDFService() {
        // Same instance as getRDFService(): this factory never creates
        // per-request services.
        return this.rdfService;
    }

    @Override
    public void registerListener(ChangeListener listener) throws RDFServiceException {
        this.rdfService.registerListener(listener);
    }

    @Override
    public void unregisterListener(ChangeListener listener) throws RDFServiceException {
        this.rdfService.unregisterListener(listener);
    }

    @Override
    public void registerJenaModelChangedListener(ModelChangedListener listener) throws RDFServiceException {
        this.rdfService.registerJenaModelChangedListener(listener);
    }

    @Override
    public void unregisterJenaModelChangedListener(ModelChangedListener listener) throws RDFServiceException {
        this.rdfService.unregisterJenaModelChangedListener(listener);
    }

    /**
     * A pass-through RDFService decorator whose {@code close()} does nothing,
     * protecting the shared delegate from being closed by individual callers.
     * All other calls are forwarded unchanged to the wrapped service.
     */
    public class UnclosableRDFService implements RDFService {

        /** The wrapped delegate; every call except close() is forwarded to it. */
        private final RDFService s;

        public UnclosableRDFService(RDFService rdfService) {
            this.s = rdfService;
        }

        @Override
        public boolean changeSetUpdate(ChangeSet changeSet)
                throws RDFServiceException {
            return s.changeSetUpdate(changeSet);
        }

        @Override
        public void newIndividual(String individualURI, String individualTypeURI)
                throws RDFServiceException {
            s.newIndividual(individualURI, individualTypeURI);
        }

        @Override
        public void newIndividual(String individualURI,
                String individualTypeURI, String graphURI)
                throws RDFServiceException {
            s.newIndividual(individualURI, individualTypeURI, graphURI);
        }

        @Override
        public InputStream sparqlConstructQuery(String query,
                ModelSerializationFormat resultFormat)
                throws RDFServiceException {
            return s.sparqlConstructQuery(query, resultFormat);
        }

        @Override
        public void sparqlConstructQuery(String query, Model model)
                throws RDFServiceException {
            s.sparqlConstructQuery(query, model);
        }

        @Override
        public InputStream sparqlDescribeQuery(String query,
                ModelSerializationFormat resultFormat)
                throws RDFServiceException {
            return s.sparqlDescribeQuery(query, resultFormat);
        }

        @Override
        public InputStream sparqlSelectQuery(String query,
                ResultFormat resultFormat) throws RDFServiceException {
            return s.sparqlSelectQuery(query, resultFormat);
        }

        @Override
        public void sparqlSelectQuery(String query, ResultSetConsumer consumer) throws RDFServiceException {
            s.sparqlSelectQuery(query, consumer);
        }

        @Override
        public boolean sparqlAskQuery(String query) throws RDFServiceException {
            return s.sparqlAskQuery(query);
        }

        @Override
        public List<String> getGraphURIs() throws RDFServiceException {
            return s.getGraphURIs();
        }

        @Override
        public void getGraphMetadata() throws RDFServiceException {
            s.getGraphMetadata();
        }

        @Override
        public String getDefaultWriteGraphURI() throws RDFServiceException {
            return s.getDefaultWriteGraphURI();
        }

        @Override
        public void serializeAll(OutputStream outputStream)
                throws RDFServiceException {
            s.serializeAll(outputStream);
        }

        @Override
        public void serializeGraph(String graphURI, OutputStream outputStream)
                throws RDFServiceException {
            s.serializeGraph(graphURI, outputStream);
        }

        @Override
        public boolean isEquivalentGraph(String graphURI,
                InputStream serializedGraph,
                ModelSerializationFormat serializationFormat) throws RDFServiceException {
            return s.isEquivalentGraph(graphURI, serializedGraph, serializationFormat);
        }

        @Override
        public boolean isEquivalentGraph(String graphURI,
                Model graph) throws RDFServiceException {
            return s.isEquivalentGraph(graphURI, graph);
        }

        @Override
        public void registerListener(ChangeListener changeListener)
                throws RDFServiceException {
            s.registerListener(changeListener);
        }

        @Override
        public void unregisterListener(ChangeListener changeListener)
                throws RDFServiceException {
            s.unregisterListener(changeListener);
        }

        @Override
        public void registerJenaModelChangedListener(ModelChangedListener changeListener)
                throws RDFServiceException {
            s.registerJenaModelChangedListener(changeListener);
        }

        @Override
        public void unregisterJenaModelChangedListener(ModelChangedListener changeListener)
                throws RDFServiceException {
            s.unregisterJenaModelChangedListener(changeListener);
        }

        @Override
        public ChangeSet manufactureChangeSet() {
            return s.manufactureChangeSet();
        }

        @Override
        public long countTriples(RDFNode subject, RDFNode predicate, RDFNode object) throws RDFServiceException {
            return s.countTriples(subject, predicate, object);
        }

        @Override
        public Model getTriples(RDFNode subject, RDFNode predicate, RDFNode object, long limit, long offset) throws RDFServiceException {
            return s.getTriples(subject, predicate, object, limit, offset);
        }

        @Override
        public boolean preferPreciseOptionals() {
            return s.preferPreciseOptionals();
        }

        @Override
        public void close() {
            // Don't close s. It's being used by everybody.
        }

        @Override
        public String toString() {
            return ToString.simpleName(this) + "[" + ToString.hashHex(this)
                    + ", inner=" + s + "]";
        }

        /*
         * UQAM-Linguistic-Management Useful among other things to transport the linguistic context in the service
         * (non-Javadoc)
         * @see edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService#setVitroRequest(edu.cornell.mannlib.vitro.webapp.controller.VitroRequest)
         */
        // Mutable by design: the request is injected per HTTP request via the setter.
        private VitroRequest vitroRequest;

        public void setVitroRequest(VitroRequest vitroRequest) {
            this.vitroRequest = vitroRequest;
        }

        public VitroRequest getVitroRequest() {
            return vitroRequest;
        }
    }
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) 1999-2006 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <p>
*/
package org.olat.core.util;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.olat.core.id.Identity;
import org.olat.core.id.UserConstants;
import org.olat.core.logging.AssertException;
/**
 * Helper class for formatting Strings (not locale specific unless an explicit
 * {@link Locale} is supplied).
 *
 * @author Felix Jost
 */
public class StringHelper {
	// Shared formatter for formatFloat(). NumberFormat is NOT thread-safe and
	// setMaximumFractionDigits() mutates its state, so every use must
	// synchronize on this instance (see formatFloat()).
	private static final NumberFormat numFormatter;
	private static final String WHITESPACE_REGEXP = "^\\s*$";
	private static final Pattern WHITESPACE_PATTERN = Pattern.compile(WHITESPACE_REGEXP);
	// Patterns used to post-process URLEncoder output in urlEncodeUTF8().
	private static final Pattern p1 = Pattern.compile("\\+");
	private static final Pattern p2 = Pattern.compile("%2F");

	/**
	 * regex for not allowing <code>;,:</code> <code>ALL_WITHOUT_COMMA_2POINT_STRPNT</code>
	 */
	public static final String ALL_WITHOUT_COMMA_2POINT_STRPNT = "^[^,;:]*$";
	private static final Pattern ALL_WITHOUT_COMMA_2POINT_STRPNT_PATTERN = Pattern.compile(ALL_WITHOUT_COMMA_2POINT_STRPNT);

	private static final String X_MAC_ENC = "x-mac-";
	private static final String MAC_ENC = "mac";

	static {
		// Force '.' as the decimal separator independently of the default locale.
		DecimalFormatSymbols dfs = new DecimalFormatSymbols();
		dfs.setDecimalSeparator('.');
		numFormatter = new DecimalFormat("#.#", dfs);
	}

	/**
	 * Splits <code>in</code> at every occurrence of <code>delim</code>.
	 * Note the historic quirk: if the delimiter never occurs, an EMPTY list is
	 * returned (the whole input is NOT returned as a single part).
	 *
	 * @param in the string to split
	 * @param delim the delimiter
	 * @return List of parts
	 */
	public static List<String> getParts(String in, String delim) {
		List<String> li = new ArrayList<String>();
		String part;
		int delimlen = delim.length();
		int oldpos = 0;
		int k;
		while ((k = in.indexOf(delim, oldpos)) != -1) {
			part = in.substring(oldpos, k);
			li.add(part);
			oldpos = k + delimlen;
		}
		if (oldpos != 0) { // at least one delimiter found -> take the rest as last part
			part = in.substring(oldpos);
			li.add(part);
		}
		return li;
	}

	/**
	 * @param date date in milliseconds since the epoch, or -1 for "no date"
	 * @param locale
	 * @return formatted date (short style), or "-" if date is -1
	 */
	public static String formatLocaleDate(long date, Locale locale) {
		if (date == -1) return "-";
		return DateFormat.getDateInstance(DateFormat.SHORT, locale).format(new Date(date));
	}

	/**
	 * @param date date in milliseconds since the epoch, or -1 for "no date"
	 * @param locale
	 * @return formatted date (full style), or "-" if date is -1
	 */
	public static String formatLocaleDateFull(long date, Locale locale) {
		if (date == -1) return "-";
		return DateFormat.getDateInstance(DateFormat.FULL, locale).format(new Date(date));
	}

	/**
	 * @param date date in milliseconds since the epoch, or -1 for "no date"
	 * @param locale
	 * @return formatted date/time (short style), or "-" if date is -1
	 */
	public static String formatLocaleDateTime(long date, Locale locale) {
		if (date == -1) return "-";
		return DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT, locale).format(new Date(date));
	}

	/**
	 * @param time time in milliseconds since the epoch, or -1 for "no time"
	 * @param locale
	 * @return formatted time (short style), or "-" if time is -1
	 */
	public static String formatLocaleTime(long time, Locale locale) {
		if (time == -1) return "-";
		return DateFormat.getTimeInstance(DateFormat.SHORT, locale).format(new Date(time));
	}

	/**
	 * Formats a byte count with the largest fitting binary unit (B, KB, MB).
	 *
	 * @param mem number of bytes
	 * @return formatted memory string, e.g. "3 MB"
	 */
	public static String formatMemory(long mem) {
		long kb = mem / 1024;
		long mb = kb / 1024;
		if (mb > 0) return mb + " MB";
		else if (kb > 0) return kb + " KB";
		else return mem + " B";
	}

	/**
	 * Formats a float with at most <code>fractionDigits</code> fraction digits,
	 * always using '.' as the decimal separator.
	 *
	 * @param f the value to format
	 * @param fractionDigits maximum number of fraction digits
	 * @return formatted float
	 */
	public static String formatFloat(float f, int fractionDigits) {
		// NumberFormat is not thread-safe and setMaximumFractionDigits()
		// mutates shared state, so serialize access to the shared formatter.
		synchronized (numFormatter) {
			numFormatter.setMaximumFractionDigits(fractionDigits);
			return numFormatter.format(f);
		}
	}

	/**
	 * URL-encodes the given string using ISO-8859-1.
	 *
	 * @param url the string to encode
	 * @return encoded string
	 */
	public static String urlEncodeISO88591(String url) {
		String part;
		try {
			part = URLEncoder.encode(url, "iso-8859-1");
		} catch (UnsupportedEncodingException e) {
			// Preserve the original cause for diagnosis.
			throw new RuntimeException("encoding failed (iso-8859-1) for :" + url, e);
		}
		return part;
	}

	/**
	 * URL-encodes the given string using UTF-8, then rewrites '+' to "%20" and
	 * "%2F" back to '/' so the result is usable inside a URL path.
	 *
	 * @param url the string to encode
	 * @return encoded string
	 */
	public static String urlEncodeUTF8(String url) {
		String encodedURL;
		try {
			encodedURL = URLEncoder.encode(url, "UTF-8");
		} catch (UnsupportedEncodingException e) {
			/*
			 * from java.nio.Charset Standard charsets Every implementation of the Java platform is required to support the following standard charsets... ... UTF-8
			 * Eight-bit UCS Transformation Format ...
			 */
			throw new AssertException("utf-8 encoding is needed for proper encoding, but not offered on this java platform????");
		}
		encodedURL = p1.matcher(encodedURL).replaceAll("%20");
		encodedURL = p2.matcher(encodedURL).replaceAll("/");
		return encodedURL;
	}

	/**
	 * Converts all keys of a map to a string array. The keys are assumed to be
	 * Strings; a non-String key causes an ArrayStoreException.
	 *
	 * @param m The map with the keys and values
	 * @return The string array containing all keys for this map
	 */
	public static String[] getMapKeysAsStringArray(Map<?, ?> m) {
		return m.keySet().toArray(new String[m.size()]);
	}

	/**
	 * Converts all values of a map to a string array. The values are assumed to
	 * be Strings; a non-String value causes an ArrayStoreException.
	 *
	 * @param m The map with the keys and values
	 * @return The string array containing all values for this map
	 */
	public static String[] getMapValuesAsStringArray(Map<?, ?> m) {
		return m.values().toArray(new String[m.size()]);
	}

	/**
	 * Matches against ^[^,;:]*$ (the pattern is anchored, so find() behaves
	 * like a full match).
	 *
	 * @param s
	 * @return true if s contains none of the characters ',' ';' ':'; false if
	 *         it does, or if s is null
	 */
	public static boolean containsNoneOfCoDouSemi(String s) {
		if (s == null) return false;
		Matcher m = ALL_WITHOUT_COMMA_2POINT_STRPNT_PATTERN.matcher(s);
		return m.find();
	}

	/**
	 * Checks if a string has anything in it to display. Will return true if the string is not null and does contain at least one none-whitespace character.
	 *
	 * @param s The String to be evaluated
	 * @return true if the string contains any non-whitespace character, false otherwise
	 */
	public static boolean containsNonWhitespace(String s) {
		if (s == null) return false;
		Matcher matcher = WHITESPACE_PATTERN.matcher(s);
		// if string matches whitespace pattern then string does not
		// contain non-whitespace
		return !matcher.find();
	}

	/**
	 * takes an array of Identities and converts them to a String containing the Identity-Emails separated by a <b>, </b>. The returned String can be fed directly to the
	 * e-mailer helper as the e-mail to field. <br>
	 * <ul>
	 * <li>Entries in the parameter emailRecipientIdentities are expected to be not null.</li>
	 * </ul>
	 *
	 * @param emailRecipientIdentities
	 * @return "email1, email2, email3" or null if emailRecipientIdentities was null
	 */
	public static String formatIdentitesAsEmailToString(final Identity[] emailRecipientIdentities) {
		// Honor the documented contract: null input -> null result (previously NPE'd).
		if (emailRecipientIdentities == null) return null;
		int elCnt = emailRecipientIdentities.length;
		// 2..n recipients
		StringBuilder tmpDET = new StringBuilder();
		for (int i = 0; i < elCnt; i++) {
			tmpDET.append(emailRecipientIdentities[i].getUser().getProperty(UserConstants.EMAIL, null));
			if (i < elCnt - 1) {
				tmpDET.append(", ");
			}
		}
		return tmpDET.toString();
	}

	/**
	 * takes a List containing email Strings and converts them to a String containing the Email Strings separated by the given delimiter. The returned String can be fed
	 * directly to the e-mailer helper as the e-mail to field. <br>
	 * <ul>
	 * <li>Entries in the parameter emailRecipients are expected to be not null and of Type String.</li>
	 * </ul>
	 *
	 * @param emailRecipients
	 * @param delimiter
	 * @return "email1, email2, email3" or null if emailRecipients was null
	 */
	public static String formatIdentitesAsEmailToString(final List<?> emailRecipients, String delimiter) {
		// Honor the documented contract: null input -> null result (previously NPE'd).
		if (emailRecipients == null) return null;
		int elCnt = emailRecipients.size();
		// 2..n recipients
		StringBuilder tmpDET = new StringBuilder();
		for (int i = 0; i < elCnt; i++) {
			tmpDET.append((String) emailRecipients.get(i));
			if (i < elCnt - 1) {
				tmpDET.append(delimiter);
			}
		}
		return tmpDET.toString();
	}

	/**
	 * Replaces '\n' and '\r' with spaces.
	 *
	 * @param cellValue
	 * @return stripped string
	 */
	public static String stripLineBreaks(String cellValue) {
		cellValue = cellValue.replace('\n', ' ');
		cellValue = cellValue.replace('\r', ' ');
		return cellValue;
	}

	/**
	 * transforms a displayname to a name that causes no problems on the filesystem (e.g. Webclass Energie 2004/2005 -> Webclass_Energie_2004_2005)
	 *
	 * @param s
	 * @return transformed string
	 */
	public static String transformDisplayNameToFileSystemName(String s) {
		s = s.replace('?', '_');
		s = s.replace('/', '_');
		s = s.replace(' ', '_');
		return s;
	}

	/**
	 * Translates browser-style Mac charset names to names the JVM understands.
	 *
	 * @param extractedCharset charset name as extracted from a document/header, may be null
	 * @return charset name usable by the JVM, or null if input was null
	 */
	public static String check4xMacRoman(String extractedCharset) {
		// OLAT-1844
		// TODO:pb: why do http encoding names not match java encoding names?
		// the encoding name 'x-mac-roman' must be translated to javas 'x-MacRoman'
		// but it must be x-mac-roman for browser and htmleditor.. weird naming problem.
		if (extractedCharset == null) return null;
		if (extractedCharset.toLowerCase().startsWith(X_MAC_ENC)) {
			String tmp = extractedCharset.substring(X_MAC_ENC.length());
			String first = tmp.substring(0, 1);
			tmp = tmp.substring(1);
			// e.g. convert 'x-mac-roman' to 'x-MacRoman'
			extractedCharset = "x-Mac" + first.toUpperCase() + tmp;
			return extractedCharset;
		} else if (extractedCharset.toLowerCase().startsWith(MAC_ENC)) {
			// word for macintosh creates charset=macintosh which java does not know, load with iso-8859-1
			return "iso-8859-1";
		}
		return extractedCharset;
	}

	/**
	 * set of strings to one string comma separated.<br>
	 * e.g. ["a","b","c","s"] -> "a, b, c, s"
	 *
	 * @param entries
	 * @return joined string, or <code>null</code> if entries is empty
	 */
	public static String formatAsCSVString(Set<String> entries) {
		return joinCommaSeparated(entries);
	}

	/**
	 * list of strings to one string comma separated.<br>
	 * e.g. ["a","b","c","s"] -> "a, b, c, s"
	 *
	 * @param entries
	 * @return joined string, or <code>null</code> if entries is empty
	 */
	public static String formatAsCSVString(List<String> entries) {
		return joinCommaSeparated(entries);
	}

	/**
	 * Shared implementation for the formatAsCSVString() overloads. Returns
	 * <code>null</code> (not "") for an empty input, preserving the historic
	 * behavior callers may rely on.
	 */
	private static String joinCommaSeparated(Iterable<String> entries) {
		StringBuilder sb = null;
		for (String entry : entries) {
			if (sb == null) {
				sb = new StringBuilder(entry);
			} else {
				sb.append(", ").append(entry);
			}
		}
		return sb == null ? null : sb.toString();
	}

	/**
	 * list of strings to one string comma separated.<br>
	 * e.g. ["z","a","b","c","s","a"] -> "a, b, c, s, z" No duplicates, alphabetically sorted
	 *
	 * @param s
	 * @return joined string, or <code>null</code> if nothing remains after trimming
	 */
	public static String formatAsSortUniqCSVString(List<String> s) {
		return formatSortUniq(s);
	}

	/**
	 * set of strings to one string comma separated.<br>
	 * e.g. ["z","a","b","c","s","a"] -> "a, b, c, s, z" No duplicates, alphabetically sorted
	 *
	 * @param s
	 * @return joined string, or <code>null</code> if nothing remains after trimming
	 */
	public static String formatAsSortUniqCSVString(Set<String> s) {
		return formatSortUniq(s);
	}

	/**
	 * Shared implementation for the formatAsSortUniqCSVString() overloads:
	 * trims every entry, drops duplicates and empty strings, sorts, then joins.
	 */
	private static String formatSortUniq(Iterable<String> entries) {
		Set<String> uniq = new HashSet<String>();
		for (String entry : entries) {
			uniq.add(entry.trim());
		}
		List<String> rv = new ArrayList<String>(uniq);
		rv.remove("");
		Collections.sort(rv);
		return formatAsCSVString(rv);
	}
}
| |
/*
* arcus-java-client : Arcus Java client
* Copyright 2010-2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.spy.memcached;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.channels.SocketChannel;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import net.spy.memcached.auth.AuthDescriptor;
import net.spy.memcached.compat.SpyObject;
import net.spy.memcached.ops.Operation;
import net.spy.memcached.protocol.ascii.AsciiMemcachedNodeImpl;
import net.spy.memcached.protocol.ascii.AsciiOperationFactory;
import net.spy.memcached.protocol.binary.BinaryMemcachedNodeImpl;
import net.spy.memcached.protocol.binary.BinaryOperationFactory;
import net.spy.memcached.transcoders.CollectionTranscoder;
import net.spy.memcached.transcoders.SerializingTranscoder;
import net.spy.memcached.transcoders.Transcoder;
/**
 * Default implementation of ConnectionFactory.
 *
 * <p>
 * This implementation creates connections where the operation queue is an
 * ArrayBlockingQueue and the read and write queues are unbounded
 * LinkedBlockingQueues. The <code>Redistribute</code> FailureMode is always
 * used. If other FailureModes are needed, look at the
 * ConnectionFactoryBuilder.
 *
 * </p>
 */
public class DefaultConnectionFactory extends SpyObject
  implements ConnectionFactory {

  /**
   * Default failure mode.
   */
  public static final FailureMode DEFAULT_FAILURE_MODE =
    FailureMode.Redistribute;

  /**
   * Default hash algorithm.
   */
  public static final HashAlgorithm DEFAULT_HASH = HashAlgorithm.NATIVE_HASH;

  /**
   * Maximum length of the operation queue returned by this connection
   * factory.
   */
  public static final int DEFAULT_OP_QUEUE_LEN=16384;

  /**
   * The maximum time to block waiting for op queue operations to complete,
   * in milliseconds. The default has been set with the expectation that
   * most requests are interactive and waiting for more than a few seconds
   * is thus more undesirable than failing the request.
   */
  public static final long DEFAULT_OP_QUEUE_MAX_BLOCK_TIME =
    TimeUnit.SECONDS.toMillis(10);

  /**
   * The read buffer size for each server connection from this factory.
   */
  public static final int DEFAULT_READ_BUFFER_SIZE=16384;

  /**
   * Default operation timeout in milliseconds.
   *
   * operation timeout : 700ms
   * It avoids the occurrence of operation timeout
   * even if two packet retransmissions exist in linux.
   *
   */
  public static final long DEFAULT_OPERATION_TIMEOUT = 700L;

  /**
   * Maximum amount of time (in seconds) to wait between reconnect attempts.
   */
  public static final long DEFAULT_MAX_RECONNECT_DELAY = 30;

  /**
   * Default timeout-exception threshold before a connection is shut down.
   * NOTE(review): the historical comment read "Maximum number + 2 of timeout
   * exception for shutdown connection", suggesting the effective limit is
   * offset by 2 — verify against the consumer of this value.
   */
  public static final int DEFAULT_MAX_TIMEOUTEXCEPTION_THRESHOLD = 998;

  /**
   * Maximum number of Front cache elements
   * (0 presumably means the front cache is disabled — verify at the call site).
   */
  public static final int DEFAULT_MAX_FRONTCACHE_ELEMENTS = 0;

  /**
   * Default expire time of front cache entries, in seconds.
   * (Previous comment was a copy-paste of the elements constant above.)
   */
  public static final int DEFAULT_FRONTCACHE_EXPIRETIME = 5;

  /**
   * Default bulk service thread count
   */
  public static final int DEFAULT_BULKSERVICE_THREAD_COUNT = 1;

  /**
   * Default bulk service loop limit
   */
  public static final int DEFAULT_BULKSERVICE_LOOP_LIMIT = 1;

  /**
   * Default bulk service single operation timeout
   *
   * operation timeout : 700ms
   * It avoids the occurrence of operation timeout
   * even if two packet retransmissions exist in linux.
   *
   */
  public static final long DEFAULT_BULKSERVICE_SINGLE_OP_TIMEOUT = 700L;

  /**
   * Max smget key chunk size per request
   */
  public static final int DEFAULT_MAX_SMGET_KEY_CHUNK_SIZE = 500;

  /**
   * Default front cache name. The appended hashCode of a fresh Object makes
   * the name effectively unique per class-load.
   */
  private static final String DEFAULT_FRONT_CACHE_NAME = "ArcusFrontCache" + new Object().hashCode();

  // Per-instance configuration fixed at construction time.
  private final int opQueueLen;
  private final int readBufSize;
  private final HashAlgorithm hashAlg;

  /**
   * Construct a DefaultConnectionFactory with the given parameters.
   *
   * @param qLen the queue length.
   * @param bufSize the buffer size
   * @param hash the algorithm to use for hashing
   */
  public DefaultConnectionFactory(int qLen, int bufSize, HashAlgorithm hash) {
    super();
    opQueueLen=qLen;
    readBufSize=bufSize;
    hashAlg=hash;
  }

  /**
   * Create a DefaultConnectionFactory with the given maximum operation
   * queue length, and the given read buffer size.
   */
  public DefaultConnectionFactory(int qLen, int bufSize) {
    this(qLen, bufSize, DEFAULT_HASH);
  }

  /**
   * Create a DefaultConnectionFactory with the default parameters.
   */
  public DefaultConnectionFactory() {
    this(DEFAULT_OP_QUEUE_LEN, DEFAULT_READ_BUFFER_SIZE);
  }

  /**
   * Create a node implementation matching the protocol of the configured
   * operation factory (ascii or binary). Binary nodes additionally perform
   * SASL auth when an auth descriptor is configured.
   */
  public MemcachedNode createMemcachedNode(SocketAddress sa,
      SocketChannel c, int bufSize) {

    OperationFactory of = getOperationFactory();
    if(of instanceof AsciiOperationFactory) {
      return new AsciiMemcachedNodeImpl(sa, c, bufSize,
        createReadOperationQueue(),
        createWriteOperationQueue(),
        createOperationQueue(),
        getOpQueueMaxBlockTime());
    } else if(of instanceof BinaryOperationFactory) {
      boolean doAuth = false;
      if (getAuthDescriptor() != null) {
        doAuth = true;
      }
      return new BinaryMemcachedNodeImpl(sa, c, bufSize,
        createReadOperationQueue(),
        createWriteOperationQueue(),
        createOperationQueue(),
        getOpQueueMaxBlockTime(),
        doAuth);
    } else {
      throw new IllegalStateException(
        "Unhandled operation factory type " + of);
    }
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#createConnection(java.util.List)
   */
  public MemcachedConnection createConnection(List<InetSocketAddress> addrs)
    throws IOException {
    return new MemcachedConnection(getReadBufSize(), this, addrs,
      getInitialObservers(), getFailureMode(), getOperationFactory());
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getFailureMode()
   */
  public FailureMode getFailureMode() {
    return DEFAULT_FAILURE_MODE;
  }

  /* (non-Javadoc)
   * Bounded input queue: callers block (up to getOpQueueMaxBlockTime) when full.
   * @see net.spy.memcached.ConnectionFactory#createOperationQueue()
   */
  public BlockingQueue<Operation> createOperationQueue() {
    return new ArrayBlockingQueue<Operation>(getOpQueueLen());
  }

  /* (non-Javadoc)
   * Unbounded, per the class contract.
   * @see net.spy.memcached.ConnectionFactory#createReadOperationQueue()
   */
  public BlockingQueue<Operation> createReadOperationQueue() {
    return new LinkedBlockingQueue<Operation>();
  }

  /* (non-Javadoc)
   * Unbounded, per the class contract.
   * @see net.spy.memcached.ConnectionFactory#createWriteOperationQueue()
   */
  public BlockingQueue<Operation> createWriteOperationQueue() {
    return new LinkedBlockingQueue<Operation>();
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#createLocator(java.util.List)
   */
  public NodeLocator createLocator(List<MemcachedNode> nodes) {
    return new ArrayModNodeLocator(nodes, getHashAlg());
  }

  /**
   * Get the op queue length set at construct time.
   */
  public int getOpQueueLen() {
    return opQueueLen;
  }

  /**
   * @return the maximum time to block waiting for op queue operations to
   *         complete, in milliseconds, or null for no waiting.
   */
  public long getOpQueueMaxBlockTime() {
    return DEFAULT_OP_QUEUE_MAX_BLOCK_TIME;
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getReadBufSize()
   */
  public int getReadBufSize() {
    return readBufSize;
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getHashAlg()
   */
  public HashAlgorithm getHashAlg() {
    return hashAlg;
  }

  /* (non-Javadoc)
   * Returns a new factory each call; ascii protocol is the default.
   * @see net.spy.memcached.ConnectionFactory#getOperationFactory()
   */
  public OperationFactory getOperationFactory() {
    return new AsciiOperationFactory();
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getOperationTimeout()
   */
  public long getOperationTimeout() {
    return DEFAULT_OPERATION_TIMEOUT;
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#isDaemon()
   */
  public boolean isDaemon() {
    return false;
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getInitialObservers()
   */
  public Collection<ConnectionObserver> getInitialObservers() {
    return Collections.emptyList();
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getDefaultTranscoder()
   */
  public Transcoder<Object> getDefaultTranscoder() {
    return new SerializingTranscoder();
  }

  /*
   * (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getDefaultCollectionTranscoder()
   */
  public Transcoder<Object> getDefaultCollectionTranscoder() {
    return new CollectionTranscoder();
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#useNagleAlgorithm()
   */
  public boolean useNagleAlgorithm() {
    return false;
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#shouldOptimize()
   */
  public boolean shouldOptimize() {
    return true;
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getMaxReconnectDelay()
   */
  public long getMaxReconnectDelay() {
    return DEFAULT_MAX_RECONNECT_DELAY;
  }

  /* (non-Javadoc)
   * Null means no SASL authentication is attempted.
   * @see net.spy.memcached.ConnectionFactory#getAuthDescriptor()
   */
  public AuthDescriptor getAuthDescriptor() {
    return null;
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getTimeoutExceptionThreshold()
   */
  public int getTimeoutExceptionThreshold() {
    return DEFAULT_MAX_TIMEOUTEXCEPTION_THRESHOLD;
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getMaxFrontCacheElements()
   */
  public int getMaxFrontCacheElements() {
    return DEFAULT_MAX_FRONTCACHE_ELEMENTS;
  }

  /* (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getFrontCacheExpireTime()
   */
  public int getFrontCacheExpireTime() {
    return DEFAULT_FRONTCACHE_EXPIRETIME;
  }

  /*
   * (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getBulkServiceThreadCount()
   */
  @Override
  public int getBulkServiceThreadCount() {
    return DEFAULT_BULKSERVICE_THREAD_COUNT;
  }

  /*
   * (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getBulkServiceLoopLimit()
   */
  @Override
  public int getBulkServiceLoopLimit() {
    return DEFAULT_BULKSERVICE_LOOP_LIMIT;
  }

  /*
   * (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getBulkServiceSingleOpTimeout()
   */
  @Override
  public long getBulkServiceSingleOpTimeout() {
    return DEFAULT_BULKSERVICE_SINGLE_OP_TIMEOUT;
  }

  /*
   * (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getDefaultMaxSMGetKeyChunkSize()
   */
  @Override
  public int getDefaultMaxSMGetKeyChunkSize() {
    return DEFAULT_MAX_SMGET_KEY_CHUNK_SIZE;
  }

  /*
   * (non-Javadoc)
   * @see net.spy.memcached.ConnectionFactory#getFrontCacheName()
   */
  @Override
  public String getFrontCacheName() {
    return DEFAULT_FRONT_CACHE_NAME;
  }
}
| |
package cgeo.geocaching.settings;
import cgeo.geocaching.CgeoApplication;
import cgeo.geocaching.DataStore;
import cgeo.geocaching.Intents;
import cgeo.geocaching.R;
import cgeo.geocaching.SelectMapfileActivity;
import cgeo.geocaching.activity.ActivityMixin;
import cgeo.geocaching.apps.cache.navi.NavigationAppFactory;
import cgeo.geocaching.apps.cache.navi.NavigationAppFactory.NavigationAppsEnum;
import cgeo.geocaching.connector.gc.GCConnector;
import cgeo.geocaching.files.SimpleDirChooser;
import cgeo.geocaching.maps.MapProviderFactory;
import cgeo.geocaching.maps.interfaces.MapSource;
import cgeo.geocaching.network.AndroidBeam;
import cgeo.geocaching.sensors.Sensors;
import cgeo.geocaching.utils.DatabaseBackupUtils;
import cgeo.geocaching.utils.DebugUtils;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.RxUtils;
import org.apache.commons.lang3.StringUtils;
import org.openintents.intents.FileManagerIntents;
import rx.functions.Action0;
import rx.schedulers.Schedulers;
import android.app.ProgressDialog;
import android.app.backup.BackupManager;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.Preference.OnPreferenceChangeListener;
import android.preference.Preference.OnPreferenceClickListener;
import android.preference.PreferenceActivity;
import android.preference.PreferenceManager;
import android.preference.PreferenceScreen;
import android.widget.BaseAdapter;
import android.widget.ListAdapter;
import java.io.File;
import java.util.List;
import java.util.Locale;
/**
* A {@link PreferenceActivity} that presents a set of application settings. On
* handset devices, settings are presented as a single list. On tablets,
* settings are split by category, with category headers shown to the left of
* the list of settings.
* <p>
* See <a href="http://developer.android.com/design/patterns/settings.html"> Android Design: Settings</a> for design
* guidelines and the <a href="http://developer.android.com/guide/topics/ui/settings.html">Settings API Guide</a> for
* more information on developing a Settings UI.
*
*/
public class SettingsActivity extends PreferenceActivity {
// Intent extra key used by openForScreen() to jump directly to a specific preference screen.
private static final String INTENT_OPEN_SCREEN = "OPEN_SCREEN";
// Activity result codes: callers check whether the app must restart for a change to apply.
public static final int NO_RESTART_NEEDED = 1;
public static final int RESTART_NEEDED = 2;
/**
 * Enumeration for directory choosers. This is how we can retrieve information about the
 * directory and preference key in onActivityResult() easily just by knowing
 * the result code.
 */
private enum DirChooserType {
GPX_IMPORT_DIR(1, R.string.pref_gpxImportDir,
Environment.getExternalStorageDirectory().getPath() + "/gpx", false),
GPX_EXPORT_DIR(2, R.string.pref_gpxExportDir,
Environment.getExternalStorageDirectory().getPath() + "/gpx", true),
THEMES_DIR(3, R.string.pref_renderthemepath, "", false);
// Request code passed to startActivityForResult() and matched again in onActivityResult().
public final int requestCode;
// String resource id of the preference key storing the chosen directory.
public final int keyId;
// Directory offered when the preference has no stored value yet.
public final String defaultValue;
// Whether the chooser must select a directory the app can write to.
public final boolean writeMode;
DirChooserType(final int requestCode, final int keyId, final String defaultValue, final boolean writeMode) {
this.requestCode = requestCode;
this.keyId = keyId;
this.defaultValue = defaultValue;
this.writeMode = writeMode;
}
}
@Override
protected void onCreate(final Bundle savedInstanceState) {
// Set light skin in preferences only for devices > 2.x, it doesn't work under 2.x
setTheme(Settings.isLightSkin() && Build.VERSION.SDK_INT > 10 ? R.style.settings_light : R.style.settings);
super.onCreate(savedInstanceState);
// Ensure device/unit dependent defaults exist before the preference tree is inflated.
initDeviceSpecificPreferences();
initUnitPreferences();
addPreferencesFromResource(this, R.xml.preferences);
initPreferences();
// Optionally jump straight to a sub-screen requested via openForScreen().
final Intent intent = getIntent();
openInitialScreen(intent.getIntExtra(INTENT_OPEN_SCREEN, 0));
AndroidBeam.disable(this);
// Default result; individual preferences may upgrade this to RESTART_NEEDED.
setResult(NO_RESTART_NEEDED);
}
/**
 * Opens the preference screen identified by the given key resource id.
 * A value of 0 (no INTENT_OPEN_SCREEN extra) is a no-op.
 */
private void openInitialScreen(final int initialScreen) {
if (initialScreen == 0) {
return;
}
final PreferenceScreen screen = (PreferenceScreen) getPreference(initialScreen);
if (screen == null) {
return;
}
try {
setPreferenceScreen(screen);
} catch (final RuntimeException e) {
// Log and stay on the root screen instead of crashing the settings activity.
Log.e("could not open preferences " + initialScreen, e);
}
}
@Override
protected void onPause() {
// Ask the Android backup framework to persist the (possibly changed) settings.
Log.i("Requesting settings backup with settings manager");
BackupManager.dataChanged(getPackageName());
super.onPause();
}
/**
 * Wires up all preference listeners/summaries after the preference tree has been inflated.
 */
private void initPreferences() {
initMapSourcePreference();
initDirChoosers();
initDefaultNavigationPreferences();
initBackupButtons();
initDbLocationPreference();
initGeoDirPreferences();
initDebugPreference();
initBasicMemberPreferences();
initSend2CgeoPreferences();
initServicePreferences();
initNavigationMenuPreferences();
initLanguagePreferences();
initMaintenanceButtons();
// Keep the summary of these string preferences in sync with their values.
for (final int k : new int[] { R.string.pref_username, R.string.pref_password,
R.string.pref_pass_vote, R.string.pref_signature,
R.string.pref_mapsource, R.string.pref_renderthemepath,
R.string.pref_gpxExportDir, R.string.pref_gpxImportDir,
R.string.pref_mapDirectory, R.string.pref_defaultNavigationTool,
R.string.pref_defaultNavigationTool2, R.string.pref_webDeviceName,
R.string.pref_fakekey_preference_backup_info, R.string.pref_twitter_cache_message, R.string.pref_twitter_trackable_message,
R.string.pref_ecusername, R.string.pref_ecpassword, R.string.pref_ec_icons }) {
bindSummaryToStringValue(k);
}
}
/**
 * Enables the checkbox of every navigation app that is actually installed on the device.
 */
private void initNavigationMenuPreferences() {
for (final NavigationAppsEnum appEnum : NavigationAppsEnum.values()) {
if (appEnum.app.isInstalled()) {
getPreference(appEnum.preferenceKey).setEnabled(true);
}
}
// Basic-member options are irrelevant for premium members.
getPreference(R.string.preference_screen_basicmembers)
.setEnabled(!Settings.isGCPremiumMember());
redrawScreen(R.string.preference_screen_navigation_menu);
}
/**
 * Initializes the geocaching-service screens: activation listeners, website links
 * and the "active" summary on each service's screen entry.
 */
private void initServicePreferences() {
// All Opencaching sites share the same preference layout, driven by OCPreferenceKeys.
for (final OCPreferenceKeys key : OCPreferenceKeys.values()) {
getPreference(key.isActivePrefId).setOnPreferenceChangeListener(VALUE_CHANGE_LISTENER);
setWebsite(key.websitePrefId, key.authParams.host);
getPreference(key.prefScreenId).setSummary(getServiceSummary(Settings.isOCConnectorActive(key.isActivePrefId)));
}
getPreference(R.string.pref_connectorGCActive).setOnPreferenceChangeListener(VALUE_CHANGE_LISTENER);
setWebsite(R.string.pref_fakekey_gc_website, GCConnector.getInstance().getHost());
getPreference(R.string.preference_screen_gc).setSummary(getServiceSummary(Settings.isGCConnectorActive()));
getPreference(R.string.pref_connectorOXActive).setOnPreferenceChangeListener(VALUE_CHANGE_LISTENER);
setWebsite(R.string.pref_fakekey_ox_website, "opencaching.com");
getPreference(R.string.preference_screen_ox).setSummary(getServiceSummary(Settings.isOXConnectorActive()));
getPreference(R.string.pref_connectorECActive).setOnPreferenceChangeListener(VALUE_CHANGE_LISTENER);
setWebsite(R.string.pref_fakekey_ec_website, "extremcaching.com");
getPreference(R.string.preference_screen_ec).setSummary(getServiceSummary(Settings.isECConnectorActive()));
getPreference(R.string.pref_ratingwanted).setOnPreferenceChangeListener(VALUE_CHANGE_LISTENER);
setWebsite(R.string.pref_fakekey_gcvote_website, "gcvote.com");
getPreference(R.string.preference_screen_gcvote).setSummary(getServiceSummary(Settings.isRatingWanted()));
getPreference(R.string.pref_connectorGeokretyActive).setOnPreferenceChangeListener(VALUE_CHANGE_LISTENER);
setWebsite(R.string.pref_fakekey_geokrety_website, "geokrety.org");
setWebsite(R.string.pref_fakekey_geokretymap_website, "geokretymap.org");
setWebsite(R.string.pref_fakekey_geokrety_register, "geokrety.org/adduser.php");
getPreference(R.string.preference_screen_geokrety).setSummary(getServiceSummary(Settings.isGeokretyConnectorActive()));
setWebsite(R.string.pref_fakekey_sendtocgeo_website, "send2.cgeo.org");
getPreference(R.string.preference_screen_sendtocgeo).setSummary(getServiceSummary(Settings.isRegisteredForSend2cgeo()));
}
/**
 * Makes clicking the given preference open {@code http://host} in an external browser.
 *
 * @param preferenceKey string resource id of the preference to attach the click listener to
 * @param host host name (without scheme) to open
 */
private void setWebsite(final int preferenceKey, final String host) {
final Preference preference = getPreference(preferenceKey);
preference.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(final Preference preference) {
try {
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("http://" + host)));
} catch (final ActivityNotFoundException e) {
// No browser installed; inform the user instead of crashing.
Log.e("Cannot find suitable activity", e);
ActivityMixin.showToast(SettingsActivity.this, R.string.err_application_no);
}
return true;
}
});
}
/**
 * Summary text for a service preference screen: the localized "active" label when the
 * service is enabled, an empty string otherwise.
 */
private static String getServiceSummary(final boolean status) {
    if (!status) {
        return StringUtils.EMPTY;
    }
    return CgeoApplication.getInstance().getString(R.string.settings_service_active);
}
/**
 * Resolves a string resource id to the actual preference key string.
 */
private static String getKey(final int prefKeyId) {
return CgeoApplication.getInstance().getString(prefKeyId);
}
/**
 * Looks up a preference by the string resource id of its key.
 */
private Preference getPreference(final int keyId) {
return findPreference(this, getKey(keyId));
}
/**
 * Fill the choice list for map sources.
 */
private void initMapSourcePreference() {
    final ListPreference pref = (ListPreference) getPreference(R.string.pref_mapsource);
    final List<MapSource> mapSources = MapProviderFactory.getMapSources();
    final int count = mapSources.size();
    final CharSequence[] entries = new CharSequence[count];
    final CharSequence[] values = new CharSequence[count];
    int index = 0;
    for (final MapSource mapSource : mapSources) {
        entries[index] = mapSource.getName();
        // The numerical id is what gets persisted as the preference value.
        values[index] = String.valueOf(mapSource.getNumericalId());
        index++;
    }
    pref.setEntries(entries);
    pref.setEntryValues(values);
}
/**
 * Fill the choice list for default navigation tools.
 */
private void initDefaultNavigationPreferences() {
    final List<NavigationAppsEnum> apps = NavigationAppFactory.getInstalledDefaultNavigationApps();
    final int count = apps.size();
    final CharSequence[] entries = new CharSequence[count];
    final CharSequence[] values = new CharSequence[count];
    int index = 0;
    for (final NavigationAppsEnum app : apps) {
        entries[index] = app.toString();
        values[index] = String.valueOf(app.id);
        index++;
    }
    // Primary and secondary navigation selectors share the same entry lists.
    final ListPreference defaultNavigationTool = (ListPreference) getPreference(R.string.pref_defaultNavigationTool);
    defaultNavigationTool.setEntries(entries);
    defaultNavigationTool.setEntryValues(values);
    final ListPreference defaultNavigationTool2 = (ListPreference) getPreference(R.string.pref_defaultNavigationTool2);
    defaultNavigationTool2.setEntries(entries);
    defaultNavigationTool2.setEntryValues(values);
}
/**
 * Attaches directory-chooser launchers to all directory preferences, plus the
 * dedicated map file selector for the map directory preference.
 */
private void initDirChoosers() {
for (final DirChooserType dct : DirChooserType.values()) {
getPreference(dct.keyId).setOnPreferenceClickListener(
new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(final Preference preference) {
startDirChooser(dct);
return false;
}
});
}
// The map directory uses its own selector activity instead of a generic dir chooser.
getPreference(R.string.pref_mapDirectory).setOnPreferenceClickListener(
new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(final Preference preference) {
final Intent i = new Intent(SettingsActivity.this,
SelectMapfileActivity.class);
startActivityForResult(i, R.string.pref_mapDirectory);
return false;
}
});
}
/**
 * Fire up a directory chooser on click on the preference.
 *
 * Prefers the OpenIntents file manager if installed, falling back to the bundled
 * {@link SimpleDirChooser} otherwise.
 * The result can be processed using {@link android.app.Activity#onActivityResult}.
 *
 * @param dct
 *            type of directory to be selected
 */
private void startDirChooser(final DirChooserType dct) {
final String startDirectory = Settings.getString(dct.keyId, dct.defaultValue);
try {
final Intent dirChooser = new Intent(FileManagerIntents.ACTION_PICK_DIRECTORY);
if (StringUtils.isNotBlank(startDirectory)) {
dirChooser.setData(Uri.fromFile(new File(startDirectory)));
}
dirChooser.putExtra(FileManagerIntents.EXTRA_TITLE,
getString(R.string.simple_dir_chooser_title));
dirChooser.putExtra(FileManagerIntents.EXTRA_BUTTON_TEXT,
getString(android.R.string.ok));
startActivityForResult(dirChooser, dct.requestCode);
} catch (final ActivityNotFoundException ignored) {
// OI file manager not available
final Intent dirChooser = new Intent(this, SimpleDirChooser.class);
dirChooser.putExtra(Intents.EXTRA_START_DIR, startDirectory);
dirChooser.putExtra(SimpleDirChooser.EXTRA_CHOOSE_FOR_WRITING, dct.writeMode);
startActivityForResult(dirChooser, dct.requestCode);
}
}
/**
 * Persists the directory returned by a chooser and mirrors it into the
 * preference's summary.
 */
private void setChosenDirectory(final DirChooserType dct, final Intent data) {
final String directory = new File(data.getData().getPath()).getAbsolutePath();
if (StringUtils.isNotBlank(directory)) {
final Preference p = getPreference(dct.keyId);
if (p == null) {
return;
}
Settings.putString(dct.keyId, directory);
p.setSummary(directory);
}
}
/**
 * Wires the database backup/restore buttons.
 */
private void initBackupButtons() {
final Preference backup = getPreference(R.string.pref_fakekey_preference_backup);
backup.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(final Preference preference) {
DatabaseBackupUtils.createBackup(SettingsActivity.this, new Runnable() {
@Override
public void run() {
// Refresh the "last backup" info line once the backup finished.
VALUE_CHANGE_LISTENER.onPreferenceChange(SettingsActivity.this.getPreference(R.string.pref_fakekey_preference_backup_info), "");
}
});
return true;
}
});
final Preference restore = getPreference(R.string.pref_fakekey_preference_restore);
restore.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(final Preference preference) {
DatabaseBackupUtils.restoreDatabase(SettingsActivity.this);
return true;
}
});
}
/**
 * Wires the maintenance buttons: background cleanup of obsolete cache directories
 * and creation of a memory dump for debugging.
 */
private void initMaintenanceButtons() {
final Preference dirMaintenance = getPreference(R.string.pref_fakekey_preference_maintenance_directories);
dirMaintenance.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(final Preference preference) {
// disable the button, as the cleanup runs in background and should not be invoked a second time
preference.setEnabled(false);
final Resources res = getResources();
final SettingsActivity activity = SettingsActivity.this;
final ProgressDialog dialog = ProgressDialog.show(activity, res.getString(R.string.init_maintenance), res.getString(R.string.init_maintenance_directories), true, false);
// Run the cleanup on the IO scheduler, then dismiss the dialog on the UI thread.
RxUtils.andThenOnUi(Schedulers.io(), new Action0() {
@Override
public void call() {
DataStore.removeObsoleteCacheDirectories();
}
}, new Action0() {
@Override
public void call() {
dialog.dismiss();
}
});
return true;
}
});
final Preference memoryDumpPref = getPreference(R.string.pref_memory_dump);
memoryDumpPref
.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(
final Preference preference) {
DebugUtils.createMemoryDump(SettingsActivity.this);
return true;
}
});
}
private static void initDeviceSpecificPreferences() {
// We have to ensure that those preferences are initialized so that devices with specific default values
// will get the appropriate ones. (Reading then writing back persists the computed default.)
Settings.setUseHardwareAcceleration(Settings.useHardwareAcceleration());
Settings.setUseGooglePlayServices(Settings.useGooglePlayServices());
}
// Persist the (possibly locale-derived) imperial/metric default, same pattern as above.
private static void initUnitPreferences() {
Settings.setUseImperialUnits(Settings.useImperialUnits());
}
/**
 * Wires the "database on SD card" toggle: the actual move is performed by
 * {@code DataStore.moveDatabase}, and the checkbox only flips when the move succeeded.
 */
private void initDbLocationPreference() {
final Preference p = getPreference(R.string.pref_dbonsdcard);
// Not persistent: the real state lives in Settings and is changed by the move itself.
p.setPersistent(false);
p.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(final Preference preference) {
final boolean oldValue = Settings.isDbOnSDCard();
DataStore.moveDatabase(SettingsActivity.this);
// Only report a change if the database location actually changed.
return oldValue != Settings.isDbOnSDCard();
}
});
}
/**
 * Wires the debug toggle: switches verbose logging and OOM heap dumping.
 */
private void initDebugPreference() {
final Preference p = getPreference(R.string.pref_debug);
p.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(final Preference preference, final Object newValue) {
final boolean isDebug = (Boolean) newValue;
Log.setDebug(isDebug);
CgeoApplication.dumpOnOutOfMemory(isDebug);
return true;
}
});
}
/**
 * Wires the "use English" toggle; changing the language requires an app restart,
 * which is signalled to the caller via the activity result.
 */
private void initLanguagePreferences() {
final Preference p = getPreference(R.string.pref_useenglish);
p.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(final Preference preference, final Object newValue) {
setResult(RESTART_NEEDED);
return true;
}
});
}
/**
 * Wires the location-source preferences (Google Play Services, low-power mode) so that
 * the sensor observables are reconfigured immediately when a value changes.
 */
private void initGeoDirPreferences() {
final Sensors sensors = Sensors.getInstance();
final Preference playServices = getPreference(R.string.pref_googleplayservices);
playServices.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(final Preference preference, final Object newValue) {
sensors.setupGeoDataObservables((Boolean) newValue, Settings.useLowPowerMode());
return true;
}
});
// The toggle is only meaningful on devices where Play Services is available.
playServices.setEnabled(CgeoApplication.getInstance().isGooglePlayServicesAvailable());
getPreference(R.string.pref_lowpowermode).setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(final Preference preference, final Object newValue) {
final boolean useLowPower = ((Boolean) newValue).booleanValue();
sensors.setupGeoDataObservables(Settings.useGooglePlayServices(), useLowPower);
sensors.setupDirectionObservable(useLowPower);
return true;
}
});
}
/**
 * Enables/disables the basic-member-only options depending on premium membership,
 * then refreshes the services screen.
 */
void initBasicMemberPreferences() {
getPreference(R.string.preference_screen_basicmembers)
.setEnabled(!Settings.isGCPremiumMember());
getPreference(R.string.pref_loaddirectionimg)
.setEnabled(!Settings.isGCPremiumMember());
getPreference(R.string.pref_showcaptcha)
.setEnabled(!Settings.isGCPremiumMember());
redrawScreen(R.string.preference_screen_services);
}
/**
 * Refresh a preference screen. Has no effect when called for a preference, that is not actually a preference
 * screen.
 *
 * @param key
 *            Key of a preference screen.
 */
private void redrawScreen(final int key) {
final Preference preference = getPreference(key);
redrawScreen(preference);
}
/**
 * Forces a redraw of the given preference screen by notifying its root adapter.
 * No-op for preferences that are not screens.
 */
private static void redrawScreen(final Preference preference) {
if (!(preference instanceof PreferenceScreen)) {
return;
}
final PreferenceScreen screen = (PreferenceScreen) preference;
final ListAdapter adapter = screen.getRootAdapter();
if (adapter instanceof BaseAdapter) {
((BaseAdapter) adapter).notifyDataSetChanged();
}
}
// Persist the (possibly device-derived default) web device name so it shows in the UI.
private static void initSend2CgeoPreferences() {
Settings.putString(R.string.pref_webDeviceName, Settings.getWebDeviceName());
}
/**
 * Updates the "authorize"/"re-authorize" title of the authorization preference
 * identified by the given key, dispatching to the matching service.
 */
public void setAuthTitle(final int prefKeyId) {
switch (prefKeyId) {
case R.string.pref_fakekey_ocde_authorization:
case R.string.pref_fakekey_ocpl_authorization:
case R.string.pref_fakekey_ocnl_authorization:
case R.string.pref_fakekey_ocus_authorization:
case R.string.pref_fakekey_ocro_authorization:
case R.string.pref_fakekey_ocuk_authorization:
setOCAuthTitle(OCPreferenceKeys.getByAuthId(prefKeyId));
break;
case R.string.pref_fakekey_twitter_authorization:
setTwitterAuthTitle();
break;
case R.string.pref_fakekey_geokrety_authorization:
setGeokretyAuthTitle();
break;
default:
// Unknown keys are logged rather than thrown: this is a UI refresh, not a contract.
Log.e(String.format(Locale.ENGLISH, "Invalid key %d in SettingsActivity.setTitle()", prefKeyId));
}
}
/**
 * Sets the authorize/re-authorize title for an Opencaching site; tolerates a null key.
 */
private void setOCAuthTitle(final OCPreferenceKeys key) {
if (key != null) {
getPreference(key.authPrefId)
.setTitle(getString(Settings.hasOCAuthorization(key.publicTokenPrefId, key.privateTokenPrefId)
? R.string.settings_reauthorize
: R.string.settings_authorize));
}
}
// Sets the authorize/re-authorize title depending on existing Twitter authorization.
private void setTwitterAuthTitle() {
getPreference(R.string.pref_fakekey_twitter_authorization)
.setTitle(getString(Settings.hasTwitterAuthorization()
? R.string.settings_reauthorize
: R.string.settings_authorize));
}
// Sets the authorize/re-authorize title depending on existing GeoKrety authorization.
void setGeokretyAuthTitle() {
getPreference(R.string.pref_fakekey_geokrety_authorization)
.setTitle(getString(Settings.hasGeokretyAuthorization()
? R.string.settings_reauthorize
: R.string.settings_authorize));
}
/**
 * Launches the settings activity opened directly at the given preference screen.
 */
public static void openForScreen(final int preferenceScreenKey, final Context fromActivity) {
final Intent intent = new Intent(fromActivity, SettingsActivity.class);
intent.putExtra(INTENT_OPEN_SCREEN, preferenceScreenKey);
fromActivity.startActivity(intent);
}
/**
 * Handles results from the directory choosers, the map file selector and the
 * service authorization activities started from this settings screen.
 *
 * @param requestCode either a {@link DirChooserType#requestCode} or one of the string
 *            resource ids used as request codes when starting the child activity
 * @param resultCode only {@code RESULT_OK} results are processed
 * @param data intent carrying the chosen directory / map file
 */
@Override
protected void onActivityResult(final int requestCode, final int resultCode, final Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode != RESULT_OK) {
        return;
    }
    // Directory choosers encode their identity directly in the request code.
    for (final DirChooserType dct : DirChooserType.values()) {
        if (requestCode == dct.requestCode) {
            setChosenDirectory(dct, data);
            return;
        }
    }
    switch (requestCode) {
        case R.string.pref_mapDirectory:
            if (data.hasExtra(Intents.EXTRA_MAP_FILE)) {
                final String mapFile = data.getStringExtra(Intents.EXTRA_MAP_FILE);
                final File file = new File(mapFile);
                if (!file.isDirectory()) {
                    Settings.setMapFile(mapFile);
                    if (!Settings.isValidMapFile(Settings.getMapFile())) {
                        ActivityMixin.showToast(this, R.string.warn_invalid_mapfile);
                    } else {
                        // Ensure map source preference is updated accordingly.
                        // TODO: There should be a better way to find and select the map source for a map file
                        final Integer mapSourceId = mapFile.hashCode();
                        final ListPreference mapSource = (ListPreference) getPreference(R.string.pref_mapsource);
                        mapSource.setValue(mapSourceId.toString());
                        VALUE_CHANGE_LISTENER.onPreferenceChange(mapSource, mapSourceId);
                    }
                } else {
                    Settings.setMapFileDirectory(mapFile);
                }
            }
            initMapSourcePreference();
            getPreference(R.string.pref_mapDirectory).setSummary(StringUtils.defaultString(Settings.getMapFileDirectory()));
            break;
        case R.string.pref_fakekey_ocde_authorization:
        case R.string.pref_fakekey_ocpl_authorization:
        case R.string.pref_fakekey_ocnl_authorization:
        case R.string.pref_fakekey_ocus_authorization:
        case R.string.pref_fakekey_ocro_authorization:
        case R.string.pref_fakekey_ocuk_authorization:
            final OCPreferenceKeys key = OCPreferenceKeys.getByAuthId(requestCode);
            if (key != null) {
                setOCAuthTitle(key);
                redrawScreen(key.prefScreenId);
            }
            break;
        case R.string.pref_fakekey_twitter_authorization:
            setTwitterAuthTitle();
            redrawScreen(R.string.preference_screen_twitter);
            break;
        case R.string.pref_fakekey_geokrety_authorization:
            setGeokretyAuthTitle();
            redrawScreen(R.string.preference_screen_geokrety);
            break;
        default:
            // Include the offending request code: a bare IllegalArgumentException made
            // crash reports from this branch impossible to diagnose.
            throw new IllegalArgumentException("unknown request code in onActivityResult: " + requestCode);
    }
}
/**
 * A preference value change listener that updates the preference's summary
 * to reflect its new value. Also propagates service activation and map source
 * changes into {@code Settings} and forces a re-login where credentials changed.
 */
private static final Preference.OnPreferenceChangeListener VALUE_CHANGE_LISTENER = new Preference.OnPreferenceChangeListener() {
// Captured on every callback so findPreference() below can resolve sibling preferences.
private PreferenceManager preferenceManager;
@Override
public boolean onPreferenceChange(final Preference preference, final Object value) {
preferenceManager = preference.getPreferenceManager();
final String stringValue = value.toString();
if (isPreference(preference, R.string.pref_mapsource)) {
// reset the cached map source
MapSource mapSource;
try {
final int mapSourceId = Integer.parseInt(stringValue);
mapSource = MapProviderFactory.getMapSource(mapSourceId);
} catch (final NumberFormatException e) {
Log.e("SettingsActivity.onPreferenceChange: bad source id '" + stringValue + "'", e);
mapSource = null;
}
// If there is no corresponding map source (because some map sources were
// removed from the device since) then use the first one available.
if (mapSource == null) {
mapSource = MapProviderFactory.getAnyMapSource();
if (mapSource == null) {
// There are no map source. There is little we can do here, except log an error and
// return to avoid triggering a null pointer exception.
Log.e("SettingsActivity.onPreferenceChange: no map source available");
return true;
}
}
Settings.setMapSource(mapSource);
preference.setSummary(mapSource.getName());
} else if (isPreference(preference, R.string.pref_connectorOCActive)
|| isPreference(preference, R.string.pref_connectorOCPLActive)
|| isPreference(preference, R.string.pref_connectorOCNLActive)
|| isPreference(preference, R.string.pref_connectorOCUSActive)
|| isPreference(preference, R.string.pref_connectorOCROActive)
|| isPreference(preference, R.string.pref_connectorOCUKActive)
|| isPreference(preference, R.string.pref_connectorGCActive)
|| isPreference(preference, R.string.pref_connectorOXActive)
|| isPreference(preference, R.string.pref_connectorECActive)) {
// update summary
final boolean boolVal = (Boolean) value;
final String summary = getServiceSummary(boolVal);
if (OCPreferenceKeys.isOCPreference(preference.getKey())) {
final OCPreferenceKeys prefKey = OCPreferenceKeys.getByKey(preference.getKey());
preference.getPreferenceManager().findPreference(getKey(prefKey.prefScreenId)).setSummary(summary);
} else if (isPreference(preference, R.string.pref_connectorGCActive)) {
preference.getPreferenceManager().findPreference(getKey(R.string.preference_screen_gc)).setSummary(summary);
} else if (isPreference(preference, R.string.pref_connectorOXActive)) {
preference.getPreferenceManager().findPreference(getKey(R.string.preference_screen_ox)).setSummary(summary);
} else if (isPreference(preference, R.string.pref_connectorECActive)) {
preference.getPreferenceManager().findPreference(getKey(R.string.preference_screen_ec)).setSummary(summary);
}
redrawScreen(preference.getPreferenceManager().findPreference(getKey(R.string.preference_screen_services)));
// reset log-in status if connector activation was changed
CgeoApplication.getInstance().forceRelog();
} else if (preference instanceof ListPreference) {
// For list preferences, look up the correct display value in
// the preference's 'entries' list.
final ListPreference listPreference = (ListPreference) preference;
final int index = listPreference.findIndexOfValue(stringValue);
// Set the summary to reflect the new value.
preference.setSummary(
index >= 0
? listPreference.getEntries()[index]
: null);
} else if (isPreference(preference, R.string.pref_fakekey_preference_backup_info)) {
final String text;
if (DatabaseBackupUtils.hasBackup()) {
text = preference.getContext().getString(R.string.init_backup_last) + " "
+ DatabaseBackupUtils.getBackupDateTime();
} else {
text = preference.getContext().getString(R.string.init_backup_last_no);
}
preference.setSummary(text);
} else if (isPreference(preference, R.string.pref_ratingwanted)) {
findPreference(R.string.preference_screen_gcvote).setSummary(getServiceSummary((Boolean) value));
redrawScreen(findPreference(R.string.preference_screen_services));
} else if (isPreference(preference, R.string.pref_connectorGeokretyActive)) {
findPreference(R.string.preference_screen_geokrety).setSummary(getServiceSummary((Boolean) value));
redrawScreen(findPreference(R.string.preference_screen_services));
} else {
// For all other preferences, set the summary to the value's
// simple string representation.
preference.setSummary(stringValue);
}
// TODO: do not special case geocaching.com here
if ((isPreference(preference, R.string.pref_username) && !stringValue.equals(Settings.getUsername())) || (isPreference(preference, R.string.pref_password) && !stringValue.equals(Settings.getGcCredentials().getRight()))) {
// reset log-in if gc user or password is changed
CgeoApplication.getInstance().forceRelog();
}
return true;
}
// Resolves a preference by the string resource id of its key, via the captured manager.
private Preference findPreference(final int preferenceKeyResourceId) {
return preferenceManager.findPreference(getKey(preferenceKeyResourceId));
}
};
/**
 * Binds a preference's summary to its value. More specifically, when the
 * preference's value is changed, its summary (line of text below the
 * preference title) is updated to reflect the value. The summary is also
 * immediately updated upon calling this method. The exact display format is
 * dependent on the type of preference.
 *
 * @see #VALUE_CHANGE_LISTENER
 */
private static void bindSummaryToValue(final Preference preference, final Object value) {
// Set the listener to watch for value changes.
if (preference == null) {
return;
}
preference.setOnPreferenceChangeListener(VALUE_CHANGE_LISTENER);
// Trigger the listener immediately with the preference's
// current value.
VALUE_CHANGE_LISTENER.onPreferenceChange(preference, value);
}
/**
 * auto-care for the summary of the preference of string type with this key
 */
private void bindSummaryToStringValue(final int key) {
final Preference pref = getPreference(key);
if (pref == null) {
return;
}
// Seed the summary with the currently stored value (empty string when unset).
final String value = PreferenceManager
.getDefaultSharedPreferences(pref.getContext())
.getString(pref.getKey(), "");
bindSummaryToValue(pref, value);
}
// Wrapper isolating the deprecated PreferenceActivity API in a single place.
@SuppressWarnings("deprecation")
private static Preference findPreference(final PreferenceActivity preferenceActivity, final CharSequence key) {
return preferenceActivity.findPreference(key);
}
// Wrapper isolating the deprecated PreferenceActivity API in a single place.
@SuppressWarnings("deprecation")
private static void addPreferencesFromResource(final PreferenceActivity preferenceActivity, final int preferencesResId) {
preferenceActivity.addPreferencesFromResource(preferencesResId);
}
@SuppressWarnings("deprecation")
@Override
public void setPreferenceScreen(final PreferenceScreen preferenceScreen) {
// TODO replace with fragment based code
super.setPreferenceScreen(preferenceScreen);
}
@SuppressWarnings("deprecation")
@Override
public PreferenceManager getPreferenceManager() {
// TODO replace with fragment based code
return super.getPreferenceManager();
}
// True when the preference's key matches the given string resource id.
// Comparison is done on the resource string so a preference with a null key is handled safely.
private static boolean isPreference(final Preference preference, final int preferenceKeyId) {
return getKey(preferenceKeyId).equals(preference.getKey());
}
}
| |
package xyz.cloudkeeper.model.beans;
import org.eclipse.persistence.jaxb.JAXBContextFactory;
import org.testng.Assert;
import org.testng.ITest;
import org.testng.annotations.Test;
import xyz.cloudkeeper.model.Immutable;
import xyz.cloudkeeper.model.bare.BareLocatable;
import xyz.cloudkeeper.model.beans.element.MutableAnnotatedConstruct;
import xyz.cloudkeeper.model.beans.element.MutableQualifiedNamable;
import xyz.cloudkeeper.model.beans.element.MutableSimpleNameable;
import xyz.cloudkeeper.model.immutable.AnnotationValue;
import xyz.cloudkeeper.model.immutable.Location;
import xyz.cloudkeeper.model.immutable.element.Key;
import xyz.cloudkeeper.model.immutable.element.Name;
import xyz.cloudkeeper.model.immutable.element.NoKey;
import xyz.cloudkeeper.model.immutable.element.SimpleName;
import xyz.cloudkeeper.model.immutable.element.Version;
import xyz.cloudkeeper.model.immutable.execution.ExecutionTrace;
import xyz.cloudkeeper.model.util.ImmutableList;
import javax.annotation.Nullable;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.adapters.XmlAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.namespace.QName;
import javax.xml.transform.stream.StreamSource;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.io.StringReader;
import java.io.StringWriter;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.regex.Pattern;
public class MutableLocatableContract implements ITest {
// Shared empty options array to avoid per-call allocations.
// NOTE(review): CopyOption is not present in the visible import block — verify the
// file imports the intended CopyOption type (presumably a project/model type).
private static final CopyOption[] NO_COPY_OPTIONS = new CopyOption[0];
// Class under contract test.
private final Class<?> clazz;
// All matched getter/setter pairs discovered reflectively in the constructor.
private final ImmutableList<GetterSetterPair> getterSetterPairs;
// State shared across the ordered contract test methods.
private MutableLocatable<?> instance;
private MutableLocatable<?> copy;
private Method staticCopyMethod;
/**
 * Creates one contract instance per given mutable-locatable class; convenience
 * factory for building TestNG test suites.
 */
@SafeVarargs
public static MutableLocatableContract[] contractsFor(Class<? extends MutableLocatable<?>>... classes) {
    final MutableLocatableContract[] contracts = new MutableLocatableContract[classes.length];
    int index = 0;
    for (final Class<? extends MutableLocatable<?>> contractClass : classes) {
        contracts[index] = new MutableLocatableContract(contractClass);
        ++index;
    }
    return contracts;
}
/**
 * Discovers all getter/setter pairs of the given class reflectively.
 * For every single-argument {@code setX} method the matching {@code getX}
 * (or {@code isX} for boolean properties) is looked up; a pair is kept only
 * when the getter's return type equals the setter's parameter type.
 * Fails the test if a setter has no corresponding getter.
 */
public MutableLocatableContract(Class<? extends MutableLocatable<?>> clazz) {
this.clazz = clazz;
List<GetterSetterPair> newGetterSetterPairs = new ArrayList<>();
for (Method method: clazz.getMethods()) {
try {
Class<?>[] parameterTypes = method.getParameterTypes();
if (method.getName().startsWith("set") && parameterTypes.length == 1) {
Class<?> expectedReturnType = parameterTypes[0];
// Boolean properties follow the JavaBeans "isX" naming convention.
Method getter = Boolean.class.equals(expectedReturnType) || boolean.class.equals(expectedReturnType)
? clazz.getMethod("is" + method.getName().substring(3))
: clazz.getMethod("get" + method.getName().substring(3));
Class<?> getterReturnType = getter.getReturnType();
if (getterReturnType.equals(parameterTypes[0])) {
newGetterSetterPairs.add(new GetterSetterPair(getter, method));
}
}
} catch (NoSuchMethodException exception) {
Assert.fail(String.format(
"Could not find getter corresponding to setter method %s.", method
), exception);
}
}
getterSetterPairs = ImmutableList.copyOf(newGetterSetterPairs);
}
// ITest: report the class under test as the test name.
@Override
public String getTestName() {
return clazz.getSimpleName();
}
// Contract: the class must have a public no-args constructor; keeps the created
// instance for the subsequent test methods.
@Test
public void noArgsConstructor() throws ReflectiveOperationException {
instance = (MutableLocatable<?>) clazz.getConstructor().newInstance();
}
/**
 * Instantiates a value assignable to the given MutableLocatable type.
 * Abstract types are resolved to a concrete subclass via their @XmlSeeAlso
 * annotation (first listed class), descending until a constructible class is found.
 */
private static MutableLocatable<?> valueForMutableLocatable(Class<?> clazz) throws Exception {
Class<?> currentClass = clazz;
while (true) {
if (Modifier.isAbstract(currentClass.getModifiers())) {
XmlSeeAlso annotation = currentClass.getAnnotation(XmlSeeAlso.class);
Assert.assertNotNull(annotation, String.format("Missing @%s annotation on %s.",
XmlSeeAlso.class.getSimpleName(), currentClass));
currentClass = (Class<?>) annotation.value()[0];
} else {
return (MutableLocatable<?>) currentClass.getConstructor().newInstance();
}
}
}
/**
 * Pair of a setter argument (value, typed clazz) and the value the getter is
 * expected to return afterwards (equalsValue) — they differ for properties that
 * normalize their input (e.g. String -> SimpleName).
 */
private static final class ClassAndValue {
private final Class<?> clazz;
private final Object value;
private final Object equalsValue;
private ClassAndValue(Class<?> clazz, Object value, Object equalsValue) {
this.clazz = clazz;
this.value = value;
this.equalsValue = equalsValue;
}
// Convenience for properties where setter input and getter output are identical.
private ClassAndValue(Class<?> clazz, Object value) {
this(clazz, value, value);
}
}
/**
 * Returns a test value for a property of the given type, paired with the setter parameter type
 * and the value the getter is expected to return.
 *
 * <p>List properties recurse into their element type. Several domain types (names, annotation
 * values, nameables) are set through a plain representation (e.g., a {@code String}) that the
 * setter is expected to wrap; for those, {@code equalsValue} differs from {@code value}.
 * NOTE: branch order matters — specific classes are matched before the broader
 * {@code MutableLocatable.isAssignableFrom} check.
 */
private static ClassAndValue valueForType(Class<?> rawClass, Type generic) throws Exception {
    if (List.class.equals(rawClass)) {
        // Resolve the element type of List<E>; E may itself be a parameterized type.
        Type listPropertyElementType = ((ParameterizedType) generic).getActualTypeArguments()[0];
        Class<?> listPropertyElementRaw;
        if (listPropertyElementType instanceof Class<?>) {
            listPropertyElementRaw = (Class<?>) listPropertyElementType;
        } else if (listPropertyElementType instanceof ParameterizedType) {
            listPropertyElementRaw = (Class<?>) ((ParameterizedType) listPropertyElementType).getRawType();
        } else {
            Assert.fail(String.format("Expected generic type that is %s or %s, but got %s.",
                Class.class.getSimpleName(), ParameterizedType.class.getSimpleName(), listPropertyElementType));
            throw new AssertionError();  // unreachable: fail() always throws
        }
        return new ClassAndValue(
            List.class,
            Collections.singletonList(valueForType(listPropertyElementRaw, listPropertyElementType).value)
        );
    } else if (rawClass.isEnum()) {
        return new ClassAndValue(rawClass, rawClass.getEnumConstants()[0]);
    } else if (Location.class.equals(rawClass)) {
        return new ClassAndValue(rawClass, new Location("foo", 3, 24));
    } else if (SimpleName.class.equals(rawClass)) {
        // Setter takes the String form; the getter returns the wrapped SimpleName.
        return new ClassAndValue(String.class, "Bar", SimpleName.identifier("Bar"));
    } else if (Name.class.equals(rawClass)) {
        return new ClassAndValue(String.class, "com.foo.Bar", Name.qualifiedName("com.foo.Bar"));
    } else if (Key.class.equals(rawClass)) {
        return new ClassAndValue(Key.class, NoKey.instance());
    } else if (Version.class.equals(rawClass)) {
        return new ClassAndValue(rawClass, Version.valueOf("1.2.3"));
    } else if (AnnotationValue.class.equals(rawClass)) {
        return new ClassAndValue(Serializable.class, 34, AnnotationValue.of(34));
    } else if (ExecutionTrace.class.equals(rawClass)) {
        return new ClassAndValue(rawClass, ExecutionTrace.valueOf("/loop/value:in:num"));
    } else if (MutableQualifiedNamable.class.equals(rawClass)) {
        return new ClassAndValue(String.class, "com.foo.Bar",
            new MutableQualifiedNamable().setQualifiedName("com.foo.Bar"));
    } else if (MutableSimpleNameable.class.equals(rawClass)) {
        return new ClassAndValue(String.class, "Bar", new MutableSimpleNameable().setSimpleName("Bar"));
    } else if (MutableLocatable.class.isAssignableFrom(rawClass)) {
        return new ClassAndValue(rawClass, valueForMutableLocatable(rawClass));
    } else if (URI.class.equals(rawClass)) {
        return new ClassAndValue(rawClass, URI.create("test:some.module"));
    } else if (Pattern.class.equals(rawClass)) {
        return new ClassAndValue(rawClass, Pattern.compile(".*"));
    } else if (Date.class.equals(rawClass)) {
        return new ClassAndValue(rawClass, new Date());
    } else if (String.class.equals(rawClass)) {
        return new ClassAndValue(rawClass, "baz");
    } else if (byte[].class.equals(rawClass)) {
        return new ClassAndValue(rawClass, new byte[] { (byte) 1, (byte) 2 });
    } else if (Object.class.equals(rawClass)) {
        return new ClassAndValue(rawClass, new Object());
    } else {
        // No other type is currently supported.
        Assert.fail(String.format("Unexpected property type %s.", generic));
        throw new AssertionError();  // unreachable: fail() always throws
    }
}
/**
 * Verifies each getter/setter pair: list-valued setters accept {@code null} only if the property
 * defaults to {@code null}, and a value stored through the setter (possibly via its
 * plain-representation overload) is observable through the getter afterwards.
 */
@Test(dependsOnMethods = "noArgsConstructor")
public void setters() throws Exception {
    for (GetterSetterPair getterSetterPair: getterSetterPairs) {
        Method getter = getterSetterPair.getter;
        Method setter = getterSetterPair.setter;
        Class<?> propertyClass = setter.getParameterTypes()[0];
        Type propertyType = setter.getGenericParameterTypes()[0];
        // Verify that null is a valid argument for list-property setters if and only if a property is initialized
        // as null by the default constructor
        if (List.class.isAssignableFrom(propertyClass)) {
            if (getter.invoke(instance) == null) {
                setter.invoke(instance, new Object[] { null });
                Assert.assertNull(getter.invoke(instance));
            } else {
                try {
                    setter.invoke(instance, new Object[] { null });
                    Assert.fail("Expected exception.");
                } catch (InvocationTargetException exception) {
                    // Reflective calls wrap the setter's NPE in InvocationTargetException.
                    Assert.assertTrue(exception.getCause() instanceof NullPointerException);
                }
            }
        }
        // Verify that calling setter works. The setter is looked up again because classAndValue.clazz
        // may be a plain-representation overload (e.g., String instead of SimpleName).
        ClassAndValue classAndValue = valueForType(propertyClass, propertyType);
        clazz.getMethod(setter.getName(), classAndValue.clazz).invoke(instance, classAndValue.value);
        // Verify that getter now returns the previously updated property
        Object newValue = getter.invoke(instance);
        Assert.assertEquals(newValue, classAndValue.equalsValue);
    }
}
/**
 * Determines the "bare" interface of the given class by inspecting its copy constructor, which
 * by convention takes a bare instance plus copy options.
 */
private static Class<?> getBareInterface(Class<?> clazz) {
    for (Constructor<?> constructor: clazz.getDeclaredConstructors()) {
        Class<?>[] parameterTypes = constructor.getParameterTypes();
        boolean isCopyConstructor = parameterTypes.length == 2
            && BareLocatable.class.isAssignableFrom(parameterTypes[0]);
        if (!isCopyConstructor) {
            continue;
        }
        Class<?> bareInterface = parameterTypes[0];
        // Sanity check: bare interfaces follow the Bare* naming convention.
        Assert.assertTrue(bareInterface.getSimpleName().startsWith("Bare"));
        return bareInterface;
    }
    Assert.fail(String.format("Could not find copy constructor for %s.", clazz));
    throw new AssertionError();  // unreachable: fail() always throws
}
@Test(dependsOnMethods = "setters")
public void copyOf() throws IllegalAccessException, NoSuchMethodException, InvocationTargetException {
    // Derive the static copy-method name: direct subclasses of the two hierarchy roots use a
    // plain "copyOf"; deeper classes use "copyOf" + the bare-interface name minus its "Bare" prefix.
    Class<?> bareInterface = getBareInterface(clazz);
    String simpleName = bareInterface.getSimpleName();
    Class<?> superclass = clazz.getSuperclass();
    String copyOfName = superclass.equals(MutableLocatable.class)
        || superclass.equals(MutableAnnotatedConstruct.class)
        ? "copyOf"
        : "copyOf" + simpleName.substring("Bare".length());
    staticCopyMethod = clazz.getMethod(copyOfName, bareInterface, CopyOption[].class);
    // Verify that calling copyOf method with null returns null
    Assert.assertNull(staticCopyMethod.invoke(null, null, NO_COPY_OPTIONS));
    // The copy is shared with the tests depending on this one (testEquals, etc.).
    copy = (MutableLocatable<?>) staticCopyMethod.invoke(null, instance, NO_COPY_OPTIONS);
}
@Test(dependsOnMethods = "copyOf")
public void testEquals() {
    // Reflexivity, and symmetry between the instance and its copy.
    for (Object[] pair: new Object[][] { { instance, instance }, { copy, instance }, { instance, copy } }) {
        Assert.assertEquals(pair[0], pair[1]);
    }
    // equals() must reject null and unrelated types.
    Assert.assertFalse(instance.equals(null));
    Assert.assertFalse(instance.equals(new Object()));
    // An equal copy must also render identically.
    Assert.assertEquals(copy.toString(), instance.toString());
}
/**
 * Returns whether the given interface declares a visitor-pattern entry point, i.e., a method
 * whose name starts with "accept".
 */
private static boolean hasVisitorMethod(Class<?> clazz) {
    boolean found = false;
    for (Method declaredMethod: clazz.getDeclaredMethods()) {
        if (declaredMethod.getName().startsWith("accept")) {
            found = true;
            break;
        }
    }
    return found;
}
@Test(dependsOnMethods = "testEquals")
public void inheritedCopyOf()
        throws IllegalAccessException, NoSuchMethodException, InvocationTargetException {
    // Walk up the superclass chain (stopping at the hierarchy roots) and verify that each
    // ancestor's abstract copyOf<Name> factory reproduces an equal object for this instance.
    Class<?> currentClass = clazz.getSuperclass();
    while (MutableLocatable.class.isAssignableFrom(currentClass) && !currentClass.equals(MutableLocatable.class)
            && !currentClass.equals(MutableAnnotatedConstruct.class)) {
        Class<?> bareInterface = getBareInterface(currentClass);
        // Only hierarchy levels with a visitor method expose an abstract copy factory.
        if (hasVisitorMethod(bareInterface)) {
            String abstractCopyOfName = "copyOf" + currentClass.getSimpleName().substring("Mutable".length());
            Method abstractCopyMethod
                = currentClass.getMethod(abstractCopyOfName, bareInterface, CopyOption[].class);
            MutableLocatable<?> abstractCopy
                = (MutableLocatable<?>) abstractCopyMethod.invoke(null, instance, NO_COPY_OPTIONS);
            Assert.assertEquals(abstractCopy, instance);
        }
        currentClass = currentClass.getSuperclass();
    }
}
/**
 * Verifies that the copy method produces an object that does not share any transitive references to mutable
 * objects.
 */
@Test(dependsOnMethods = "copyOf")
public void emptyObjectGraphIntersection() {
    Assert.assertTrue(
        computeObjectGraphIntersection(instance, copy).isEmpty(),
        "Shared object references found."
    );
}
/** Immutable pairing of a property's getter with its matching setter. */
private static final class GetterSetterPair {
    private final Method getter;
    private final Method setter;
    private GetterSetterPair(Method getter, Method setter) {
        this.getter = getter;
        this.setter = setter;
    }
}
/**
 * Copies {@code Object}-typed properties from {@code original} into {@code deserialized}.
 * Such properties are not restored by (de)serialization, so they must be null afterwards and
 * are re-populated here before equality comparisons.
 */
private void restoreNonSerializableProperties(MutableLocatable<?> deserialized, MutableLocatable<?> original)
        throws IllegalAccessException, InvocationTargetException {
    // Verify that properties of type Object have not been restored. Instead, restore manually.
    for (GetterSetterPair getterSetterPair: getterSetterPairs) {
        Method getter = getterSetterPair.getter;
        Method setter = getterSetterPair.setter;
        if (getter.getReturnType().equals(Object.class)) {
            Assert.assertNull(getter.invoke(deserialized));
            setter.invoke(deserialized, getter.invoke(original));
        }
    }
}
/**
 * Verifies that Java serialization and deserialization works and gives equal results.
 */
@Test(dependsOnMethods = "setters")
public void serialization() throws Exception {
    byte[] bytes;
    try (
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream)
    ) {
        objectOutputStream.writeObject(instance);
        // Deliberate explicit close(): try-with-resources would only close after this block,
        // but the object stream must be flushed before toByteArray() is called below.
        objectOutputStream.close();
        bytes = byteArrayOutputStream.toByteArray();
    }
    MutableLocatable<?> reconstructed;
    try (
        ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);
        ObjectInputStream objectInputStream = new ObjectInputStream(byteArrayInputStream)
    ) {
        reconstructed = (MutableLocatable<?>) objectInputStream.readObject();
    }
    // Object-typed properties are not serializable; restore them before comparing.
    restoreNonSerializableProperties(reconstructed, instance);
    Assert.assertEquals(reconstructed, instance);
    Assert.assertEquals(reconstructed.hashCode(), instance.hashCode());
}
/**
 * Invokes {@code xmlAdapter.marshal(object)}; confines the unavoidable unchecked cast (and its
 * {@code @SuppressWarnings}) to this small helper.
 */
@SuppressWarnings("unchecked")
private static <T> Object uncheckedMarshal(MutableLocatable<?> object, XmlAdapter<?, T> xmlAdapter)
        throws Exception {
    return xmlAdapter.marshal((T) object);
}
/**
 * Wraps {@code value} in a {@link JAXBElement} for types without an {@code @XmlRootElement};
 * confines the unchecked cast to this helper.
 */
@SuppressWarnings("unchecked")
private static <T> JAXBElement<T> uncheckedNewJAXBElement(Class<T> clazz, Object value) {
    return new JAXBElement<>(new QName("xml-test"), clazz, (T) value);
}
/**
 * Invokes {@code xmlAdapter.unmarshal(object)}; confines the unchecked cast to this helper.
 */
@SuppressWarnings("unchecked")
private static <T> MutableLocatable<?> uncheckedUnmarshal(Object object, XmlAdapter<T, ?> xmlAdapter)
        throws Exception {
    return (MutableLocatable<?>) xmlAdapter.unmarshal((T) object);
}
/**
 * Verifies an XML marshal/unmarshal round trip. Location info is stripped first (it is not part
 * of the XML representation). Classes without {@code @XmlRootElement} are wrapped in a
 * {@link JAXBElement}; if an {@code @XmlJavaTypeAdapter} is declared anywhere in the hierarchy,
 * the object is additionally passed through that adapter in both directions.
 */
@Test(dependsOnMethods = "setters")
public void xmlSerialization() throws Exception {
    MutableLocatable<?> copyWithoutLocations = (MutableLocatable<?>) staticCopyMethod.invoke(
        null, instance, new CopyOption[] { StandardCopyOption.STRIP_LOCATION });
    Object objectToSerialize = copyWithoutLocations;
    Class<?> classToBind = copyWithoutLocations.getClass();
    @Nullable XmlAdapter<?, ?> xmlAdapter = null;
    if (!clazz.isAnnotationPresent(XmlRootElement.class)) {
        // Search the hierarchy (up to the roots) for an XML type adapter.
        Class<?> currentClass = clazz;
        while (MutableLocatable.class.isAssignableFrom(currentClass)
                && !MutableLocatable.class.equals(currentClass)
                && !MutableAnnotatedConstruct.class.equals(currentClass)) {
            @Nullable XmlJavaTypeAdapter typeAdapter = currentClass.getAnnotation(XmlJavaTypeAdapter.class);
            if (typeAdapter != null) {
                Class<?> typeAdapterClass = typeAdapter.value();
                // Adapter constructors may be non-public; make accessible before instantiating.
                Constructor<?> typeAdapterConstructor = typeAdapterClass.getDeclaredConstructor();
                typeAdapterConstructor.setAccessible(true);
                xmlAdapter = (XmlAdapter<?, ?>) typeAdapterConstructor.newInstance();
                Object xmlAdaptedObject = uncheckedMarshal(copyWithoutLocations, xmlAdapter);
                classToBind = xmlAdaptedObject.getClass();
                objectToSerialize = uncheckedNewJAXBElement(classToBind, xmlAdaptedObject);
                break;
            }
            currentClass = currentClass.getSuperclass();
        }
        // No adapter found: still needs a JAXBElement wrapper because there is no @XmlRootElement.
        if (xmlAdapter == null) {
            objectToSerialize = uncheckedNewJAXBElement(classToBind, copyWithoutLocations);
        }
    }
    // We use EclipseLink MOXy for marshalling, and the JAXB Reference Implementation for unmarshalling
    JAXBContext moxyContext = JAXBContextFactory.createContext(new Class<?>[]{ classToBind }, null);
    Marshaller marshaller = moxyContext.createMarshaller();
    marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
    JAXBContext internalJaxbContext = JAXBContext.newInstance(classToBind);
    Unmarshaller unmarshaller = internalJaxbContext.createUnmarshaller();
    StringWriter stringWriter = new StringWriter();
    marshaller.marshal(objectToSerialize, stringWriter);
    Object deserializedObject;
    try (StringReader stringReader = new StringReader(stringWriter.toString())) {
        deserializedObject = unmarshaller.unmarshal(new StreamSource(stringReader), classToBind).getValue();
    }
    // Reverse the adapter transformation, if one was applied.
    MutableLocatable<?> deserialized = xmlAdapter == null
        ? (MutableLocatable<?>) deserializedObject
        : uncheckedUnmarshal(deserializedObject, xmlAdapter);
    restoreNonSerializableProperties(deserialized, instance);
    Assert.assertEquals(deserialized, copyWithoutLocations);
    Assert.assertEquals(deserialized.hashCode(), copyWithoutLocations.hashCode());
}
/**
 * Verifies the visitor-pattern entry points: every {@code accept*(SomeVisitor, P)} method must
 * call back exactly the visit method whose first parameter type matches the instance's type,
 * passing through both the instance and the extra argument.
 */
@Test(dependsOnMethods = "setters")
public void visitors() throws IllegalAccessException, InvocationTargetException {
    final Object argument = new Object();
    for (Method method: clazz.getMethods()) {
        Class<?>[] parameterTypes = method.getParameterTypes();
        if (method.getName().startsWith("accept") && parameterTypes.length == 2
                && parameterTypes[0].getSimpleName().endsWith("Visitor")) {
            Class<?> visitorClass = parameterTypes[0];
            // Dynamic proxy visitor: records which visit method was invoked by returning the
            // visited parameter type; any non-visit call is a test failure.
            InvocationHandler invocationHandler = (proxy, method1, args) -> {
                if (method1.getName().startsWith("visit")) {
                    Assert.assertSame(args[0], instance);
                    Assert.assertSame(args[1], argument);
                    return method1.getParameterTypes()[0];
                } else {
                    throw new UnsupportedOperationException(String.format(
                        "Unexpected call to method %s.", method1
                    ));
                }
            };
            Object visitor = Proxy.newProxyInstance(
                visitorClass.getClassLoader(), new Class<?>[]{ visitorClass }, invocationHandler);
            Class<?> result = (Class<?>) method.invoke(instance, visitor, argument);
            // The visit method dispatched to must be the one for this instance's own type.
            Assert.assertTrue(result.isInstance(instance));
        }
    }
}
/** An {@link OutputStream} that silently discards everything written to it. */
private static final class NullOutputStream extends OutputStream {
    @Override
    public void write(int theByte) { }
    @Override
    public void write(byte[] bytes, int off, int len) { }
}
/**
 * An {@link ObjectOutputStream} that (ab)uses the serialization machinery as an object-graph
 * walker. Output bytes are discarded; {@link #replaceObject(Object)} is only used as a hook to
 * observe every reachable object. In the first run it records all reachable mutable objects; in
 * a second run (seeded with the first run's map) it records which of those are reached again.
 */
private static final class CheckingObjectOutputStream extends ObjectOutputStream {
    // Placeholder written instead of immutable objects, so the walker does not descend into them.
    private enum Dummy {
        INSTANCE
    }
    // True when collecting the initial reachability set, false when detecting duplicates.
    private final boolean firstRun;
    // Identity-based set (map to TRUE) of objects seen during the first run.
    private final IdentityHashMap<Object, Boolean> identityHashMap;
    // Objects seen in both runs; only populated on the second run.
    @Nullable private final IdentityHashMap<Object, Boolean> duplicates;
    CheckingObjectOutputStream(@Nullable IdentityHashMap<Object, Boolean> identityHashMap) throws IOException {
        super(new NullOutputStream());
        if (identityHashMap == null) {
            firstRun = true;
            this.identityHashMap = new IdentityHashMap<>();
            duplicates = null;
        } else {
            firstRun = false;
            this.identityHashMap = identityHashMap;
            duplicates = new IdentityHashMap<>();
        }
        // Required for replaceObject() to be called at all.
        enableReplaceObject(true);
    }
    // Immutable objects may legitimately be shared between copies, so they are skipped.
    private static boolean isImmutable(Object object) {
        for (Class<?> clazz: Arrays.asList(Immutable.class, Enum.class, Pattern.class, String.class, Integer.class,
                URI.class)) {
            if (clazz.isAssignableFrom(object.getClass())) {
                return true;
            }
        }
        return false;
    }
    @Override
    protected Object replaceObject(Object obj) throws IOException {
        if (isImmutable(obj)) {
            // Break recursion: No need to descend into immutable objects
            return Dummy.INSTANCE;
        }
        if (firstRun) {
            identityHashMap.put(obj, Boolean.TRUE);
        } else {
            assert duplicates != null;
            if (identityHashMap.containsKey(obj)) {
                duplicates.put(obj, Boolean.TRUE);
            }
        }
        return super.replaceObject(obj);
    }
}
/**
 * Walks the object graph of {@code object} by serializing it into a discarding stream.
 * With a null {@code identityHashMap}, returns the identity set of all reachable mutable
 * objects; otherwise returns the subset of {@code identityHashMap} that is also reachable
 * from {@code object} (the duplicates).
 */
private static IdentityHashMap<Object, Boolean> collectObjects(
        Serializable object, @Nullable IdentityHashMap<Object, Boolean> identityHashMap) {
    try (CheckingObjectOutputStream checkingObjectOutputStream = new CheckingObjectOutputStream(identityHashMap)) {
        checkingObjectOutputStream.writeObject(object);
        @Nullable IdentityHashMap<Object, Boolean> returnMap = checkingObjectOutputStream.firstRun
            ? checkingObjectOutputStream.identityHashMap
            : checkingObjectOutputStream.duplicates;
        assert returnMap != null;
        return returnMap;
    } catch (IOException exception) {
        // Serialization into an in-memory null stream should never fail.
        throw new IllegalStateException("Unexpected exception.", exception);
    }
}
/**
 * Returns all mutable objects reachable from both {@code first} and {@code second}: the first
 * pass records everything reachable from {@code first}, the second reports what is reached again.
 */
private static List<Object> computeObjectGraphIntersection(Serializable first, Serializable second) {
    return new ArrayList<>(collectObjects(second, collectObjects(first, null)).keySet());
}
}
| |
/**
* Copyright (c) 2015 MapR, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ojai.tests.json;
import static org.junit.Assert.assertEquals;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.sql.Date;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.ojai.Document;
import org.ojai.DocumentReader;
import org.ojai.DocumentReader.EventType;
import org.ojai.Value.Type;
import org.ojai.json.Json;
import org.ojai.util.Values;
public class TestJsonDocument {

    /**
     * Exercises setting and reading back every supported value type of a JSON {@link Document}:
     * scalars, decimals, binaries (array, sliced array, ByteBuffer), nested maps, and lists.
     */
    @Test
    public void testAllTypes() {
        Document rec = Json.newDocument();
        rec.set("map.field1", (byte) 100);
        rec.set("map.field2", (short) 10000);
        rec.set("map.longfield2verylongverylong", 12.345678);
        rec.set("FIELD2", "VERY LONG STRING IS THIS YOU KNOW");
        rec.set("map2.field1", (byte) 100);
        rec.set("map2.field2", (short) 10000);
        rec.set("map2.longfield2verylongverylong", 12.345678);
        rec.set("FIELD3", "VERY LONG STRING IS THIS YOU KNOW");
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("Name", "Anurag");
        map.put("Age", 20);
        rec.set("newmap.map", map);
        rec.set("map.boolean", false);
        rec.set("map.string", "string");
        rec.set("map.byte", (byte) 100);
        rec.set("map.short", (short) 10000);
        rec.set("map.int", 50000);
        rec.set("map.long", 12345678999L);
        rec.set("map.float", 10.1234f);
        rec.set("map.double", 10.12345678910d);
        // rec.set("map.interval", new Interval(1000, 2000));
        rec.set("map.decimal", new BigDecimal("1000000000.11111111111111111111"));
        byte[] bytes = new byte[5];
        for (int i = 0; i < bytes.length; ++i) {
            bytes[i] = (byte) i;
        }
        rec.set("map.binary1", bytes);
        // binary2 stores a slice: offset 1, length 3.
        rec.set("map.binary2", bytes, 1, 3);
        ByteBuffer bbuf = ByteBuffer.allocate(100);
        for (int i = 0; i < bbuf.capacity(); ++i) {
            bbuf.put((byte) i);
        }
        rec.set("map.binary3", bbuf);
        map = new HashMap<String, Object>();
        map.put("Name", "Anurag");
        map.put("Age", 20);
        List<Integer> scores = new ArrayList<Integer>();
        scores.add(100);
        scores.add(200);
        scores.add(300);
        scores.add(400);
        // map.put("Score", scores);
        rec.set("map.map", map);
        List<Object> values = new ArrayList<Object>();
        values.add("Field1");
        // valueOf instead of the deprecated boxing constructors new Integer(...)/new Double(...).
        values.add(Integer.valueOf(500));
        values.add(Double.valueOf(5555.5555));
        rec.set("map.list", values);

        assertEquals(rec.getValue("map").getType(), Type.MAP);
        assertEquals(rec.getBoolean("map.boolean"), false);
        assertEquals(rec.getString("map.string"), "string");
        assertEquals(rec.getByte("map.byte"), (byte) 100);
        assertEquals(rec.getShort("map.short"), (short) 10000);
        assertEquals(rec.getInt("map.int"), 50000);
        assertEquals(rec.getLong("map.long"), 12345678999L);
        assertEquals(rec.getFloat("map.float"), (float) 10.1234, 0.0);
        assertEquals(rec.getDouble("map.double"), 10.12345678910d, 0.0);
        // assertEquals(rec.getInterval("map.interval"), new Interval(1000, 2000));
        assertEquals(rec.getDecimal("map.decimal"), new BigDecimal(
            "1000000000.11111111111111111111"));
        java.nio.ByteBuffer readBuf;
        readBuf = rec.getBinary("map.binary1");
        for (int i = 0; i < bytes.length; ++i) {
            assertEquals(readBuf.get(i), bytes[i]);
        }
        // binary2 must contain exactly the 3-byte slice starting at offset 1.
        readBuf = rec.getBinary("map.binary2");
        for (int i = 0; i < 3; ++i) {
            assertEquals(readBuf.get(), bytes[1 + i]);
        }
        readBuf = rec.getBinary("map.binary3");
        assertEquals(readBuf, bbuf);
        try {
            List<Object> l = rec.getValue("map.list").getList();
            assertEquals(values, l);
        } catch (Exception e) {
            // Previously this exception was only printed, so a broken list read passed silently.
            throw new AssertionError("Could not read back list value: " + e.getMessage(), e);
        }
    }

    /**
     * Walks the full document with a {@link DocumentReader} and spot-checks a scalar field and a
     * list element encountered during traversal.
     */
    @Test
    public void testAsReaderFull() {
        Document document = Json.newDocument();
        document.set("map.byte", (byte) 127);
        document.set("map.long", 123456789);
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("first", "John");
        // Was put("first", "Doe"), which silently overwrote "John"; sibling tests use first/last.
        map.put("last", "Doe");
        document.set("map.name", map);
        List<Object> mylist = new ArrayList<Object>();
        mylist.add(true);
        mylist.add("string");
        mylist.add(123.456);
        document.set("map.array", mylist);
        DocumentReader myReader = document.asReader();
        EventType et;
        String fieldName = null;
        while ((et = myReader.next()) != null) {
            if (et == EventType.FIELD_NAME) {
                fieldName = myReader.getFieldName();
            }
            if ((et == EventType.BYTE) && (fieldName.equals("byte"))) {
                assertEquals((byte) 127, myReader.getByte());
            }
            if ((et == EventType.STRING) && (fieldName.equals("array"))) {
                assertEquals("string", myReader.getString());
            }
        }
    }

    /**
     * Verifies a reader scoped to a subtree ("map.name") only emits that subtree's events.
     */
    @Test
    public void testAsReaderPartial() {
        Document document = Json.newDocument();
        document.set("map.byte", (byte) 127);
        document.set("map.num", 12345);
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("first", "John");
        map.put("last", "Doe");
        map.put("id", (long) 123456789);
        document.set("map.name", map);
        List<Object> mylist = new ArrayList<Object>();
        mylist.add(true);
        mylist.add("string");
        mylist.add(123.456);
        document.set("map.array", mylist);
        DocumentReader myReader = document.asReader("map.name");
        EventType event;
        String fieldName = null;
        while ((event = myReader.next()) != null) {
            if (event == EventType.FIELD_NAME) {
                fieldName = myReader.getFieldName();
            }
            if (event == EventType.LONG) {
                // The only LONG in the subtree must be the "id" field.
                assertEquals("id", fieldName);
                assertEquals(123456789, myReader.getLong());
            }
        }
    }

    /*
     * Unit test for asReader created on a leaf node of DOM tree.
     */
    @Test
    public void testAsReaderLeaf() {
        Document document = Json.newDocument();
        document.set("map.byte", (byte) 127);
        document.set("map.num", 12345);
        Map<String, Object> m = new HashMap<String, Object>();
        m.put("first", "John");
        m.put("last", "Doe");
        m.put("age", (short) 45);
        document.set("map.info", m);
        DocumentReader myReader = document.asReader("map.info.age");
        EventType event;
        int numtokens = 0;
        while ((event = myReader.next()) != null) {
            if (event == EventType.SHORT) {
                numtokens++;
                assertEquals((short) 45, myReader.getShort());
            }
        }
        // A leaf reader must produce exactly one value event.
        assertEquals(1, numtokens);
    }

    /** Verifies that a map stored into a document is returned unchanged by getMap(). */
    @Test
    public void testGetMap() {
        Document document = Json.newDocument();
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("a", 1);
        map.put("b", "A");
        document.set("map", map);
        assertEquals(map, document.getMap("map"));
    }

    /** Verifies indexed access into a boolean array property. */
    @Test
    public void testSetBooleanArray() {
        Document document = Json.newDocument();
        document.set("map.int", 111);
        boolean[] boolArray = new boolean[3];
        boolArray[0] = true;
        boolArray[1] = false;
        boolArray[2] = true;
        document.setArray("map.boolarray", boolArray);
        assertEquals(false, document.getBoolean("map.boolarray[1]"));
        assertEquals(true, document.getBoolean("map.boolarray[2]"));
    }

    /** Date round trip at the extreme ends of the epoch-millisecond range. */
    @Test
    public void testDateWithLongMaxMin() {
        Document doc = Json.newDocument();
        Date d1 = new Date(Long.MAX_VALUE);
        Date d2 = new Date(Long.MIN_VALUE);
        doc.set("maxdate", d1);
        doc.set("boolean", false);
        doc.set("mindate", d2);
        System.out.println(d1.getTime());
        System.out.println(doc.getDate("maxdate").getTime());
        assertEquals(true, doc.getValue("maxdate").equals(d1));
        assertEquals(true, doc.getValue("mindate").equals(d2));
    }

    /** Date round trip for a parsed date and for "today". */
    @Test
    public void testDate() {
        Document doc = Json.newDocument();
        doc.set("d1", Values.parseDate("2005-06-22"));
        Date d = new Date(new java.util.Date().getTime());
        doc.set("d2", d);
        System.out.println(doc.getDate("d1"));
        System.out.println(doc.getDate("d2"));
        assertEquals(true, doc.getDate("d1").toString().equals("2005-06-22"));
        assertEquals(true, doc.getDate("d2").toString().equals(d.toString()));
    }
}
| |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package kieker.tools.slastic.metamodel.executionEnvironment.util;
import java.util.List;
import kieker.tools.slastic.metamodel.core.Entity;
import kieker.tools.slastic.metamodel.core.FQNamedEntity;
import kieker.tools.slastic.metamodel.core.NamedEntity;
import kieker.tools.slastic.metamodel.core.SLAsticModel;
import kieker.tools.slastic.metamodel.executionEnvironment.*;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;
/**
* <!-- begin-user-doc -->
* The <b>Switch</b> for the model's inheritance hierarchy.
* It supports the call {@link #doSwitch(EObject) doSwitch(object)}
* to invoke the <code>caseXXX</code> method for each class of the model,
* starting with the actual class of the object
* and proceeding up the inheritance hierarchy
* until a non-null result is returned,
* which is the result of the switch.
* <!-- end-user-doc -->
* @see kieker.tools.slastic.metamodel.executionEnvironment.ExecutionEnvironmentPackage
* @generated
*/
public class ExecutionEnvironmentSwitch<T> {
/**
* The cached model package
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected static ExecutionEnvironmentPackage modelPackage;
/**
 * Creates an instance of the switch.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public ExecutionEnvironmentSwitch() {
    // Lazily cache the package singleton in the static field. NOTE(review): this init is
    // unsynchronized — benign here since eINSTANCE is itself a process-wide singleton.
    if (modelPackage == null) {
        modelPackage = ExecutionEnvironmentPackage.eINSTANCE;
    }
}
/**
 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the first non-null result returned by a <code>caseXXX</code> call.
 * @generated
 */
public T doSwitch(EObject theEObject) {
    // Dispatch on the object's actual EClass.
    return doSwitch(theEObject.eClass(), theEObject);
}
/**
 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the first non-null result returned by a <code>caseXXX</code> call.
 * @generated
 */
protected T doSwitch(EClass theEClass, EObject theEObject) {
    // Classes from this package dispatch by classifier ID; foreign classes fall back to their
    // first supertype, or to defaultCase() once the inheritance chain is exhausted.
    if (theEClass.eContainer() == modelPackage) {
        return doSwitch(theEClass.getClassifierID(), theEObject);
    }
    List<EClass> eSuperTypes = theEClass.getESuperTypes();
    if (eSuperTypes.isEmpty()) {
        return defaultCase(theEObject);
    }
    return doSwitch(eSuperTypes.get(0), theEObject);
}
/**
 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the first non-null result returned by a <code>caseXXX</code> call.
 * @generated
 */
protected T doSwitch(int classifierID, EObject theEObject) {
    // For each classifier, case methods are tried from most specific to most general; the
    // first non-null result wins, falling through to defaultCase() otherwise.
    switch (classifierID) {
        case ExecutionEnvironmentPackage.EXECUTION_CONTAINER: {
            ExecutionContainer executionContainer = (ExecutionContainer)theEObject;
            T result = caseExecutionContainer(executionContainer);
            if (result == null) result = caseFQNamedEntity(executionContainer);
            if (result == null) result = caseNamedEntity(executionContainer);
            if (result == null) result = caseEntity(executionContainer);
            if (result == null) result = defaultCase(theEObject);
            return result;
        }
        case ExecutionEnvironmentPackage.NETWORK_LINK: {
            NetworkLink networkLink = (NetworkLink)theEObject;
            T result = caseNetworkLink(networkLink);
            if (result == null) result = caseFQNamedEntity(networkLink);
            if (result == null) result = caseNamedEntity(networkLink);
            if (result == null) result = caseEntity(networkLink);
            if (result == null) result = defaultCase(theEObject);
            return result;
        }
        case ExecutionEnvironmentPackage.RESOURCE: {
            Resource resource = (Resource)theEObject;
            T result = caseResource(resource);
            if (result == null) result = defaultCase(theEObject);
            return result;
        }
        case ExecutionEnvironmentPackage.EXECUTION_ENVIRONMENT_MODEL: {
            ExecutionEnvironmentModel executionEnvironmentModel = (ExecutionEnvironmentModel)theEObject;
            T result = caseExecutionEnvironmentModel(executionEnvironmentModel);
            if (result == null) result = caseSLAsticModel(executionEnvironmentModel);
            if (result == null) result = defaultCase(theEObject);
            return result;
        }
        case ExecutionEnvironmentPackage.MEM_SWAP_RESOURCE_SPECIFICATION: {
            MemSwapResourceSpecification memSwapResourceSpecification = (MemSwapResourceSpecification)theEObject;
            T result = caseMemSwapResourceSpecification(memSwapResourceSpecification);
            if (result == null) result = caseResourceSpecification(memSwapResourceSpecification);
            if (result == null) result = caseNamedEntity(memSwapResourceSpecification);
            if (result == null) result = caseEntity(memSwapResourceSpecification);
            if (result == null) result = defaultCase(theEObject);
            return result;
        }
        case ExecutionEnvironmentPackage.RESOURCE_SPECIFICATION: {
            ResourceSpecification resourceSpecification = (ResourceSpecification)theEObject;
            T result = caseResourceSpecification(resourceSpecification);
            if (result == null) result = caseNamedEntity(resourceSpecification);
            if (result == null) result = caseEntity(resourceSpecification);
            if (result == null) result = defaultCase(theEObject);
            return result;
        }
        default: return defaultCase(theEObject);
    }
}
/**
* Returns the result of interpreting the object as an instance of '<em>Execution Container</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Execution Container</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseExecutionContainer(ExecutionContainer object) {
    // Default: unhandled; override and return non-null to terminate the switch here.
    return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Network Link</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Network Link</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseNetworkLink(NetworkLink object) {
    // Default: unhandled; override and return non-null to terminate the switch here.
    return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Resource</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Resource</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseResource(Resource object) {
    // Default: unhandled; override and return non-null to terminate the switch here.
    return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Model</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Model</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseExecutionEnvironmentModel(ExecutionEnvironmentModel object) {
    // Default: unhandled; override and return non-null to terminate the switch here.
    return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Mem Swap Resource Specification</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Mem Swap Resource Specification</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseMemSwapResourceSpecification(MemSwapResourceSpecification object) {
    // Default: unhandled; override and return non-null to terminate the switch here.
    return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Resource Specification</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Resource Specification</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseResourceSpecification(ResourceSpecification object) {
    // Default: unhandled; override and return non-null to terminate the switch here.
    return null;
}
/**
 * Returns the result of interpreting the object as an instance of '<em>Entity</em>'.
 * <!-- begin-user-doc -->
 * This implementation returns null;
 * returning a non-null result will terminate the switch.
 * <!-- end-user-doc -->
 * @param object the target of the switch.
 * @return the result of interpreting the object as an instance of '<em>Entity</em>'.
 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
 * @generated
 */
public T caseEntity(Entity object) {
    // Generated EMF switch stub: override to handle this case; null falls through.
    return null;
}
/**
 * Returns the result of interpreting the object as an instance of '<em>Named Entity</em>'.
 * <!-- begin-user-doc -->
 * This implementation returns null;
 * returning a non-null result will terminate the switch.
 * <!-- end-user-doc -->
 * @param object the target of the switch.
 * @return the result of interpreting the object as an instance of '<em>Named Entity</em>'.
 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
 * @generated
 */
public T caseNamedEntity(NamedEntity object) {
    // Generated EMF switch stub: override to handle this case; null falls through.
    return null;
}
/**
 * Returns the result of interpreting the object as an instance of '<em>FQ Named Entity</em>'.
 * <!-- begin-user-doc -->
 * This implementation returns null;
 * returning a non-null result will terminate the switch.
 * <!-- end-user-doc -->
 * @param object the target of the switch.
 * @return the result of interpreting the object as an instance of '<em>FQ Named Entity</em>'.
 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
 * @generated
 */
public T caseFQNamedEntity(FQNamedEntity object) {
    // Generated EMF switch stub: override to handle this case; null falls through.
    return null;
}
/**
 * Returns the result of interpreting the object as an instance of '<em>SL Astic Model</em>'.
 * <!-- begin-user-doc -->
 * This implementation returns null;
 * returning a non-null result will terminate the switch.
 * <!-- end-user-doc -->
 * @param object the target of the switch.
 * @return the result of interpreting the object as an instance of '<em>SL Astic Model</em>'.
 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
 * @generated
 */
public T caseSLAsticModel(SLAsticModel object) {
    // Generated EMF switch stub: override to handle this case; null falls through.
    return null;
}
/**
 * Returns the result of interpreting the object as an instance of '<em>EObject</em>'.
 * <!-- begin-user-doc -->
 * This implementation returns null;
 * returning a non-null result will terminate the switch, but this is the last case anyway.
 * <!-- end-user-doc -->
 * @param object the target of the switch.
 * @return the result of interpreting the object as an instance of '<em>EObject</em>'.
 * @see #doSwitch(org.eclipse.emf.ecore.EObject)
 * @generated
 */
public T defaultCase(EObject object) {
    // Generated EMF switch fallback: invoked when no case method returned a result.
    return null;
}
} //ExecutionEnvironmentSwitch
| |
/*
* Copyright 2014-2015 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.common.dto.plugin;
import java.io.Serializable;
import java.util.Arrays;
import org.kaaproject.avro.ui.shared.RecordField;
import org.kaaproject.kaa.common.dto.AbstractDetailDto;
import org.kaaproject.kaa.common.dto.HasId;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties({"rawConfiguration","fieldConfiguration"})
public abstract class PluginDto extends AbstractDetailDto implements HasId, Serializable {

    private static final long serialVersionUID = -5156203569187681620L;

    private String id;
    private String applicationId;
    private String pluginTypeName;
    private String pluginClassName;
    // binary and JSON views of the same plugin configuration; the record-field
    // form is used by the Avro UI and excluded from JSON serialization
    private byte[] rawConfiguration;
    private String jsonConfiguration;
    private RecordField fieldConfiguration;

    /** Creates an empty DTO. */
    public PluginDto() {
        super();
    }

    /**
     * Copy constructor: shallow-copies every field of {@code pluginDto},
     * including its parent {@link AbstractDetailDto} state.
     */
    public PluginDto(PluginDto pluginDto) {
        super(pluginDto);
        id = pluginDto.getId();
        applicationId = pluginDto.getApplicationId();
        pluginTypeName = pluginDto.getPluginTypeName();
        pluginClassName = pluginDto.getPluginClassName();
        rawConfiguration = pluginDto.getRawConfiguration();
        jsonConfiguration = pluginDto.getJsonConfiguration();
        fieldConfiguration = pluginDto.getFieldConfiguration();
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getApplicationId() {
        return applicationId;
    }

    public void setApplicationId(String applicationId) {
        this.applicationId = applicationId;
    }

    public String getPluginTypeName() {
        return pluginTypeName;
    }

    public void setPluginTypeName(String pluginTypeName) {
        this.pluginTypeName = pluginTypeName;
    }

    public String getPluginClassName() {
        return pluginClassName;
    }

    public void setPluginClassName(String pluginClassName) {
        this.pluginClassName = pluginClassName;
    }

    public byte[] getRawConfiguration() {
        return rawConfiguration;
    }

    public void setRawConfiguration(byte[] rawConfiguration) {
        this.rawConfiguration = rawConfiguration;
    }

    public String getJsonConfiguration() {
        return jsonConfiguration;
    }

    public void setJsonConfiguration(String jsonConfiguration) {
        this.jsonConfiguration = jsonConfiguration;
    }

    public RecordField getFieldConfiguration() {
        return fieldConfiguration;
    }

    public void setFieldConfiguration(RecordField fieldConfiguration) {
        this.fieldConfiguration = fieldConfiguration;
    }

    // folds one null-safe field hash into the running 31-based accumulator,
    // producing exactly the values of the classic generated hashCode
    private static int fold(int seed, Object o) {
        return 31 * seed + (o == null ? 0 : o.hashCode());
    }

    // null-safe equality identical to the generated null-check pattern
    private static boolean same(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = fold(result, applicationId);
        result = fold(result, fieldConfiguration);
        result = fold(result, id);
        result = fold(result, jsonConfiguration);
        result = fold(result, pluginClassName);
        result = fold(result, pluginTypeName);
        return 31 * result + Arrays.hashCode(rawConfiguration);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!super.equals(obj) || getClass() != obj.getClass()) {
            return false;
        }
        PluginDto other = (PluginDto) obj;
        return same(applicationId, other.applicationId)
                && same(fieldConfiguration, other.fieldConfiguration)
                && same(id, other.id)
                && same(jsonConfiguration, other.jsonConfiguration)
                && same(pluginClassName, other.pluginClassName)
                && same(pluginTypeName, other.pluginTypeName)
                && Arrays.equals(rawConfiguration, other.rawConfiguration);
    }

    @Override
    public String toString() {
        return "PluginDto [id=" + id
                + ", applicationId=" + applicationId
                + ", pluginTypeName=" + pluginTypeName
                + ", pluginClassName=" + pluginClassName
                + ", rawConfiguration=" + Arrays.toString(rawConfiguration)
                + ", jsonConfiguration=" + jsonConfiguration
                + ", fieldConfiguration=" + fieldConfiguration
                + ", parent=" + super.toString()
                + "]";
    }
}
| |
package org.zstack.core.cloudbus;
import org.apache.logging.log4j.ThreadContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.*;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.RestClientException;
import org.zstack.core.CoreGlobalProperty;
import org.zstack.core.MessageCommandRecorder;
import org.zstack.core.Platform;
import org.zstack.core.asyncbatch.While;
import org.zstack.core.componentloader.PluginRegistry;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.core.retry.Retry;
import org.zstack.core.retry.RetryCondition;
import org.zstack.core.thread.AsyncThread;
import org.zstack.core.thread.SyncTask;
import org.zstack.core.thread.ThreadFacade;
import org.zstack.core.thread.ThreadFacadeImpl;
import org.zstack.core.timeout.ApiTimeoutManager;
import org.zstack.header.Constants;
import org.zstack.header.Service;
import org.zstack.header.apimediator.StopRoutingException;
import org.zstack.header.core.ExceptionSafe;
import org.zstack.header.core.FutureReturnValueCompletion;
import org.zstack.header.core.NoErrorCompletion;
import org.zstack.header.core.NopeNoErrorCompletion;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.header.exception.CloudConfigureFailException;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.message.*;
import org.zstack.header.rest.RESTFacade;
import org.zstack.header.rest.TimeoutRestTemplate;
import org.zstack.header.search.APISearchMessage;
import org.zstack.header.search.APISearchReply;
import org.zstack.utils.DebugUtils;
import org.zstack.utils.TaskContext;
import org.zstack.utils.Utils;
import org.zstack.utils.gson.JSONObjectUtil;
import org.zstack.utils.logging.CLogger;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import static org.zstack.core.Platform.*;
import static org.zstack.utils.BeanUtils.getProperty;
import static org.zstack.utils.BeanUtils.setProperty;
public class CloudBusImpl3 implements CloudBus, CloudBusIN {
private static final CLogger logger = Utils.getLogger(CloudBusImpl3.class);
// ---- injected collaborators ----
@Autowired
private ThreadFacade thdf;
@Autowired
private ErrorFacade errf;
@Autowired
private ApiTimeoutManager timeoutMgr;
@Autowired
private ResourceDestinationMaker destMaker;
@Autowired
private PluginRegistry pluginRgty;
@Autowired
private DeadMessageManager deadMessageManager;
// ---- message header keys used throughout the bus ----
private final String NO_NEED_REPLY_MSG = "noReply";
private final String CORRELATION_ID = "correlationId";
private final String REPLY_TO = "replyTo";
private final String IS_MESSAGE_REPLY = "isReply";
private final String THREAD_CONTEXT_STACK = "thread-context-stack";
private final String THREAD_CONTEXT = "thread-context";
private final String TASK_CONTEXT = "task-context";
// separator between <management node uuid> and <service name> in a service id
final static String SERVICE_ID_SPLITTER = ":::";
// local-node service ids for plain messages and for events
private String SERVICE_ID = makeLocalServiceId("cloudbus.messages");
private String EVENT_ID = makeLocalServiceId("cloudbus.events");
// NOTE(review): 'services' is not referenced in this chunk — confirm it is used elsewhere
private List<Service> services = new ArrayList<>();
// ---- extension points and interceptors, keyed by message class; the *ForAll
// ---- lists apply to every message/event regardless of class ----
private Map<Class, List<ReplyMessagePreSendingExtensionPoint>> replyMessageMarshaller = new ConcurrentHashMap<Class, List<ReplyMessagePreSendingExtensionPoint>>();
private Map<Class, List<BeforeDeliveryMessageInterceptor>> beforeDeliveryMessageInterceptors = new HashMap<Class, List<BeforeDeliveryMessageInterceptor>>();
private Map<Class, List<BeforeSendMessageInterceptor>> beforeSendMessageInterceptors = new HashMap<Class, List<BeforeSendMessageInterceptor>>();
private Map<Class, List<BeforePublishEventInterceptor>> beforeEventPublishInterceptors = new HashMap<Class, List<BeforePublishEventInterceptor>>();
private List<BeforeDeliveryMessageInterceptor> beforeDeliveryMessageInterceptorsForAll = new ArrayList<BeforeDeliveryMessageInterceptor>();
private List<BeforeSendMessageInterceptor> beforeSendMessageInterceptorsForAll = new ArrayList<BeforeSendMessageInterceptor>();
private List<BeforePublishEventInterceptor> beforeEventPublishInterceptorsForAll = new ArrayList<BeforePublishEventInterceptor>();
// event type string -> (subscription key -> listener)
private Map<String, Map<String, CloudBusEventListener>> eventListeners = new ConcurrentHashMap<>();
// message class names filtered in/out of trace logging (see islogMessage)
private Set<String> filterMsgNames = new HashSet<>();
// registered service endpoints and in-flight request envelopes
private Map<String, EndPoint> endPoints = new HashMap<>();
private Map<String, Envelope> envelopes = new ConcurrentHashMap<>();
// service id -> consumer that handles messages addressed to it
private Map<String, java.util.function.Consumer> messageConsumers = new ConcurrentHashMap<>();
// shared REST template used for inter-management-node HTTP delivery
private static TimeoutRestTemplate http = RESTFacade.createRestTemplate(CoreGlobalProperty.REST_FACADE_READ_TIMEOUT, CoreGlobalProperty.REST_FACADE_CONNECT_TIMEOUT);
public static final String HTTP_BASE_URL = "/cloudbus";
// Instance initializer: parse the comma-separated MESSAGE_LOG property into
// the trace-log filter set, and force a fixed HTTP port/context in unit tests.
{
    if (CloudBusGlobalProperty.MESSAGE_LOG != null) {
        String[] msgNames = CloudBusGlobalProperty.MESSAGE_LOG.split(",");
        for (String name : msgNames) {
            filterMsgNames.add(name.trim());
        }
    }
    if (CoreGlobalProperty.UNIT_TEST_ON && !CoreGlobalProperty.SIMULATORS_ON) {
        CloudBusGlobalProperty.HTTP_CONTEXT_PATH = "";
        CloudBusGlobalProperty.HTTP_PORT = 8989;
    }
}
/**
 * Extracts the management node UUID from a full service id of the form
 * {@code <management node uuid>:::<service name>}.
 *
 * @param serviceID the full service id
 * @return the management node UUID (the part before the splitter)
 * @throws CloudRuntimeException if the id does not contain the splitter
 */
public static String getManagementNodeUUIDFromServiceID(String serviceID) {
    // Limit the split to 2 so a service name that itself contains ":::" is still
    // parsed; this matches how MessageSender parses service ids.
    String[] ss = serviceID.split(SERVICE_ID_SPLITTER, 2);
    if (ss.length != 2) {
        throw new CloudRuntimeException(String.format("%s is not a valid message ID", serviceID));
    }
    return ss[0];
}
/**
 * Tracks one in-flight request that expects a reply. Exactly one of
 * {@link #ack(MessageReply)} or {@link #timeout()} completes the request.
 */
private abstract class Envelope {
    // creation timestamp, recorded only when statistics are enabled
    long startTime;
    {
        if (CloudBusGlobalConfig.STATISTICS_ON.value(Boolean.class)) {
            startTime = System.currentTimeMillis();
        }
    }
    // statistics hook; the implementation is currently disabled (kept for reference)
    void count(Message msg) {
        /*
        if (!CloudBusGlobalConfig.STATISTICS_ON.value(Boolean.class)) {
            return;
        }
        long timeCost = System.currentTimeMillis() - startTime;
        MessageStatistic statistic = statistics.get(msg.getClass().getName());
        statistic.count(timeCost);
        */
    }
    // invoked when the matching reply arrives
    abstract void ack(MessageReply reply);
    // invoked when the request's timeout fires first
    abstract void timeout();
}
/** Handle returned by {@link #on}; cancelling it unregisters the consumer. */
interface ConsumerReceipt {
    void cancel();
}
/**
 * Registers {@code consumer} as the sole handler for {@code serviceId}.
 * Re-registering the same consumer is a no-op; registering a different one
 * for an already-owned id is a programming error.
 */
private ConsumerReceipt on(String serviceId, Consumer consumer) {
    Consumer existing = messageConsumers.get(serviceId);
    if (existing != null && existing != consumer) {
        throw new CloudRuntimeException(String.format("duplicate Consumer[%s,%s] for the same service id[%s]", existing.getClass(), consumer.getClass(), serviceId));
    }
    messageConsumers.put(serviceId, consumer);
    return () -> messageConsumers.remove(serviceId);
}
/**
 * Consumer bound to the local event service id: fans an incoming event out to
 * every listener subscribed to its type. Listener failures are isolated by
 * {@code @ExceptionSafe} so one bad listener cannot starve the others.
 */
private Consumer<Event> eventConsumer = new Consumer<Event>() {
    @ExceptionSafe
    private void callListener(Event e, CloudBusEventListener listener) {
        listener.handleEvent(e);
    }
    @Override
    @AsyncThread
    public void accept(Event evt) {
        logger.debug(String.format("[event received]: %s", dumpMessage(evt)));
        Map<String, CloudBusEventListener> ls = eventListeners.get(evt.getType().toString());
        if (ls == null) {
            // nobody subscribed to this event type
            return;
        }
        ls.values().forEach(l -> callListener(evt, l));
    }
};
/**
 * Consumer bound to the local message service id. Replies are matched to
 * their in-flight envelope via the correlation-id header; anything else that
 * lands here has no registered handler and is treated as unknown.
 */
private Consumer<Message> messageConsumer = new Consumer<Message>() {
    @Override
    @AsyncThread
    public void accept(Message msg) {
        setThreadLoggingContext(msg);
        if (logger.isTraceEnabled() && islogMessage(msg)) {
            logger.trace(String.format("[msg received]: %s", dumpMessage(msg)));
        }
        if (msg instanceof MessageReply) {
            beforeDeliverMessage(msg);
            MessageReply r = (MessageReply) msg;
            String correlationId = r.getHeaderEntry(CORRELATION_ID);
            Envelope e = envelopes.get(correlationId);
            if (e == null) {
                // the request already timed out, or the sender didn't wait for a reply
                logger.warn(String.format("received a message reply but no envelope found," +
                        "maybe the message request has been timeout or sender doesn't care about reply." +
                        "drop it. reply dump:\n%s", dumpMessage(r)));
                return;
            }
            e.ack(r);
        } else {
            dealWithUnknownMessage(msg);
        }
    }
};
/** Renders a message as "fully.qualified.ClassName {log-safe JSON}" for logging. */
private String dumpMessage(Message msg) {
    return msg.getClass().getName() + " " + CloudBusGson.toLogSafeJson(msg);
}
/** A registered service endpoint that can be toggled on/off the bus. */
private interface EndPoint {
    void active();
    void inactive();
}
/** Activates the endpoint registered for {@code serv}, delegating by service id. */
@Override
public void activeService(Service serv) {
    activeService(serv.getId());
}
/**
 * Activates the endpoint registered under {@code id}.
 * Fails with a descriptive error (instead of a bare NPE) when no endpoint
 * was ever registered for the id.
 */
@Override
public void activeService(String id) {
    EndPoint e = endPoints.get(id);
    DebugUtils.Assert(e != null, String.format("cannot find EndPoint for service id[%s]", id));
    e.active();
}
/** Deactivates the endpoint registered for {@code serv}, delegating by service id. */
@Override
public void deActiveService(Service serv) {
    deActiveService(serv.getId());
}
/**
 * Deactivates the endpoint registered under {@code id}.
 * Fails with a descriptive error (instead of a bare NPE) when no endpoint
 * was ever registered for the id.
 */
@Override
public void deActiveService(String id) {
    EndPoint e = endPoints.get(id);
    DebugUtils.Assert(e != null, String.format("cannot find EndPoint for service id[%s]", id));
    e.inactive();
}
/** Sends a fire-and-forget message; 'true' means no reply is expected. */
@Override
public void send(Message msg) {
    send(msg, true);
}
/** Sends each message in the list independently, fire-and-forget. */
@Override
public <T extends Message> void send(List<T> msgs) {
    for (T m : msgs) {
        send(m);
    }
}
/**
 * Sends an API message and invokes {@code consumer} with the APIEvent whose
 * apiId matches the message id.
 * NOTE(review): the subscription created here is never unsubscribed even after
 * the matching event arrives — confirm the listener lifecycle is handled elsewhere.
 */
@Override
public void send(APIMessage msg, Consumer<APIEvent> consumer) {
    subscribeEvent((e) -> {
        APIEvent ae = (APIEvent) e;
        if (ae.getApiId().equals(msg.getId())) {
            consumer.accept(ae);
            return true;
        }
        return false;
    }, new APIEvent());
    send(msg);
}
// Lets the timeout manager stamp the message with its effective timeout value.
private void evaluateMessageTimeout(NeedReplyMessage msg) {
    timeoutMgr.setMessageTimeout(msg);
}
/** Builds a reply carrying {@code err}, correlated to request {@code m} by its id. */
private MessageReply createErrorReply(NeedReplyMessage m, ErrorCode err) {
    MessageReply reply = new MessageReply();
    reply.setError(err);
    reply.putHeaderEntry(CORRELATION_ID, m.getId());
    return reply;
}
// Builds the error reply delivered to the sender when a request times out.
private MessageReply createTimeoutReply(NeedReplyMessage m) {
    return createErrorReply(m, touterr(m.toErrorString()));
}
/**
 * Sends a request and invokes {@code callback} exactly once — with the real
 * reply, or with a synthetic timeout reply if no reply arrives in time.
 * The AtomicBoolean 'called' resolves the race between ack and timeout.
 * NOTE(review): the timeout task is armed before the envelope is registered in
 * 'envelopes'; with an extremely short timeout the envelope could be inserted
 * after timeout() already removed it, leaving a stale entry — confirm.
 */
@Override
public void send(NeedReplyMessage msg, CloudBusCallBack callback) {
    evaluateMessageTimeout(msg);
    Envelope e = new Envelope() {
        AtomicBoolean called = new AtomicBoolean(false);
        final Envelope self = this;
        ThreadFacadeImpl.TimeoutTaskReceipt timeoutTaskReceipt = thdf.submitTimeoutTask(self::timeout, TimeUnit.MILLISECONDS, msg.getTimeout());
        @Override
        public void ack(MessageReply reply) {
            count(msg);
            envelopes.remove(msg.getId());
            if (!called.compareAndSet(false, true)) {
                // timeout won the race; drop the late reply
                return;
            }
            timeoutTaskReceipt.cancel();
            callback.run(reply);
        }
        @Override
        public void timeout() {
            envelopes.remove(msg.getId());
            if (!called.compareAndSet(false, true)) {
                // the real reply won the race
                return;
            }
            callback.run(createTimeoutReply(msg));
        }
    };
    envelopes.put(msg.getId(), e);
    // 'false': the reply path is handled by the envelope, not dropped
    send(msg, false);
}
/** Sends all messages fully in parallel and collects the replies. */
@Override
public void send(List<? extends NeedReplyMessage> msgs, CloudBusListCallBack callBack) {
    send(msgs, msgs.size(), callBack);
}
/**
 * Sends the messages with at most {@code parallelLevel} in flight at once and
 * delivers the replies to {@code callBack} in the same order as {@code msgs}.
 */
@Override
public void send(List<? extends NeedReplyMessage> msgs, int parallelLevel, CloudBusListCallBack callBack) {
    DebugUtils.Assert(!msgs.isEmpty(), "you cannot pass an empty message list to me");
    msgs.forEach(this::evaluateMessageTimeout);
    // replies arrive on multiple threads; keyed by message id so order can be restored
    Map<String, MessageReply> replies = Collections.synchronizedMap(new HashMap<>(msgs.size()));
    new While<>(msgs).step((msg, completion) -> send(msg, new CloudBusCallBack(completion) {
        @Override
        public void run(MessageReply reply) {
            replies.put(msg.getId(), reply);
            completion.done();
        }
    }), parallelLevel).run(new NoErrorCompletion(callBack) {
        @Override
        public void done() {
            List<MessageReply> results = new ArrayList<>();
            assert msgs.size() == replies.size();
            // rebuild reply list in request order
            msgs.forEach(msg -> results.add(replies.get(msg.getId())));
            callBack.run(results);
        }
    });
}
/**
 * Sends the messages with bounded parallelism, invoking {@code callback} once
 * per (message, reply) pair as each reply arrives; no final aggregation.
 */
@Override
public void send(List<? extends NeedReplyMessage> msgs, int parallelLevel, CloudBusSteppingCallback callback) {
    DebugUtils.Assert(!msgs.isEmpty(), "you cannot pass an empty message list to me");
    msgs.forEach(this::evaluateMessageTimeout);
    new While<>(msgs).step((msg, completion) -> send(msg, new CloudBusCallBack(completion) {
        @Override
        public void run(MessageReply reply) {
            callback.run(msg, reply);
            completion.done();
        }
    }), parallelLevel).run(new NopeNoErrorCompletion());
}
/** Routes each message in the list independently. */
@Override
public void route(List<Message> msgs) {
    for (Message m : msgs) {
        route(m);
    }
}
/**
 * Routes a message whose service id was set by the caller; request-type
 * messages get their timeout stamped before delivery.
 */
@Override
public void route(Message msg) {
    String serviceId = msg.getServiceId();
    if (serviceId == null) {
        throw new IllegalArgumentException(String.format("service id cannot be null: %s", msg.getClass().getName()));
    }
    if (msg instanceof NeedReplyMessage) {
        evaluateMessageTimeout((NeedReplyMessage) msg);
    }
    doSendAndCallExtensions(msg);
}
/**
 * Lets extension points registered for this reply/event class reshape it
 * before it is sent. {@code msgReq} is the originating request, or null for events.
 */
private void callReplyPreSendingExtensions(Message msg, NeedReplyMessage msgReq) {
    List<ReplyMessagePreSendingExtensionPoint> exts = replyMessageMarshaller.get(msg.getClass());
    if (exts == null) {
        return;
    }
    for (ReplyMessagePreSendingExtensionPoint ext : exts) {
        ext.marshalReplyMessageBeforeSending(msg, msgReq);
    }
}
/**
 * Sends {@code reply} back to the sender of {@code request}: stamps the
 * reply headers (reply flag, correlation id) and addresses it to the
 * requester's replyTo service id. Dropped silently when the request opted
 * out of replies via the noReply header.
 */
@Override
public void reply(Message request, MessageReply reply) {
    if (Boolean.valueOf(request.getHeaderEntry(NO_NEED_REPLY_MSG))) {
        if (logger.isTraceEnabled()) {
            logger.trace(String.format("%s in message%s is set, drop reply%s", NO_NEED_REPLY_MSG,
                    dumpMessage(request), dumpMessage(reply)));
        }
        return;
    }
    reply.getHeaders().put(IS_MESSAGE_REPLY, Boolean.TRUE.toString());
    reply.putHeaderEntry(CORRELATION_ID, request.getId());
    reply.setServiceId(request.getHeaderEntry(REPLY_TO));
    if (request instanceof NeedReplyMessage) {
        callReplyPreSendingExtensions(reply, (NeedReplyMessage) request);
    }
    doSend(reply);
}
/** Publishes each event in the list independently. */
@Override
public void publish(List<Event> events) {
    for (Event evt : events) {
        publish(evt);
    }
}
/**
 * Publishes an event cluster-wide after running the per-class and global
 * pre-publish interceptors; any interceptor may veto by throwing
 * StopRoutingException, which silently drops the event.
 */
@Override
public void publish(Event event) {
    if (event instanceof APIEvent) {
        APIEvent aevt = (APIEvent) event;
        DebugUtils.Assert(aevt.getApiId() != null, String.format("apiId of %s cannot be null", aevt.getClass().getName()));
    }
    // null request: events have no originating NeedReplyMessage
    callReplyPreSendingExtensions(event, null);
    // 'c' tracks the last interceptor invoked so the veto can be attributed in the log
    BeforePublishEventInterceptor c = null;
    try {
        List<BeforePublishEventInterceptor> is = beforeEventPublishInterceptors.get(event.getClass());
        if (is != null) {
            for (BeforePublishEventInterceptor i : is) {
                c = i;
                i.beforePublishEvent(event);
            }
        }
        for (BeforePublishEventInterceptor i : beforeEventPublishInterceptorsForAll) {
            c = i;
            i.beforePublishEvent(event);
        }
    } catch (StopRoutingException e) {
        if (logger.isTraceEnabled()) {
            logger.trace(String.format("BeforePublishEventInterceptor[%s] stop publishing event: %s",
                    c == null ? "null" : c.getClass().getName(), dumpMessage(event)));
        }
        return;
    }
    doPublish(event);
}
/**
 * Delivers one message (or event) to its destination: locally via the
 * registered consumer when the target management node is this node, or over
 * HTTP to the owning node otherwise. Events are broadcast to every node.
 */
class MessageSender {
    Message msg;
    // uuid of the management node that owns the destination service
    String managementNodeId;
    String serviceId;
    // true when delivery can stay in-process
    boolean localSend;
    public MessageSender(Message msg) {
        this.msg = msg;
        // events all go to the shared event service id
        serviceId = msg instanceof Event ? EVENT_ID : msg.getServiceId();
        // service id format: <management node uuid>:::<service name>; a bare
        // name (no splitter) is implicitly local
        String[] ids = serviceId.split(SERVICE_ID_SPLITTER, 2);
        managementNodeId = ids.length == 1 ? Platform.getManagementServerId() : ids[0];
        localSend = !CloudBusGlobalProperty.HTTP_ALWAYS && managementNodeId.equals(Platform.getManagementServerId());
    }
    void send() {
        try {
            doSend();
        } catch (Throwable th) {
            // delivery failure surfaces as an error reply (or an exception for API messages)
            replyErrorIfNeeded(operr(th.getMessage()));
        }
    }
    private void doSend() {
        if (msg instanceof Event) {
            eventSend();
            return;
        }
        if (localSend) {
            localSend();
        } else {
            httpSend();
        }
    }
    private void httpSend() {
        buildSchema(msg);
        try {
            String ip = destMaker.getNodeInfo(managementNodeId).getNodeIP();
            httpSend(ip);
        } catch (ManagementNodeNotFoundException e) {
            if (msg instanceof MessageReply) {
                // give the dead-message manager a chance to re-resolve the node
                // (e.g. after a node restart) and retry the delivery once
                if (!deadMessageManager.handleManagementNodeNotFoundError(managementNodeId, msg, () -> {
                    String ip = destMaker.getNodeInfo(managementNodeId).getNodeIP();
                    httpSend(ip);
                })) {
                    throw e;
                }
            } else {
                throw e;
            }
        }
    }
    private void httpSend(String ip) {
        String url = CloudBusGlobalProperty.HTTP_CONTEXT_PATH.isEmpty() ? String.format("http://%s:%s%s",
                ip, CloudBusGlobalProperty.HTTP_PORT, HTTP_BASE_URL) : String.format("http://%s:%s/%s/%s",
                ip, CloudBusGlobalProperty.HTTP_PORT, CloudBusGlobalProperty.HTTP_CONTEXT_PATH, HTTP_BASE_URL);
        HttpHeaders headers = new HttpHeaders();
        HttpEntity<String> req = new HttpEntity<>(CloudBusGson.toJson(msg), headers);
        try {
            // retry transient transport errors with a 2s interval
            ResponseEntity<String> rsp = new Retry<ResponseEntity<String>>() {
                {
                    interval = 2;
                }
                @Override
                @RetryCondition(onExceptions = {IOException.class, RestClientException.class, HttpClientErrorException.class})
                protected ResponseEntity<String> call() {
                    return http.exchange(url, HttpMethod.POST, req, String.class);
                }
            }.run();
            if (!rsp.getStatusCode().is2xxSuccessful()) {
                replyErrorIfNeeded(operr("HTTP ERROR, status code: %s, body: %s", rsp.getStatusCode(), rsp.getBody()));
            }
        } catch (OperationFailureException e) {
            replyErrorIfNeeded(e.getErrorCode());
        } catch (Throwable e) {
            replyErrorIfNeeded(operr(e.getMessage()));
        }
    }
    private void replyErrorIfNeeded(ErrorCode errorCode) {
        if (msg instanceof APIMessage) {
            // API callers get the failure as an exception from the send path
            throw new OperationFailureException(errorCode);
        } else if (msg instanceof NeedReplyMessage) {
            // inject a synthetic error reply into the local reply path
            MessageReply reply = createErrorReply((NeedReplyMessage) msg, errorCode);
            messageConsumer.accept(reply);
        }
    }
    private void buildSchema(Message msg) {
        // attach the JSON schema header needed to deserialize on the remote node
        try {
            msg.putHeaderEntry(CloudBus.HEADER_SCHEMA, new JsonSchemaBuilder(msg).build());
        } catch (Exception e) {
            throw new CloudRuntimeException(e);
        }
    }
    private void eventSend() {
        buildSchema(msg);
        // deliver locally, then fan out to every other management node
        localSend();
        destMaker.getAllNodeInfo().forEach(node -> {
            if (!node.getNodeUuid().equals(Platform.getManagementServerId())) {
                httpSend(node.getNodeIP());
            }
        });
    }
    private void localSend() {
        Consumer consumer = messageConsumers.get(serviceId);
        if (consumer != null) {
            consumer.accept(msg);
        } else {
            dealWithUnknownMessage(msg);
        }
    }
}
// Final event publishing step: propagate the caller's thread/task context
// into the event headers, trace-log it, and hand it to the sender.
private void doPublish(Event evt) {
    evalThreadContextToMessage(evt);
    if (logger.isTraceEnabled() && islogMessage(evt)) {
        logger.trace(String.format("[event publish]: %s", dumpMessage(evt)));
    }
    new MessageSender(evt).send();
}
/**
 * Synchronous request/reply: blocks the calling thread until the reply
 * (or a timeout reply) arrives. Never call from an event-loop thread.
 */
@Override
public MessageReply call(NeedReplyMessage msg) {
    FutureReturnValueCompletion future = new FutureReturnValueCompletion(null);
    send(msg, new CloudBusCallBack(future) {
        @Override
        public void run(MessageReply reply) {
            future.success(reply);
        }
    });
    future.await();
    return future.getResult();
}
/**
 * Synchronous batch request/reply: blocks until every reply arrives;
 * replies are returned in the same order as {@code msgs}.
 */
@Override
public <T extends NeedReplyMessage> List<MessageReply> call(List<T> msgs) {
    FutureReturnValueCompletion future = new FutureReturnValueCompletion(null);
    DebugUtils.Assert(!msgs.isEmpty(), "cannot call empty messages");
    send(msgs, new CloudBusListCallBack(future) {
        @Override
        public void run(List<MessageReply> replies) {
            future.success(replies);
        }
    });
    future.await();
    return future.getResult();
}
/**
 * Rebuilds the log4j ThreadContext (and the task context) on the current
 * thread from headers carried by the message, so log lines on the handling
 * thread stay correlated with the originating API call/task.
 */
private void setThreadLoggingContext(Message msg) {
    ThreadContext.clearAll();
    if (msg instanceof APIMessage) {
        // API messages seed a fresh context from their own identity
        ThreadContext.put(Constants.THREAD_CONTEXT_API, msg.getId());
        ThreadContext.put(Constants.THREAD_CONTEXT_TASK_NAME, msg.getClass().getName());
    } else {
        // internal messages carry the sender's context in a header
        Map<String, String> ctx = msg.getHeaderEntry(THREAD_CONTEXT);
        if (ctx != null) {
            ThreadContext.putAll(ctx);
        }
    }
    if (msg.getHeaders().containsKey(THREAD_CONTEXT_STACK)) {
        List<String> taskStack = msg.getHeaderEntry(THREAD_CONTEXT_STACK);
        ThreadContext.setStack(taskStack);
    }
    if (msg.getHeaders().containsKey(TASK_CONTEXT)) {
        TaskContext.setTaskContext(msg.getHeaderEntry(TASK_CONTEXT));
    }
}
/**
 * Decides whether this message should appear in trace logs.
 * Read-API traffic can be muted wholesale; otherwise filterMsgNames acts as
 * a blacklist (FILTER_ALL) or a whitelist.
 */
private boolean islogMessage(Message msg) {
    boolean readApiTraffic = msg instanceof APISyncCallMessage || msg instanceof APIReply;
    if (CloudBusGlobalProperty.READ_API_LOG_OFF && readApiTraffic) {
        return false;
    }
    boolean listed = filterMsgNames.contains(msg.getClass().getName());
    return CloudBusGlobalProperty.MESSAGE_LOG_FILTER_ALL ? !listed : listed;
}
/**
 * Runs per-class delivery interceptors for the message, then the interceptors
 * registered for all message types.
 */
private void beforeDeliverMessage(Message msg) {
    List<BeforeDeliveryMessageInterceptor> is = beforeDeliveryMessageInterceptors.get(msg.getClass());
    if (is != null) {
        is.forEach(i -> i.beforeDeliveryMessage(msg));
    }
    beforeDeliveryMessageInterceptorsForAll.forEach(i -> i.beforeDeliveryMessage(msg));
}
/**
 * Registers a service on the bus: builds an endpoint whose consumer wraps
 * every incoming message in a SyncTask keyed by the service id, so the
 * service's syncLevel bounds its message-handling concurrency (0 = unbounded).
 * Handler failures are converted into error replies/events by message type.
 *
 * @throws CloudConfigureFailException declared by the interface
 */
@Override
public void registerService(Service serv) throws CloudConfigureFailException {
    int syncLevel = serv.getSyncLevel();
    EndPoint endPoint = new EndPoint() {
        ConsumerReceipt registration;
        Consumer<Message> consumer = msg -> {
            try {
                if (logger.isTraceEnabled() && islogMessage(msg)) {
                    logger.trace(String.format("[msg received]: %s", dumpMessage(msg)));
                }
                SyncTask<Void> task = new SyncTask<Void>() {
                    @Override
                    public String getSyncSignature() {
                        // all messages for one service share a signature, serializing per syncLevel
                        return serv.getId();
                    }
                    @Override
                    public int getSyncLevel() {
                        return syncLevel;
                    }
                    @Override
                    public String getName() {
                        return String.format("CloudBus EndPoint[%s]", serv.getId());
                    }
                    @Override
                    public Void call() {
                        setThreadLoggingContext(msg);
                        try {
                            beforeDeliverMessage(msg);
                            serv.handleMessage(msg);
                        } catch (Throwable t) {
                            // turn handler failures into error replies instead of losing them
                            logExceptionWithMessageDump(msg, t);
                            if (t instanceof OperationFailureException) {
                                replyErrorByMessageType(msg, ((OperationFailureException) t).getErrorCode());
                            } else {
                                replyErrorByMessageType(msg, inerr(t.getMessage()));
                            }
                        }
                        return null;
                    }
                };
                if (syncLevel == 0) {
                    // no concurrency limit requested
                    thdf.submit(task);
                } else {
                    thdf.syncSubmit(task);
                }
            } catch (Throwable t) {
                logger.warn("unhandled throwable", t);
            }
        };
        @Override
        public void active() {
            registration = on(serv.getId(), consumer);
        }
        @Override
        public void inactive() {
            if (registration != null) {
                registration.cancel();
            }
        }
    };
    DebugUtils.Assert(!endPoints.containsKey(serv.getId()), String.format("duplicate services[id:%s]", serv.getId()));
    endPoints.put(serv.getId(), endPoint);
    endPoint.active();
    logger.debug(String.format("registered service[%s]", serv.getId()));
}
/**
 * Unregisters a service: deactivates and drops its endpoint. A no-op when
 * the service was never registered.
 */
@Override
public void unregisterService(Service serv) {
    // remove() looks up and deletes in a single map operation, avoiding the
    // separate get-then-remove pair of calls
    EndPoint ep = endPoints.remove(serv.getId());
    if (ep != null) {
        ep.inactive();
    }
}
/**
 * Subscribes {@code listener} to every event type in {@code events}.
 * Each subscription gets a unique key so the same listener may be registered
 * multiple times independently.
 *
 * @return a receipt that removes the subscription for one event type or all of them
 */
@Override
public EventSubscriberReceipt subscribeEvent(CloudBusEventListener listener, Event... events) {
    String key = Platform.getUuid();
    for (Event event : events) {
        // parameterized map type instead of the raw 'Map' used before
        Map<String, CloudBusEventListener> m = eventListeners.computeIfAbsent(event.getType().toString(), k -> new ConcurrentHashMap<>());
        m.put(key, listener);
    }
    return new EventSubscriberReceipt() {
        @Override
        public void unsubscribe(Event e) {
            Map<String, CloudBusEventListener> m = eventListeners.get(e.getType().toString());
            // guard against event types that were never subscribed (the old code NPE'd here)
            if (m != null) {
                m.remove(key);
            }
        }
        @Override
        public void unsubscribeAll() {
            for (Event event : events) {
                unsubscribe(event);
            }
        }
    };
}
/**
 * Handles a message no service is registered for: answers with an
 * UNKNOWN_MESSAGE_ERROR in the shape the sender expects (APIReply, APIEvent,
 * or MessageReply; the APISyncCallMessage check must precede the broader
 * APIMessage check). Always dumps a stack trace for diagnosis.
 */
@Override
public void dealWithUnknownMessage(Message msg) {
    String details = String.format("No service deals with message: %s", dumpMessage(msg));
    if (msg instanceof APISyncCallMessage) {
        APIReply reply = new APIReply();
        reply.setError(err(SysErrors.UNKNOWN_MESSAGE_ERROR, details));
        reply.setSuccess(false);
        reply(msg, reply);
    } else if (msg instanceof APIMessage) {
        APIEvent evt = new APIEvent(msg.getId());
        evt.setError(err(SysErrors.UNKNOWN_MESSAGE_ERROR, details));
        publish(evt);
    } else if (msg instanceof NeedReplyMessage) {
        MessageReply reply = new MessageReply();
        reply.setError(err(SysErrors.UNKNOWN_MESSAGE_ERROR, details));
        reply.setSuccess(false);
        reply(msg, reply);
    }
    DebugUtils.dumpStackTrace("Dropped an unknown message, " + details);
}
private void replyErrorIfMessageNeedReply(Message msg, ErrorCode code) {
if (msg instanceof NeedReplyMessage) {
MessageReply reply = new MessageReply();
reply.setError(code);
reply.setSuccess(false);
reply(msg, reply);
}
}
/**
 * Replies with the error carried by {@code e}: operation failures keep their
 * structured ErrorCode, anything else degrades to the exception message.
 */
@Override
public void replyErrorByMessageType(Message msg, Exception e) {
    if (e instanceof OperationFailureException) {
        replyErrorByMessageType(msg, ((OperationFailureException) e).getErrorCode());
        return;
    }
    replyErrorByMessageType(msg, e.getMessage());
}
private void replyErrorIfMessageNeedReply(Message msg, String errStr) {
if (msg instanceof NeedReplyMessage) {
ErrorCode err = inerr(errStr);
replyErrorIfMessageNeedReply(msg, err);
} else {
DebugUtils.dumpStackTrace(String.format("An error happened when dealing with message[%s], because this message doesn't need a reply, we call it out loudly\nerror: %s\nmessage dump: %s", msg.getClass().getName(), errStr, dumpMessage(msg)));
}
}
// Convenience overload: wraps the plain string in an internal ErrorCode.
private void replyErrorExistingApiEvent(APIEvent evt, String err) {
    replyErrorExistingApiEvent(evt, inerr(err));
}
/** Marks an already-constructed API event as failed with {@code err} and publishes it. */
private void replyErrorExistingApiEvent(APIEvent evt, ErrorCode err) {
    evt.setSuccess(false);
    evt.setError(err);
    publish(evt);
}
/**
 * Answers an API message with a failure, in the shape its type requires:
 * APIReply for sync calls, APISearchReply for searches, a failed APIEvent
 * otherwise.
 */
private void replyErrorToApiMessage(APIMessage msg, ErrorCode err) {
    if (msg instanceof APISyncCallMessage) {
        APIReply reply = new APIReply();
        reply.setSuccess(false);
        reply.setError(err);
        reply(msg, reply);
        return;
    }
    if (msg instanceof APISearchMessage) {
        APISearchReply reply = new APISearchReply();
        reply.setSuccess(false);
        reply.setError(err);
        reply(msg, reply);
        return;
    }
    APIEvent evt = new APIEvent(msg.getId());
    evt.setSuccess(false);
    evt.setError(err);
    publish(evt);
}
// Convenience overload: wraps the plain string in an internal ErrorCode.
private void replyErrorToApiMessage(APIMessage msg, String err) {
    replyErrorToApiMessage(msg, inerr(err));
}
/**
 * Routes a string error to the reply channel matching the message type:
 * API messages, existing API events, or plain need-reply messages.
 */
@Override
public void replyErrorByMessageType(Message msg, String err) {
    if (msg instanceof APIMessage) {
        replyErrorToApiMessage((APIMessage) msg, err);
        return;
    }
    if (msg instanceof APIEvent) {
        replyErrorExistingApiEvent((APIEvent) msg, err);
        return;
    }
    replyErrorIfMessageNeedReply(msg, err);
}
/**
 * Routes a structured error to the reply channel matching the message type:
 * API messages, existing API events, or plain need-reply messages.
 */
@Override
public void replyErrorByMessageType(Message msg, ErrorCode err) {
    if (msg instanceof APIMessage) {
        replyErrorToApiMessage((APIMessage) msg, err);
        return;
    }
    if (msg instanceof APIEvent) {
        replyErrorExistingApiEvent((APIEvent) msg, err);
        return;
    }
    replyErrorIfMessageNeedReply(msg, err);
}
/**
 * Logs an unexpected throwable together with a dump of the message being
 * handled. Operation failures are expected business errors and are not logged.
 */
@Override
public void logExceptionWithMessageDump(Message msg, Throwable e) {
    if (e instanceof OperationFailureException) {
        return;
    }
    logger.warn(String.format("unhandled throwable happened when dealing with message[%s], dump: %s", msg.getClass().getName(), dumpMessage(msg)), e);
}
    // Builds a node-scoped service id of the form "<mgmtId><SERVICE_ID_SPLITTER><serviceId>".
    private String toServiceId(String serviceId, String mgmtId) {
        return mgmtId + SERVICE_ID_SPLITTER + serviceId;
    }
    @Override
    public String makeLocalServiceId(String serviceId) {
        // scope the service id to the management node this process runs on
        return toServiceId(serviceId, Platform.getManagementServerId());
    }
    @Override
    public void makeLocalServiceId(Message msg, String serviceId) {
        // convenience: stamp the locally-scoped service id directly onto the message
        msg.setServiceId(makeLocalServiceId(serviceId));
    }
    @Override
    public String makeServiceIdByManagementNodeId(String serviceId, String managementNodeId) {
        // scope the service id to an explicitly chosen management node
        return toServiceId(serviceId, managementNodeId);
    }
    @Override
    public void makeServiceIdByManagementNodeId(Message msg, String serviceId, String managementNodeId) {
        // convenience: stamp the node-scoped service id directly onto the message
        msg.setServiceId(makeServiceIdByManagementNodeId(serviceId, managementNodeId));
    }
@Override
public String makeTargetServiceIdByResourceUuid(String serviceId, String resourceUuid) {
DebugUtils.Assert(serviceId!=null, "serviceId cannot be null");
DebugUtils.Assert(resourceUuid!=null, "resourceUuid cannot be null");
String mgmtUuid = destMaker.makeDestination(resourceUuid);
return toServiceId(serviceId, mgmtUuid);
}
@Override
public void makeTargetServiceIdByResourceUuid(Message msg, String serviceId, String resourceUuid) {
String targetService = makeTargetServiceIdByResourceUuid(serviceId, resourceUuid);
msg.setServiceId(targetService);
}
@Override
public void installBeforeDeliveryMessageInterceptor(BeforeDeliveryMessageInterceptor interceptor, List<Class<? extends Message>> classes) {
if (classes.size() == 0) {
int order = 0;
for (BeforeDeliveryMessageInterceptor i : beforeDeliveryMessageInterceptorsForAll) {
if (i.orderOfBeforeDeliveryMessageInterceptor() <= interceptor.orderOfBeforeDeliveryMessageInterceptor()) {
order = beforeDeliveryMessageInterceptorsForAll.indexOf(i);
break;
}
}
beforeDeliveryMessageInterceptorsForAll.add(order, interceptor);
return;
}
for (Class clz : classes) {
while (clz != Object.class) {
List<BeforeDeliveryMessageInterceptor> is = beforeDeliveryMessageInterceptors.computeIfAbsent(clz, k -> new ArrayList<>());
synchronized (is) {
int order = 0;
for (BeforeDeliveryMessageInterceptor i : is) {
if (i.orderOfBeforeDeliveryMessageInterceptor() <= interceptor.orderOfBeforeDeliveryMessageInterceptor()) {
order = is.indexOf(i);
break;
}
}
is.add(order, interceptor);
}
clz = clz.getSuperclass();
}
}
}
    // Varargs convenience overload delegating to the List-based installer.
    @Override
    public void installBeforeDeliveryMessageInterceptor(BeforeDeliveryMessageInterceptor interceptor, Class<? extends Message>... classes) {
        installBeforeDeliveryMessageInterceptor(interceptor, Arrays.asList(classes));
    }
@Override
public void installBeforeSendMessageInterceptor(BeforeSendMessageInterceptor interceptor, Class<? extends Message>... classes) {
if (classes.length == 0) {
int order = 0;
for (BeforeSendMessageInterceptor i : beforeSendMessageInterceptorsForAll) {
if (i.orderOfBeforeSendMessageInterceptor() <= interceptor.orderOfBeforeSendMessageInterceptor()) {
order = beforeSendMessageInterceptorsForAll.indexOf(i);
break;
}
}
beforeSendMessageInterceptorsForAll.add(order, interceptor);
return;
}
for (Class clz : classes) {
while (clz != Object.class) {
List<BeforeSendMessageInterceptor> is = beforeSendMessageInterceptors.computeIfAbsent(clz, k -> new ArrayList<>());
synchronized (is) {
int order = 0;
for (BeforeSendMessageInterceptor i : is) {
if (i.orderOfBeforeSendMessageInterceptor() <= interceptor.orderOfBeforeSendMessageInterceptor()) {
order = is.indexOf(i);
break;
}
}
is.add(order, interceptor);
}
clz = clz.getSuperclass();
}
}
}
@Override
public void installBeforePublishEventInterceptor(BeforePublishEventInterceptor interceptor, Class<? extends Event>... classes) {
if (classes.length == 0) {
int order = 0;
for (BeforePublishEventInterceptor i : beforeEventPublishInterceptorsForAll) {
if (i.orderOfBeforePublishEventInterceptor() <= interceptor.orderOfBeforePublishEventInterceptor()) {
order = beforeEventPublishInterceptorsForAll.indexOf(i);
break;
}
}
beforeEventPublishInterceptorsForAll.add(order, interceptor);
return;
}
for (Class clz : classes) {
while (clz != Object.class) {
List<BeforePublishEventInterceptor> is = beforeEventPublishInterceptors.get(clz);
if (is == null) {
is = new ArrayList<>();
beforeEventPublishInterceptors.put(clz, is);
}
synchronized (is) {
int order = 0;
for (BeforePublishEventInterceptor i : is) {
if (i.orderOfBeforePublishEventInterceptor() <= interceptor.orderOfBeforePublishEventInterceptor()) {
order = is.indexOf(i);
break;
}
}
is.add(order, interceptor);
}
clz = clz.getSuperclass();
}
}
}
private void populateExtension() {
services = pluginRgty.getExtensionList(Service.class);
services.forEach(serv->{
assert serv.getId() != null : String.format("service id can not be null[%s]", serv.getClass().getName());
registerService(serv);
});
for (ReplyMessagePreSendingExtensionPoint extp : pluginRgty.getExtensionList(ReplyMessagePreSendingExtensionPoint.class)) {
List<Class> clazzs = extp.getReplyMessageClassForPreSendingExtensionPoint();
if (clazzs == null || clazzs.isEmpty()) {
continue;
}
for (Class clz : clazzs) {
if (!(APIEvent.class.isAssignableFrom(clz)) && !(MessageReply.class.isAssignableFrom(clz))) {
throw new CloudRuntimeException(String.format("ReplyMessagePreSendingExtensionPoint can only marshal APIEvent or MessageReply. %s claimed by %s is neither APIEvent nor MessageReply",
clz.getName(), extp.getClass().getName()));
}
List<ReplyMessagePreSendingExtensionPoint> exts = replyMessageMarshaller.get(clz);
if (exts == null) {
exts = new ArrayList<>();
replyMessageMarshaller.put(clz, exts);
}
exts.add(extp);
}
}
}
    @Override
    public boolean start() {
        // Wire the bus consumers first, then load plugin extensions.
        on(SERVICE_ID, messageConsumer);
        on(EVENT_ID, eventConsumer);
        populateExtension();
        return true;
    }
    // No teardown required here; resources are managed elsewhere.
    @Override
    public boolean stop() {
        return true;
    }
private void evalThreadContextToMessage(Message msg) {
Map<String, String> ctx = ThreadContext.getImmutableContext();
if (ctx != null) {
msg.putHeaderEntry(THREAD_CONTEXT, new HashMap<>(ctx));
}
List<String> list = ThreadContext.getImmutableStack().asList();
if (list != null && !list.isEmpty()) {
msg.putHeaderEntry(THREAD_CONTEXT_STACK, new ArrayList<>(list));
}
Map<Object, Object> tctx = TaskContext.getTaskContext();
if (tctx != null) {
msg.putHeaderEntry(TASK_CONTEXT, tctx);
}
}
private void doSendAndCallExtensions(Message msg) {
// for unit test finding invocation chain
MessageCommandRecorder.record(msg.getClass());
List<BeforeSendMessageInterceptor> interceptors = beforeSendMessageInterceptors.get(msg.getClass());
if (interceptors != null) {
for (BeforeSendMessageInterceptor interceptor : interceptors) {
interceptor.beforeSendMessage(msg);
}
}
for (BeforeSendMessageInterceptor interceptor : beforeSendMessageInterceptorsForAll) {
interceptor.beforeSendMessage(msg);
}
doSend(msg);
}
    private void doSend(Message msg) {
        // propagate the current thread/task context into the message headers
        evalThreadContextToMessage(msg);
        if (logger.isTraceEnabled() && islogMessage(msg)) {
            logger.trace(String.format("[msg send]: %s", dumpMessage(msg)));
        }
        new MessageSender(msg).send();
    }
    /**
     * Stamps the bus headers (correlation id, reply-to, need-reply flag) on the
     * message and hands it to the send pipeline.
     *
     * @param msg         the message to send; its service id must already be set
     * @param noNeedReply for {@link NeedReplyMessage}s only: when TRUE the reply is
     *                    neither tracked nor delivered.
     *                    NOTE(review): a null value would NPE on toString() below —
     *                    callers are assumed to always pass a non-null Boolean; confirm.
     * @throws IllegalArgumentException if the message has no service id
     */
    private void send(Message msg, Boolean noNeedReply) {
        if (msg.getServiceId() == null) {
            throw new IllegalArgumentException(String.format("service id cannot be null: %s", msg.getClass().getName()));
        }
        msg.putHeaderEntry(CORRELATION_ID, msg.getId());
        msg.putHeaderEntry(REPLY_TO, SERVICE_ID);
        if (msg instanceof APIMessage) {
            // API always need reply
            msg.putHeaderEntry(NO_NEED_REPLY_MSG, Boolean.FALSE.toString());
        } else if (msg instanceof NeedReplyMessage) {
            // for NeedReplyMessage sent without requiring receiver to reply,
            // mark it, then it will not be tracked and replied
            msg.putHeaderEntry(NO_NEED_REPLY_MSG, noNeedReply.toString());
        }
        doSendAndCallExtensions(msg);
    }
private void restoreFromSchema(Message msg, Map raw) throws ClassNotFoundException {
Map<String, String> schema = msg.getHeaderEntry("schema");
if (schema == null || schema.isEmpty()) {
return;
}
raw = (Map) raw.values().iterator().next();
List<String> paths = new ArrayList<>();
paths.addAll(schema.keySet());
for (String p : paths) {
Object dst = getProperty(msg, p);
String type = schema.get(p);
if (dst.getClass().getName().equals(type)) {
continue;
}
Class clz = Class.forName(type);
setProperty(msg, p, JSONObjectUtil.rehashObject(getProperty(raw, p), clz));
}
}
    /**
     * Entry point for messages delivered over HTTP from another management node:
     * deserializes the body, restores concrete field types from the "schema"
     * header, and injects the message into the local bus.
     *
     * @param e   the HTTP entity whose body is the serialized message
     * @param rsp the servlet response; set to 200 on successful local delivery
     */
    @AsyncThread
    public void handleHttpRequest(HttpEntity<String> e, HttpServletResponse rsp) {
        try {
            Message msg = CloudBusGson.fromJson(e.getBody());
            Map raw = JSONObjectUtil.toObject(e.getBody(), LinkedHashMap.class);
            try {
                restoreFromSchema(msg, raw);
            } catch (ClassNotFoundException e1) {
                throw new CloudRuntimeException(e1);
            }
            new MessageSender(msg).localSend();
            rsp.setStatus(HttpStatus.OK.value());
        } catch (Throwable t) {
            // best-effort: a malformed message must not kill the async thread
            logger.warn(String.format("unable to deliver a message received from HTTP. HTTP body: %s", e.getBody()), t);
        }
    }
}
| |
/*
* Copyright 2019 Frederic Thevenet
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.binjr.core.appearance;
import eu.binjr.core.dialogs.Dialogs;
import eu.binjr.core.preferences.AppEnvironment;
import eu.binjr.core.preferences.UserPreferences;
import javafx.animation.FadeTransition;
import javafx.application.Application;
import javafx.application.Platform;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.Pane;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javafx.stage.Stage;
import javafx.util.Duration;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.*;
import java.util.stream.Collectors;
/**
* Manages the appearance of registered {@link Stage} instances.
*
* @author Frederic Thevenet
*/
public class StageAppearanceManager {
    /**
     * Defines a set of options that governs to what degree the appearance of a registered {@link Stage} should be
     * affected.
     */
    public enum AppearanceOptions {
        /**
         * Indicates that no appearance changes should be applied
         */
        SET_NONE,
        /**
         * Indicates that the icon should be changed.
         */
        SET_ICON,
        /**
         * Indicates that the theme should be changed.
         */
        SET_THEME,
        /**
         * Indicates that "curtains" should be used to hide the stage until the theme is fully applied.
         */
        USE_STAGE_CURTAIN,
        /**
         * Indicates that all appearance changes should be applied
         */
        SET_ALL;

        /**
         * Returns this option encoded as a single-bit flag derived from its declaration position.
         *
         * @return the bit-flag value for this option
         */
        public long getValue() {
            return 1 << this.ordinal();
        }
    }

    private static final Logger logger = LogManager.getLogger(StageAppearanceManager.class);

    // Initialization-on-demand holder: the singleton is created lazily and thread-safely.
    private static class Holder {
        private final static StageAppearanceManager instance = new StageAppearanceManager();
    }

    // WeakHashMap so stages that are no longer referenced elsewhere can be garbage collected.
    private final Map<Stage, Set<AppearanceOptions>> registeredStages;

    /**
     * Initializes a new instance of the {@link StageAppearanceManager} class and
     * re-applies the appearance of every registered stage whenever the user changes
     * the UI theme preference.
     */
    private StageAppearanceManager() {
        registeredStages = new WeakHashMap<>();
        UserPreferences.getInstance().userInterfaceTheme.property().addListener((observable, oldValue, newValue) -> {
            if (newValue != null) {
                for (Map.Entry<Stage, Set<AppearanceOptions>> e : registeredStages.entrySet()) {
                    setAppearance(e.getKey(), newValue, e.getValue());
                }
            }
        });
    }

    /**
     * Get the singleton instance for the {@link StageAppearanceManager} class.
     *
     * @return the singleton instance for the {@link StageAppearanceManager} class.
     */
    public static StageAppearanceManager getInstance() {
        return Holder.instance;
    }

    /**
     * Unregister a {@link Stage} from the {@link StageAppearanceManager}
     *
     * @param stage the {@link Stage} to unregister.
     */
    public void unregister(Stage stage) {
        if (stage == null) {
            logger.warn(() -> "Trying to unregister a stage with null reference");
            return;
        }
        registeredStages.remove(stage);
        logger.trace(this::dumpRegisteredStages);
    }

    /**
     * Covers the stage's root pane with an opaque branded panel, to be removed once
     * the theme is fully applied.
     *
     * @param stage the stage to cover
     * @return the curtain node, or null if the stage has no scene or its root is not a {@link Pane}
     */
    private Node installCurtain(Stage stage) {
        if (stage.getScene() != null && stage.getScene().getRoot() instanceof Pane) {
            ImageView logo = new ImageView(new Image(getClass().getResourceAsStream("/eu/binjr/images/avatar_512.png")));
            logo.setFitHeight(256.0);
            logo.setFitWidth(256.0);
            StackPane curtain = new StackPane(logo);
            Pane root = (Pane) stage.getScene().getRoot();
            curtain.setStyle("-fx-background-color: #204656;");
            root.getChildren().add(curtain);
            AnchorPane.setLeftAnchor(curtain, 0.0);
            AnchorPane.setRightAnchor(curtain, 0.0);
            AnchorPane.setTopAnchor(curtain, 0.0);
            AnchorPane.setBottomAnchor(curtain, 0.0);
            curtain.toFront();
            return curtain;
        }
        return null;
    }

    /**
     * Fades out and removes the curtain previously installed by {@link #installCurtain(Stage)}.
     *
     * @param stage   the stage whose curtain should be raised
     * @param curtain the curtain node; may be null, in which case nothing happens
     */
    private void raiseCurtain(Stage stage, Node curtain) {
        if (curtain != null && stage.getScene().getRoot() instanceof Pane) {
            Pane root = (Pane) stage.getScene().getRoot();
            FadeTransition ft = new FadeTransition(Duration.millis(250), curtain);
            ft.setDelay(Duration.millis(350));
            ft.setFromValue(1.0);
            ft.setToValue(0.0);
            ft.play();
            ft.setOnFinished(event -> {
                root.getChildren().remove(curtain);
            });
        }
    }

    /**
     * Registers a {@link Stage} so that its appearance can be altered by the manager.
     *
     * @param stage the {@link Stage} to register in the {@link StageAppearanceManager}
     */
    public void register(Stage stage) {
        this.register(stage, AppearanceOptions.SET_ALL);
    }

    /**
     * Registers a {@link Stage} so that its appearance can be altered by the manager, according to the provided
     * {@link AppearanceOptions}. Calling this method with no options is equivalent to {@link #register(Stage)}.
     *
     * @param stage   the {@link Stage} to register in the {@link StageAppearanceManager}
     * @param options Appearance {@link AppearanceOptions} to apply to the registered stage.
     */
    public void register(Stage stage, AppearanceOptions... options) {
        if (stage == null) {
            throw new IllegalArgumentException("Stage cannot be null");
        }
        // EnumSet.copyOf throws IllegalArgumentException when handed an empty plain
        // collection, so an explicit zero-option call used to crash here; default to
        // SET_ALL, mirroring register(Stage).
        Set<AppearanceOptions> optionsEnumSet = options.length == 0
                ? EnumSet.of(AppearanceOptions.SET_ALL)
                : EnumSet.copyOf(Arrays.asList(options));
        if (optionsEnumSet.contains(AppearanceOptions.SET_ALL) ||
                optionsEnumSet.contains(AppearanceOptions.USE_STAGE_CURTAIN)) {
            // NOTE(review): this replaces any onShown handler previously set on the
            // stage — confirm no caller relies on its own onShown handler.
            stage.setOnShown(event -> raiseCurtain(stage, installCurtain(stage)));
        }
        this.registeredStages.put(stage, optionsEnumSet);
        logger.trace(this::dumpRegisteredStages);
        Platform.runLater(() ->
                setAppearance(stage, UserPreferences.getInstance().userInterfaceTheme.get(), optionsEnumSet));
    }

    // Debug helper: one-line summary of all currently registered stages.
    private String dumpRegisteredStages() {
        return registeredStages.size() + " registered stage(s): " +
                registeredStages.keySet()
                        .stream()
                        .map(s -> s.getTitle() + "(" + s.getWidth() + "x" + s.getHeight() + ")")
                        .collect(Collectors.joining(", "));
    }

    /**
     * Applies the requested appearance changes (theme and/or icon) to the given stage.
     *
     * @param stage   the stage to update
     * @param theme   the UI theme to apply
     * @param options the set of appearance options governing what is applied
     */
    private void setAppearance(Stage stage, UserInterfaceThemes theme, Set<AppearanceOptions> options) {
        if (options.contains(AppearanceOptions.SET_NONE)) {
            return;
        }
        if (options.contains(AppearanceOptions.SET_ALL) || options.contains(AppearanceOptions.SET_THEME)) {
            setUiTheme(stage.getScene(), theme);
        }
        if (options.contains(AppearanceOptions.SET_ALL) || options.contains(AppearanceOptions.SET_ICON)) {
            setIcon(stage);
        }
    }

    // Installs the application icon at all standard resolutions.
    private void setIcon(Stage stage) {
        stage.getIcons().addAll(
                new Image(getClass().getResourceAsStream("/eu/binjr/icons/binjr_16.png")),
                new Image(getClass().getResourceAsStream("/eu/binjr/icons/binjr_32.png")),
                new Image(getClass().getResourceAsStream("/eu/binjr/icons/binjr_48.png")),
                new Image(getClass().getResourceAsStream("/eu/binjr/icons/binjr_128.png")),
                new Image(getClass().getResourceAsStream("/eu/binjr/icons/binjr_256.png")),
                new Image(getClass().getResourceAsStream("/eu/binjr/icons/binjr_512.png")));
    }

    // Replaces the scene's stylesheets with the font, icon, theme and common sheets,
    // always on the JavaFX application thread.
    private void setUiTheme(Scene scene, UserInterfaceThemes theme) {
        Dialogs.runOnFXThread(() -> {
            scene.getStylesheets().clear();
            Application.setUserAgentStylesheet(null);
            scene.getStylesheets().addAll(
                    getClass().getResource(getFontFamilyCssPath()).toExternalForm(),
                    getClass().getResource("/eu/binjr/css/Icons.css").toExternalForm(),
                    theme.getClass().getResource(theme.getCssPath()).toExternalForm(),
                    getClass().getResource("/eu/binjr/css/Common.css").toExternalForm());
        });
    }

    /**
     * Returns the classpath location of the font-family stylesheet matching the host OS.
     *
     * @return the css path for the current OS family (Windows is the fallback default)
     */
    public static String getFontFamilyCssPath() {
        switch (AppEnvironment.getInstance().getOsFamily()) {
            default:
            case WINDOWS:
                return "/eu/binjr/css/Fonts-family-win.css";
            case LINUX:
                return "/eu/binjr/css/Fonts-family-linux.css";
            case OSX:
                return "/eu/binjr/css/Fonts-family-mac.css";
        }
    }

    /**
     * Applies the user's current UI theme to the given scene.
     *
     * @param scene the scene to style
     */
    public void applyUiTheme(Scene scene) {
        setUiTheme(scene, UserPreferences.getInstance().userInterfaceTheme.get());
    }

    /**
     * Returns the default color palette for charts, as configured in user preferences.
     *
     * @return the default chart colors
     */
    public Color[] getDefaultChartColors() {
        return UserPreferences.getInstance().chartColorPalette.get().getPalette();
    }
}
| |
/*
* Copyright (c) 2013 Jadler contributors
* This program is made available under the terms of the MIT License.
*/
package net.jadler;
import java.net.URI;
import net.jadler.stubbing.server.StubHttpServerManager;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.Collections;
import net.jadler.stubbing.Stubbing;
import net.jadler.stubbing.HttpStub;
import net.jadler.stubbing.StubResponse;
import net.jadler.stubbing.StubbingFactory;
import net.jadler.exception.JadlerException;
import net.jadler.mocking.Verifying;
import net.jadler.stubbing.server.StubHttpServer;
import org.apache.commons.collections.MultiMap;
import org.apache.commons.collections.map.MultiValueMap;
import org.hamcrest.Matcher;
import org.junit.Test;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.anyInt;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyObject;
import static org.mockito.Mockito.doThrow;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
public class JadlerMockerTest {
    // Default stub-response values used when exercising the defaults machinery.
    private static final int DEFAULT_STATUS = 204;
    private static final String HEADER_NAME1 = "h1";
    private static final String HEADER_VALUE1 = "v1";
    private static final String HEADER_NAME2 = "h2";
    private static final String HEADER_VALUE2 = "v2";
    private static final Charset DEFAULT_ENCODING = Charset.forName("UTF-16");
    // Port reported by the mocked stub server.
    private static final int PORT = 12345;
    // Constructor argument validation: null server and null stubbing factory must be rejected.
    @Test(expected=IllegalArgumentException.class)
    public void constructor1() {
        new JadlerMocker(null);
        fail("server cannot be null");
    }
    @Test
    public void constructor2() {
        new JadlerMocker(mock(StubHttpServer.class));
    }
    @Test(expected=IllegalArgumentException.class)
    public void constructor3() {
        new JadlerMocker(null, null);
        fail("neither server nor stubbing factory can be null");
    }
    @Test(expected=IllegalArgumentException.class)
    public void constructor4() {
        new JadlerMocker(null, mock(StubbingFactory.class));
        fail("server cannot be null");
    }
    @Test(expected=IllegalArgumentException.class)
    public void constructor5() {
        new JadlerMocker(mock(StubHttpServer.class), null);
        fail("stubbing factory cannot be null");
    }
    // Both-args-valid case must construct successfully.
    @Test
    public void constructor6() {
        new JadlerMocker(mock(StubHttpServer.class), mock(StubbingFactory.class));
    }
    // Lifecycle: a started mocker cannot be started again without stopping first.
    @Test(expected=IllegalStateException.class)
    public void startNotStopped() {
        final JadlerMocker mocker = new JadlerMocker(mock(StubHttpServer.class));
        mocker.start();
        mocker.start();
        fail("mocker has been started already, cannot be started again without being stopped first");
    }
    // A server failure during start must surface as a JadlerException.
    @Test(expected=JadlerException.class)
    public void startException() throws Exception {
        final StubHttpServer server = mock(StubHttpServer.class);
        doThrow(new Exception()).when(server).start();
        new JadlerMocker(server).start();
        fail("server threw an exception");
    }
    // Successful start must start the server exactly once and register the mocker as request manager.
    @Test
    public void start() throws Exception {
        final StubHttpServer server = mock(StubHttpServer.class);
        final JadlerMocker jadlerMocker = new JadlerMocker(server);
        jadlerMocker.start();
        verify(server, times(1)).start();
        verify(server, times(1)).registerRequestManager(eq(jadlerMocker));
        verifyNoMoreInteractions(server);
    }
    // Closing a mocker that was never started is an illegal state.
    @Test(expected=IllegalStateException.class)
    public void closeNotStarted() {
        new JadlerMocker(mock(StubHttpServer.class)).close();
        fail("mocker cannot be stopped without being started");
    }
@Test(expected=JadlerException.class)
public void closeException() throws Exception {
final StubHttpServer server = mock(StubHttpServer.class);
doThrow(new Exception()).when(server).start();
final StubHttpServerManager mocker = new JadlerMocker(server);
mocker.start();
mocker.close();
fail("server threw an exception");
}
    // Successful close must stop the underlying server exactly once.
    @Test
    public void close() throws Exception {
        final StubHttpServer server = mock(StubHttpServer.class);
        final JadlerMocker mocker = new JadlerMocker(server);
        mocker.start();
        mocker.close();
        verify(server, times(1)).stop();
    }
    // isStarted must track the start/close lifecycle transitions.
    @Test
    public void isStarted() throws Exception {
        final StubHttpServer server = mock(StubHttpServer.class);
        final JadlerMocker jadlerMocker = new JadlerMocker(server);
        assertThat(jadlerMocker.isStarted(), is(false));
        jadlerMocker.start();
        assertThat(jadlerMocker.isStarted(), is(true));
        jadlerMocker.close();
        assertThat(jadlerMocker.isStarted(), is(false));
    }
    // The port can only be queried after the server has been started.
    @Test(expected=IllegalStateException.class)
    public void getStubHttpServerPortNotStarted() {
        final StubHttpServerManager serverManager = new JadlerMocker(mock(StubHttpServer.class));
        serverManager.getStubHttpServerPort();
        fail("server has not been started yet, cannot retrieve the port number");
    }
    // After start, the port reported by the underlying server must be returned.
    @Test
    public void getStubHttpServerPort() {
        final StubHttpServer server = mock(StubHttpServer.class);
        when(server.getPort()).thenReturn(PORT);
        final StubHttpServerManager serverManager = new JadlerMocker(server);
        serverManager.start();
        assertThat(serverManager.getStubHttpServerPort(), is(PORT));
    }
    // Default header name must not be null.
    @Test(expected=IllegalArgumentException.class)
    public void addDefaultHeaderWrongParam1() {
        new JadlerMocker(mock(StubHttpServer.class)).addDefaultHeader(null, "abcd");
        fail("default header name cannot be null");
    }
    // Default header value must not be null (empty is fine, see addDefaultHeader1).
    @Test(expected=IllegalArgumentException.class)
    public void addDefaultHeaderWrongParam2() {
        new JadlerMocker(mock(StubHttpServer.class)).addDefaultHeader("abcd", null);
        fail("default header value cannot be null");
    }
    @Test(expected=IllegalStateException.class)
    public void addDefaultHeaderWrongState() {
        final JadlerMocker mocker = new JadlerMocker(mock(StubHttpServer.class));
        //calling provideStubResponseFor finishes the configuration phase, a default header cannot be added anymore
        mocker.provideStubResponseFor(prepareEmptyMockRequest());
        mocker.addDefaultHeader("abcd", "efgh");
        fail("default header cannot be added anymore");
    }
    @Test
    public void addDefaultHeader1() {
        //empty header value is valid
        new JadlerMocker(mock(StubHttpServer.class)).addDefaultHeader("abcd", "");
    }
    // Happy path: a regular name/value pair is accepted.
    @Test
    public void addDefaultHeader2() {
        new JadlerMocker(mock(StubHttpServer.class)).addDefaultHeader("abcd", "efgh");
    }
    // Negative default status values are rejected.
    @Test(expected=IllegalArgumentException.class)
    public void setDefaultStatusWrongParam() {
        new JadlerMocker(mock(StubHttpServer.class)).setDefaultStatus(-1);
        fail("defaultStatus must be at least 0");
    }
    // Once the first stub response has been provided, defaults can no longer change.
    @Test(expected=IllegalStateException.class)
    public void setDefaultStatusWrongState() {
        final JadlerMocker mocker = new JadlerMocker(mock(StubHttpServer.class));
        mocker.provideStubResponseFor(prepareEmptyMockRequest());
        mocker.setDefaultStatus(200);
        fail("default status cannot be set anymore");
    }
    @Test
    public void setDefaultStatus() {
        final JadlerMocker mocker = new JadlerMocker(mock(StubHttpServer.class));
        mocker.setDefaultStatus(200);
    }
    // A null default encoding is rejected.
    @Test(expected=IllegalArgumentException.class)
    public void setDefaultEncodingWrongParam() {
        new JadlerMocker(mock(StubHttpServer.class)).setDefaultEncoding(null);
        fail("defaultEncoding mustn't be null");
    }
    // Encoding cannot be changed once the first stub response has been provided.
    @Test(expected=IllegalStateException.class)
    public void setDefaultEncodingWrongState() {
        final JadlerMocker mocker = new JadlerMocker(mock(StubHttpServer.class));
        mocker.provideStubResponseFor(prepareEmptyMockRequest());
        mocker.setDefaultEncoding(DEFAULT_ENCODING);
        fail("default encoding cannot be set anymore");
    }
    @Test
    public void setDefaultEncoding() {
        final JadlerMocker mocker = new JadlerMocker(mock(StubHttpServer.class));
        mocker.setDefaultEncoding(DEFAULT_ENCODING);
    }
    // Stubbing is forbidden after the first response has been served.
    @Test(expected=IllegalStateException.class)
    public void onRequestWrongState() {
        final JadlerMocker mocker = new JadlerMocker(mock(StubHttpServer.class));
        mocker.provideStubResponseFor(prepareEmptyMockRequest());
        mocker.onRequest();
        fail("The mocker has already provided first response, cannot do any stubbing anymore");
    }
    // onRequest must return the Stubbing instance produced by the stubbing factory.
    @Test
    public void onRequest() {
        final Stubbing stubbing = mock(Stubbing.class);
        final StubbingFactory sf = mock(StubbingFactory.class);
        when(sf.createStubbing(any(Charset.class), anyInt(), any(MultiMap.class))).thenReturn(stubbing);
        final StubHttpServer server = mock(StubHttpServer.class);
        final JadlerMocker mocker = new JadlerMocker(server, sf);
        final Stubbing result = (Stubbing) mocker.onRequest();
        assertThat(result, is(stubbing));
    }
    /*
     * Tests that defaults (status, headers, encoding) are used correctly when creating a stubbing instance.
     */
    @Test
    public void onRequestWithDefaults() {
        final StubHttpServer server = mock(StubHttpServer.class);
        final StubbingFactory sf = mock(StubbingFactory.class);
        final JadlerMocker mocker = new JadlerMocker(server, sf);
        mocker.setDefaultStatus(DEFAULT_STATUS);
        mocker.addDefaultHeader(HEADER_NAME1, HEADER_VALUE1);
        mocker.addDefaultHeader(HEADER_NAME2, HEADER_VALUE2);
        mocker.setDefaultEncoding(DEFAULT_ENCODING);
        mocker.onRequest();
        //verify the Stubbing instance was created with the given defaults
        final MultiMap defaultHeaders = new MultiValueMap();
        defaultHeaders.put(HEADER_NAME1, HEADER_VALUE1);
        defaultHeaders.put(HEADER_NAME2, HEADER_VALUE2);
        verify(sf, times(1)).createStubbing(eq(DEFAULT_ENCODING), eq(DEFAULT_STATUS), eq(defaultHeaders));
    }
    /*
     * Tests that if no default status is set, 200 is used as a super-default
     */
    @Test
    public void onRequestNoDefaultStatus() {
        final StubHttpServer server = mock(StubHttpServer.class);
        final StubbingFactory sf = mock(StubbingFactory.class);
        final JadlerMocker mocker = new JadlerMocker(server, sf);
        mocker.onRequest();
        //verify the Stubbing instance was created with the default 200 response status
        verify(sf, times(1)).createStubbing(any(Charset.class), eq(200), any(MultiMap.class));
    }
    /*
     * Tests that if no default encoding is set, UTF-8 is used as a super-default
     */
    @Test
    public void onRequestNoDefaultEncoding() {
        final StubHttpServer server = mock(StubHttpServer.class);
        final StubbingFactory sf = mock(StubbingFactory.class);
        final JadlerMocker mocker = new JadlerMocker(server, sf);
        mocker.onRequest();
        //verify the Stubbing instance was created with the default UTF-8 response encoding
        verify(sf, times(1)).createStubbing(eq(Charset.forName("UTF-8")), anyInt(), any(MultiMap.class));
    }
    // Tests that if no default headers are added, an empty header multimap is used.
    @Test
    public void onRequestNoDefaultHeaders() {
        final StubHttpServer server = mock(StubHttpServer.class);
        final StubbingFactory sf = mock(StubbingFactory.class);
        final JadlerMocker mocker = new JadlerMocker(server, sf);
        mocker.onRequest();
        //verify the Stubbing instance was created with no default headers
        verify(sf, times(1)).createStubbing(any(Charset.class), anyInt(), eq(new MultiValueMap()));
    }
    //following provideResponseFor() tests are far from being just standard unit tests since they
    //need a cooperation of two or more JadlerMocker methods.
    @Test
    public void provideStubResponseFor() {
        final Request req = prepareEmptyMockRequest();
        //rule1 matches the given request (param of the provideResponseFor method) so it must be returned from
        //the tested method
        final HttpStub rule1 = mock(HttpStub.class);
        final Stubbing stubbing1 = mock(Stubbing.class);
        when(stubbing1.createRule()).thenReturn(rule1);
        when(rule1.matches(eq(req))).thenReturn(true);
        final StubResponse resp1 = StubResponse.EMPTY;
        when(rule1.nextResponse(eq(req))).thenReturn(resp1);
        //rule2 doesn't match the given request
        final HttpStub rule2 = mock(HttpStub.class);
        final Stubbing stubbing2 = mock(Stubbing.class);
        when(stubbing2.createRule()).thenReturn(rule2);
        when(rule2.matches(eq(req))).thenReturn(false);
        final StubbingFactory sf = mock(StubbingFactory.class);
        when(sf.createStubbing(any(Charset.class), anyInt(), any(MultiMap.class))).thenReturn(stubbing1, stubbing2);
        final StubHttpServer server = mock(StubHttpServer.class);
        final JadlerMocker mocker = new JadlerMocker(server, sf);
        //calling onRequest twice so stubbing1 and stubbing2 are created in the JadlerMocker instance
        mocker.onRequest();
        mocker.onRequest();
        assertThat(mocker.provideStubResponseFor(req), is(resp1));
    }
    // No rule matches: the built-in 404 "not found" stub response must be returned.
    @Test
    public void provideStubResponseFor2() {
        final Request req = prepareEmptyMockRequest();
        //neither rule1 nor rule2 matches, default not-found response must be returned
        final HttpStub rule1 = mock(HttpStub.class);
        final Stubbing stubbing1 = mock(Stubbing.class);
        when(stubbing1.createRule()).thenReturn(rule1);
        when(rule1.matches(eq(req))).thenReturn(false);
        final HttpStub rule2 = mock(HttpStub.class);
        final Stubbing stubbing2 = mock(Stubbing.class);
        when(stubbing2.createRule()).thenReturn(rule2);
        when(rule2.matches(eq(req))).thenReturn(false);
        final StubbingFactory sf = mock(StubbingFactory.class);
        when(sf.createStubbing(any(Charset.class), anyInt(), any(MultiMap.class)))
                .thenReturn(stubbing1, stubbing2);
        final StubHttpServer server = mock(StubHttpServer.class);
        final JadlerMocker mocker = new JadlerMocker(server, sf);
        //calling onRequest twice so stubbing1 and stubbing2 are created in the JadlerMocker instance
        mocker.onRequest();
        mocker.onRequest();
        final StubResponse res = mocker.provideStubResponseFor(req);
        assertThat(res, is(not(nullValue())));
        assertThat(res.getStatus(), is(404));
        assertThat(res.getDelay(), is(0L));
        assertThat(res.getBody(), is("No stub response found for the incoming request".getBytes()));
        assertThat(res.getEncoding(), is(Charset.forName("UTF-8")));
        final KeyValues expectedHeaders = new KeyValues().add("Content-Type", "text/plain; charset=utf-8");
        assertThat(res.getHeaders(), is(expectedHeaders));
    }
    @Test
    public void provideStubResponseFor3() {
        final Request req = prepareEmptyMockRequest();
        //both rules matches the request, the latter must be provided
        final HttpStub rule1 = mock(HttpStub.class);
        final Stubbing stubbing1 = mock(Stubbing.class);
        when(stubbing1.createRule()).thenReturn(rule1);
        when(rule1.matches(eq(req))).thenReturn(true);
        final StubResponse resp1 = StubResponse.EMPTY;
        when(rule1.nextResponse(eq(req))).thenReturn(resp1);
        final HttpStub rule2 = mock(HttpStub.class);
        final Stubbing stubbing2 = mock(Stubbing.class);
        when(stubbing2.createRule()).thenReturn(rule2);
        when(rule2.matches(eq(req))).thenReturn(true);
        final StubResponse resp2 = StubResponse.EMPTY;
        when(rule2.nextResponse(eq(req))).thenReturn(resp2);
        final StubbingFactory sf = mock(StubbingFactory.class);
        when(sf.createStubbing(any(Charset.class), anyInt(), any(MultiMap.class)))
                .thenReturn(stubbing1, stubbing2);
        final StubHttpServer server = mock(StubHttpServer.class);
        final JadlerMocker mocker = new JadlerMocker(server, sf);
        //calling onRequest twice so stubbing1 and stubbing2 are created in the JadlerMocker instance
        mocker.onRequest();
        mocker.onRequest();
        assertThat(mocker.provideStubResponseFor(req), is(resp2));
    }
/**
 * Verifies that {@code reset()} discards previously created stubbings: a response stubbed
 * before the reset must not be served afterwards, and a stubbing created after the reset
 * takes effect. Relies on Mockito consecutive stubbing ({@code thenReturn(stubbing1, stubbing2)})
 * so the first {@code onRequest()} call yields stubbing1 and the second yields stubbing2.
 */
@Test
public void reset() {
final Request req = prepareEmptyMockRequest();
// First stubbing: matches the request and serves resp1.
final HttpStub rule1 = mock(HttpStub.class);
final Stubbing stubbing1 = mock(Stubbing.class);
when(stubbing1.createRule()).thenReturn(rule1);
when(rule1.matches(eq(req))).thenReturn(true);
final StubResponse resp1 = StubResponse.builder().build();
when(rule1.nextResponse(eq(req))).thenReturn(resp1);
// Second stubbing: also matches, but serves a distinct response resp2.
final HttpStub rule2 = mock(HttpStub.class);
final Stubbing stubbing2 = mock(Stubbing.class);
when(stubbing2.createRule()).thenReturn(rule2);
when(rule2.matches(eq(req))).thenReturn(true);
final StubResponse resp2 = StubResponse.builder().build();
when(rule2.nextResponse(eq(req))).thenReturn(resp2);
// Factory hands out stubbing1 on the first call, stubbing2 on the second.
final StubbingFactory sf = mock(StubbingFactory.class);
when(sf.createStubbing(any(Charset.class), anyInt(), any(MultiMap.class))).thenReturn(stubbing1, stubbing2);
final StubHttpServer server = mock(StubHttpServer.class);
final JadlerMocker mocker = new JadlerMocker(server, sf);
//calling onRequest so stubbing1 is created in the JadlerMocker instance
mocker.onRequest();
assertThat(mocker.provideStubResponseFor(req), is(resp1));
mocker.reset();
//calling onRequest again so stubbing2 is created in the JadlerMocker instance
mocker.onRequest();
assertThat(mocker.provideStubResponseFor(req), is(resp2));
}
/**
 * Verification must be rejected with {@link IllegalStateException} once request
 * recording has been switched off — there is nothing to verify against.
 */
@Test(expected = IllegalStateException.class)
public void verifyThatRequest_noRequestRecording() {
final JadlerMocker jadlerMocker = new JadlerMocker(mock(StubHttpServer.class));
jadlerMocker.setRecordRequests(false);
jadlerMocker.verifyThatRequest(); //expected to throw IllegalStateException
}
/**
 * With request recording enabled (the default) {@code verifyThatRequest()}
 * must hand back a non-null ongoing-verification object.
 */
@Test
public void verifyThatRequest() {
final JadlerMocker jadlerMocker = new JadlerMocker(mock(StubHttpServer.class));
final Verifying verifying = jadlerMocker.verifyThatRequest();
assertThat(verifying, is(not(nullValue())));
}
/**
 * Passing {@code null} matchers must be rejected with {@link IllegalArgumentException}.
 * The redundant {@code fail(...)} call after the invocation was removed: the
 * {@code expected=} attribute already fails the test when no exception is thrown,
 * and an {@code AssertionError} raised by {@code fail} would only be reported as
 * a confusing "unexpected exception" instead of a clear missing-exception failure.
 */
@Test(expected=IllegalArgumentException.class)
public void numberOfRequestsMatchingInvalidArgument() {
new JadlerMocker(mock(StubHttpServer.class)).numberOfRequestsMatching(null);
}
/**
 * Counting matching requests must be rejected with {@link IllegalStateException}
 * once request recording has been disabled.
 */
@Test(expected=IllegalStateException.class)
@SuppressWarnings("unchecked")
public void numberOfRequestsMatching_noRequestRecording() {
final Matcher<? super Request> anyMatcher = mock(Matcher.class);
final JadlerMocker jadlerMocker = new JadlerMocker(mock(StubHttpServer.class));
jadlerMocker.setRecordRequests(false);
jadlerMocker.numberOfRequestsMatching(Collections.<Matcher<? super Request>>singletonList(anyMatcher));
}
/**
 * When no request has been received yet, the count of matching requests is zero
 * even for a matcher that accepts everything.
 */
@Test
public void numberOfRequestsMatchingNoReceivedRequest() {
@SuppressWarnings("unchecked")
final Matcher<? super Request> acceptAll = mock(Matcher.class);
when(acceptAll.matches(anyObject())).thenReturn(true);
final JadlerMocker jadlerMocker = new JadlerMocker(mock(StubHttpServer.class));
final int matchingRequests =
jadlerMocker.numberOfRequestsMatching(Collections.<Matcher<? super Request>>singletonList(acceptAll));
assertThat(matchingRequests, is(0)); //no request received yet, must be zero
}
/**
 * An empty matcher list matches everything, so the count equals the total
 * number of recorded requests.
 */
@Test
public void numberOfRequestsMatchingNoPredicates() {
final JadlerMocker jadlerMocker = new JadlerMocker(mock(StubHttpServer.class));
//calling provideStubResponseFor for all three requests so these get recorded in the mocker
for (int i = 0; i < 3; i++) {
jadlerMocker.provideStubResponseFor(mock(Request.class));
}
final int matchingRequests =
jadlerMocker.numberOfRequestsMatching(Collections.<Matcher<? super Request>>emptyList());
assertThat(matchingRequests, is(3));
}
/**
 * Two of three recorded requests satisfy the matcher, so the count is 2;
 * after {@code reset()} the recorded requests are gone and the count drops to 0.
 */
@Test
public void numberOfRequestsMatching() {
final Request matching1 = mock(Request.class);
final Request nonMatching = mock(Request.class);
final Request matching2 = mock(Request.class);
@SuppressWarnings("unchecked")
final Matcher<? super Request> matcher = mock(Matcher.class);
when(matcher.matches(matching1)).thenReturn(true);
when(matcher.matches(nonMatching)).thenReturn(false);
when(matcher.matches(matching2)).thenReturn(true);
final JadlerMocker jadlerMocker = new JadlerMocker(mock(StubHttpServer.class));
//calling provideStubResponseFor for all three requests so these get recorded in the mocker
jadlerMocker.provideStubResponseFor(matching1);
jadlerMocker.provideStubResponseFor(nonMatching);
jadlerMocker.provideStubResponseFor(matching2);
final Collection<Matcher<? super Request>> matchers =
Collections.<Matcher<? super Request>>singletonList(matcher);
assertThat(jadlerMocker.numberOfRequestsMatching(matchers), is(2));
jadlerMocker.reset();
assertThat(jadlerMocker.numberOfRequestsMatching(matchers), is(0));
}
/** Builds a minimal GET request to {@code http://localhost/} with no headers or body. */
private Request prepareEmptyMockRequest() {
final URI localhostRoot = URI.create("http://localhost/");
return Request.builder().method("GET").requestURI(localhostRoot).build();
}
}
| |
/*
* Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.bpel.core.ode.integration;
import com.hazelcast.core.*;
import org.apache.commons.httpclient.HttpConnectionManager;
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
import org.apache.commons.httpclient.params.HttpConnectionManagerParams;
import org.apache.commons.httpclient.util.IdleConnectionTimeoutThread;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ode.bpel.common.evt.DebugBpelEventListener;
import org.apache.ode.bpel.dao.BpelDAOConnectionFactory;
import org.apache.ode.bpel.engine.BpelServerImpl;
import org.apache.ode.bpel.engine.CountLRUDehydrationPolicy;
import org.apache.ode.bpel.engine.cron.CronScheduler;
import org.apache.ode.bpel.extension.ExtensionBundleRuntime;
import org.apache.ode.bpel.extension.ExtensionCorrelationFilter;
import org.apache.ode.bpel.iapi.*;
import org.apache.ode.bpel.intercept.MessageExchangeInterceptor;
import org.apache.ode.bpel.memdao.BpelDAOConnectionFactoryImpl;
import org.apache.ode.il.dbutil.Database;
import org.apache.ode.scheduler.simple.JdbcDelegate;
import org.apache.ode.scheduler.simple.ODECluster;
import org.apache.ode.scheduler.simple.SimpleScheduler;
import org.wso2.carbon.bpel.core.BPELConstants;
import org.wso2.carbon.bpel.core.internal.BPELServerHolder;
import org.wso2.carbon.bpel.core.internal.BPELServiceComponent;
import org.wso2.carbon.bpel.core.ode.integration.config.BPELServerConfiguration;
import org.wso2.carbon.bpel.core.ode.integration.jmx.Instance;
import org.wso2.carbon.bpel.core.ode.integration.jmx.InstanceStatusMonitor;
import org.wso2.carbon.bpel.core.ode.integration.jmx.Processes;
import org.wso2.carbon.bpel.core.ode.integration.store.*;
import org.wso2.carbon.bpel.core.ode.integration.utils.BPELDatabaseCreator;
import org.wso2.carbon.utils.CarbonUtils;
import org.wso2.carbon.utils.MBeanRegistrar;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.NotCompliantMBeanException;
import javax.sql.DataSource;
import javax.transaction.TransactionManager;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
/**
* BPELServer implementation. All the ODE BPEL Engine initialization is handled here.
*/
public final class BPELServerImpl implements BPELServer , Observer{
private static Log log = LogFactory.getLog(BPELServerImpl.class);
/* ODE BPEL Server instance*/
private BpelServerImpl odeBpelServer;
/* Multi-tenant BPEL process store */
private ProcessStoreImpl processStore;
/* JTA transaction manager obtained from the configured transaction factory */
private TransactionManager transactionManager;
/* For the moment it looks like we don't want a multi-threaded http connection manager */
private MultiThreadedHttpConnectionManager httpConnectionManager;
/* BPEL DAO Connection Factory*/
private BpelDAOConnectionFactory daoConnectionFactory;
/* ODE Database manager */
private Database db;
/* ODE Scheduler */
private Scheduler scheduler;
/* ODE Configuration properties */
private ODEConfigurationProperties odeConfigurationProperties;
/* Shared thread pool used by the scheduler and cron scheduler; created in initExecutorService() */
private ExecutorService executorService;
/* ODE cron scheduler for system/process cleanup jobs */
private CronScheduler cronScheduler;
/* Background thread that closes idle pooled HTTP connections */
private IdleConnectionTimeoutThread idleConnectionTimeoutThread;
/* BPEL Server Configuration */
private BPELServerConfiguration bpelServerConfiguration;
/* Eagerly-created singleton instance */
private static BPELServerImpl ourInstance = new BPELServerImpl();
/**
 * Returns the singleton BPEL server instance. Note: the instance is created
 * eagerly but is not usable until {@link #init()} has been called.
 *
 * @return the shared {@code BPELServerImpl} instance
 */
public static BPELServerImpl getInstance() {
return ourInstance;
}
/** Private constructor — instances are obtained only via {@link #getInstance()}. */
private BPELServerImpl() {
}
/**
* Initialize the ODE BPEL engine.
*
* @throws Exception if failed to start the BPEL engine.
*/
public void init() throws Exception {
// Configuration must be loaded first — everything below reads from it.
bpelServerConfiguration = new BPELServerConfiguration();
odeConfigurationProperties = new ODEConfigurationProperties(bpelServerConfiguration);
if (log.isDebugEnabled()) {
log.debug("Initializing transaction manager");
}
initTransactionManager();
if (log.isDebugEnabled()) {
log.debug("Creating data source");
}
// Requires the transaction manager initialized above.
initDataSource();
if (log.isDebugEnabled()) {
log.debug("Starting DAO");
}
// Requires the database started above.
initDAO();
BPELEndpointReferenceContextImpl eprContext =
new BPELEndpointReferenceContextImpl();
if (log.isDebugEnabled()) {
log.debug("Initializing BPEL process store");
}
initProcessStore(eprContext);
if (log.isDebugEnabled()) {
log.debug("Initializing BPEL server");
}
initBPELServer(eprContext);
if (log.isDebugEnabled()) {
log.debug("Initializing multithreaded connection manager");
}
initHttpConnectionManager();
/* Register event listeners configured in ode-axis2.properties file*/
registerEventListeners();
/* Register message exchange interceptors configured in ode-axis.properties file*/
registerMexInterceptors();
registerExtensionActivityBundles();
registerExtensionCorrelationFilters();
//registerExtensionActivityBundles();
//registerExternalVariableModules();
try {
odeBpelServer.start();
} catch (Exception e) {
// Best-effort cleanup of everything initialized above before re-throwing.
shutdown();
String errMsg = "BPEL Server failed to start.";
log.error(errMsg, e);
throw new Exception(errMsg, e);
}
// Observe the holder so update() is called once the Hazelcast instance is available.
if(bpelServerConfiguration.getUseDistributedLock() && isAxis2ClusteringEnabled()) {
BPELServerHolder.getInstance().addObserver(this);
if(log.isDebugEnabled()) {
log.debug("Clustering Enabled, Registering Observer for HazelCast service");
}
}
registerMBeans();
}
/**
* Shutdown ODE BPEL Server, schedulers, process store, database connections and
* http connection pools.
*
* @throws Exception if error occurred while shutting down BPEL Server.
*/
/**
 * Shuts down the BPEL server components in reverse dependency order: scheduler,
 * ODE server, cron scheduler, process store, DAO factory, database, HTTP connection
 * manager, idle-connection thread and finally the executor service. Each step is
 * best-effort: failures are logged as warnings and the remaining components are
 * still shut down. All fields are nulled so a repeated call is a no-op.
 *
 * Fixes over the previous version: {@code executorService} is null-guarded (it is
 * only created in {@code initBPELServer}, so shutdown() invoked after an earlier
 * init failure used to throw NPE), and the caught exceptions are now passed to the
 * warn logs instead of being dropped.
 */
public void shutdown() throws Exception {
if (scheduler != null) {
try {
if (log.isDebugEnabled()) {
log.debug("Shutting down quartz scheduler.");
}
scheduler.shutdown();
} catch (Exception e) {
log.warn("Scheduler couldn't be shut down.", e);
} finally {
scheduler = null;
}
}
if (odeBpelServer != null) {
try {
if (log.isDebugEnabled()) {
log.debug("Shutting down BPEL server.");
}
odeBpelServer.shutdown();
} catch (Exception e) {
log.warn("Error stopping services.", e);
} finally {
odeBpelServer = null;
}
}
if (cronScheduler != null) {
try {
if (log.isDebugEnabled()) {
log.debug("Shutting down cron scheduler.");
}
cronScheduler.shutdown();
} catch (Exception e) {
log.warn("Cron scheduler couldn't be shutdown.", e);
} finally {
cronScheduler = null;
}
}
if (processStore != null) {
try {
if (log.isDebugEnabled()) {
log.debug("Shutting down process store.");
}
processStore.shutdown();
} catch (Exception e) {
log.warn("Process store could not be shutdown.", e);
} finally {
processStore = null;
}
}
if (daoConnectionFactory != null) {
try {
daoConnectionFactory.shutdown();
} catch (Exception e) {
log.warn("DAO shutdown failed.", e);
} finally {
daoConnectionFactory = null;
}
}
if (db != null) {
try {
db.shutdown();
} catch (Exception e) {
log.warn("DB shutdown failed.", e);
} finally {
db = null;
}
}
// Transaction manager has no shutdown hook of its own; just drop the reference.
transactionManager = null;
if (httpConnectionManager != null) {
try {
if (log.isDebugEnabled()) {
log.debug("Shutting down HTTP Connection Manager.");
}
httpConnectionManager.shutdown();
} catch (Exception e) {
log.warn("HTTP Connection Manager shutdown failed.", e);
} finally {
httpConnectionManager = null;
}
}
if (idleConnectionTimeoutThread != null) {
try {
if (log.isDebugEnabled()) {
log.debug("Shutting down Idle Connection Timeout Thread.");
}
idleConnectionTimeoutThread.shutdown();
} catch (Exception e) {
log.warn("Idle connection timeout thread shutdown failed.", e);
} finally {
idleConnectionTimeoutThread = null;
}
}
// executorService is created in initBPELServer(); it may be null when shutdown()
// runs after an early init() failure, so guard against NPE.
if (executorService != null) {
executorService.shutdown();
executorService = null;
}
log.info("BPEL Server shutdown completed.");
}
/**
* Register BPEL Event listener.
*
* @param eventListenerClass Fully qualified class name of BpelEventListener implementation.
*/
/**
 * Instantiates the given class reflectively (it must have a public no-arg
 * constructor and implement {@code BpelEventListener}) and registers it with
 * the ODE BPEL server. Failures are logged as warnings, not propagated.
 *
 * @param eventListenerClass fully qualified class name of the BpelEventListener implementation
 */
public void registerEventListener(final String eventListenerClass) {
try {
odeBpelServer.registerBpelEventListener(
(BpelEventListener) Class.forName(eventListenerClass).newInstance());
log.info("Registered custom BPEL event listener: " + eventListenerClass);
} catch (Exception e) {
// Message grammar fixed ("couldn't loaded" -> "couldn't be loaded").
log.warn("Couldn't register the event listener " + eventListenerClass
+ ", the class couldn't be loaded properly: ", e);
}
}
/**
* Register ODE message echange interceptor.
*
* @param mexInterceptorClass Fully qualified class name of ODe MexInterceptor implementation
*/
/**
 * Instantiates the given class reflectively and registers it as an ODE message
 * exchange interceptor, mirroring {@code registerMexInterceptors()}. The previous
 * implementation was an empty body, contradicting its documented contract.
 * Failures are logged as warnings, not propagated.
 *
 * @param mexInterceptorClass fully qualified class name of the ODE MessageExchangeInterceptor implementation
 */
public void registerMessageExchangeInterceptor(final String mexInterceptorClass) {
try {
odeBpelServer.registerMessageExchangeInterceptor(
(MessageExchangeInterceptor) Class.forName(mexInterceptorClass).newInstance());
log.info("Registered message exchange interceptor: " + mexInterceptorClass);
} catch (Exception e) {
log.warn("Couldn't register the message exchange interceptor " + mexInterceptorClass
+ ", the class couldn't be loaded properly.", e);
}
}
/**
* Get the multi-tenant process store instance of BPEL Server.
*
* @return MultiTenant Process store instance
*/
public MultiTenantProcessStore getMultiTenantProcessStore() {
// processStore is created in initProcessStore(); null before init()/after shutdown().
return processStore;
}
/**
* Get the multi threaded http connection manager to use with external service invocations.
*
* @return HttpConnectionManager instace(multi-threaded implementation).
*/
public HttpConnectionManager getHttpConnectionManager() {
// Created in initHttpConnectionManager(); null before init()/after shutdown().
return httpConnectionManager;
}
/** @return the underlying ODE BPEL server; null before init()/after shutdown(). */
public BpelServerImpl getODEBPELServer() {
return odeBpelServer;
}
/** @return the JTA transaction manager; null before init()/after shutdown(). */
public TransactionManager getTransactionManager() {
return transactionManager;
}
/** @return the ODE configuration properties loaded during init(). */
public ODEConfigurationProperties getOdeConfigurationProperties() {
return odeConfigurationProperties;
}
/**
* Initialize the transaction manager.
*
* @throws BPELEngineException If error occured while initializing transaction manager
*/
/**
 * Loads the configured transaction factory class reflectively and obtains a
 * {@link TransactionManager} from it via its {@code getTransactionManager}
 * method (with an int timeout argument when a timeout >= 0 is configured,
 * no-arg otherwise). Raw {@code Class} usages replaced with {@code Class<?>}.
 *
 * @throws BPELEngineException if the factory cannot be loaded or invoked
 */
private void initTransactionManager() throws BPELEngineException {
String txFactoryName = bpelServerConfiguration.getTransactionFactoryClass();
if (log.isDebugEnabled()) {
log.debug("Initializing transaction manager using " + txFactoryName);
}
try {
Class<?> txFactoryClass = this.getClass().getClassLoader().loadClass(txFactoryName);
Object txFactory = txFactoryClass.newInstance();
int transactionTimeout = bpelServerConfiguration.getTransactionManagerTimeout();
if (transactionTimeout > -1) {
// Factory variant that accepts an explicit timeout.
transactionManager = (TransactionManager) txFactoryClass.
getMethod("getTransactionManager", int.class).invoke(txFactory, transactionTimeout);
} else {
transactionManager = (TransactionManager) txFactoryClass.
getMethod("getTransactionManager", (Class<?>[]) null).invoke(txFactory);
}
// Didn't use Debug Transaction manager which used in ODE.
// TODO: Look for the place we use this axis parameter.
//axisConfiguration.addParameter("ode.transaction.manager", transactionManager);
} catch (Exception e) {
log.fatal("Couldn't initialize a transaction manager with factory: "
+ txFactoryName, e);
throw new BPELEngineException("Couldn't initialize a transaction manager with factory: "
+ txFactoryName, e);
}
}
/**
* Initialize the data source.
*
* @throws BPELEngineException If error occured while initializing datasource
*/
private void initDataSource() throws BPELEngineException {
db = new Database(odeConfigurationProperties);
db.setTransactionManager(transactionManager);
// When started with -Dsetup, create the BPEL schema if it does not exist yet.
if (System.getProperty("setup") != null) {
BPELDatabaseCreator bpelDBCreator;
try {
bpelDBCreator = new BPELDatabaseCreator(
db.<DataSource>lookupInJndi(odeConfigurationProperties.getDbDataSource()));
} catch (Exception e) {
String errMsg = "Error creating BPELDatabaseCreator";
log.error(errMsg, e);
throw new BPELEngineException(errMsg, e);
}
// Probe query against ODE's schema-version table to detect an existing schema.
if (!bpelDBCreator.isDatabaseStructureCreated("SELECT * FROM ODE_SCHEMA_VERSION")) {
try {
//TODO rename following method
bpelDBCreator.createRegistryDatabase();
} catch (Exception e) {
String errMsg = "Error creating BPEL database";
log.error(errMsg, e);
throw new BPELEngineException(errMsg, e);
}
} else {
if (log.isDebugEnabled()) {
log.debug("BPEL database already exists. Using the old database.");
}
}
}
// In carbon, embedded H2 database for ODE is located at CARBON_HOME/repository/database
String dbRoot = CarbonUtils.getCarbonHome() + File.separator + "repository" + File.separator
+ "database";
File dbRootDir = new File(dbRoot);
if (dbRootDir.exists() && dbRootDir.isDirectory()) {
db.setWorkRoot(dbRootDir);
} else {
// No embedded-database directory present; let ODE use its default work root.
db.setWorkRoot(null);
}
try {
db.start();
} catch (Exception e) {
String errMsg =
"Error starting database connections, check the database configuration!";
log.error(errMsg, e);
throw new BPELEngineException(errMsg, e);
}
}
/**
* Initialize ODE DAO connection factory.
*
* @throws BPELEngineException if DAO connection factory creation fails
*/
/**
 * Creates the ODE DAO connection factory from the already-started database.
 *
 * @throws BPELEngineException if the DAO connection factory cannot be created
 */
private void initDAO() throws BPELEngineException {
final String daoFactoryClass = odeConfigurationProperties.getDAOConnectionFactory();
log.info("Using DAO Connection Factory class: " + daoFactoryClass);
try {
daoConnectionFactory = db.createDaoCF();
} catch (Exception e) {
final String errMsg = "Error instantiating DAO Connection Factory class " + daoFactoryClass;
log.error(errMsg, e);
throw new BPELEngineException(errMsg, e);
}
}
/**
* Initialize process store/
*
* @param eprContext Endpoint reference context
* @throws Exception if process store initialization failed
*/
private void initProcessStore(EndpointReferenceContext eprContext) throws Exception {
processStore = new ProcessStoreImpl(eprContext,
db.getDataSource(),
odeConfigurationProperties);
// Deployment units live under CARBON_HOME/repository/bpel.
processStore.setLocalBPELDeploymentUnitRepo(new File(CarbonUtils.getCarbonHome() +
File.separator + "repository" + File.separator + "bpel"));
// Listener bounces/cleans up processes on store events (see ProcessStoreListenerImpl).
processStore.registerListener(new ProcessStoreListenerImpl());
}
/**
* Init ODE BpelServer.
*
* @param eprContext endpoint reference context.
*/
private void initBPELServer(EndpointReferenceContext eprContext) {
// Executor must exist before the schedulers, which run on it.
initExecutorService(createThreadFactory());
odeBpelServer = new BpelServerImpl();
setupJobScheduler();
setupCronScheduler();
odeBpelServer.setDaoConnectionFactory(daoConnectionFactory);
odeBpelServer.setInMemDaoConnectionFactory(
new BpelDAOConnectionFactoryImpl(scheduler, odeConfigurationProperties.getInMemMexTtl()));
odeBpelServer.setEndpointReferenceContext(eprContext);
odeBpelServer.setMessageExchangeContext(new BPELMessageExchangeContextImpl());
odeBpelServer.setBindingContext(new BPELBindingContextImpl(this));
odeBpelServer.setScheduler(scheduler);
// TODO: Analyze a way of integrating with lazy loading
activateDehydration();
odeBpelServer.setMigrationTransactionTimeout(
odeConfigurationProperties.getMigrationTransactionTimeout());
odeBpelServer.setConfigProperties(
odeConfigurationProperties.getProperties());
odeBpelServer.init();
// NOTE(review): the throttling/hydration setters below run after init();
// presumably intentional (ODE reads them lazily) — confirm before reordering.
odeBpelServer.setInstanceThrottledMaximumCount(
odeConfigurationProperties.getInstanceThrottledMaximumCount());
odeBpelServer.setProcessThrottledMaximumCount(
odeConfigurationProperties.getProcessThrottledMaximumCount());
odeBpelServer.setProcessThrottledMaximumSize(
odeConfigurationProperties.getProcessThrottledMaximumSize());
odeBpelServer.setHydrationLazy(odeConfigurationProperties.isHydrationLazy());
odeBpelServer.setHydrationLazyMinimumSize(
odeConfigurationProperties.getHydrationLazyMinimumSize());
}
/**
* Activate process dehydration.
*/
/**
 * Installs a count/age-based LRU dehydration policy on the ODE server when
 * process dehydration is enabled in the server configuration. Max-age and
 * max-count limits are only applied when configured to a positive value.
 */
private void activateDehydration() {
if (!bpelServerConfiguration.isProcessDehydrationEnabled()) {
return; // dehydration disabled — nothing to install
}
final CountLRUDehydrationPolicy policy = new CountLRUDehydrationPolicy();
if (bpelServerConfiguration.getProcessDehydrationMaxAge() > 0) {
policy.setProcessMaxAge(bpelServerConfiguration.getProcessDehydrationMaxAge());
if (log.isDebugEnabled()) {
log.debug("Process Max Age: "
+ bpelServerConfiguration.getProcessDehydrationMaxAge());
}
}
if (bpelServerConfiguration.getProcessDehydraionMaxCount() > 0) {
policy.setProcessMaxCount(bpelServerConfiguration.getProcessDehydraionMaxCount());
if (log.isDebugEnabled()) {
log.debug("Process Max Count: "
+ bpelServerConfiguration.getProcessDehydraionMaxCount());
}
}
odeBpelServer.setDehydrationPolicy(policy);
log.info("Process Dehydration is activated...");
}
/**
* Setting up cron scheduler
*/
private void setupCronScheduler() {
cronScheduler = new CronScheduler();
// Runs its jobs on the shared executor created in initExecutorService().
cronScheduler.setScheduledTaskExec(executorService);
cronScheduler.setContexts(odeBpelServer.getContexts());
odeBpelServer.setCronScheduler(cronScheduler);
// Schedule the configured system-wide cleanup cron jobs immediately.
cronScheduler.scheduleSystemCronJobs(bpelServerConfiguration.getSystemCleanupCronJobs());
}
// Creates the job scheduler and wires the ODE server as its job processor,
// plus a polled-runnable processor backed by the shared executor.
private void setupJobScheduler() {
scheduler = createScheduler();
scheduler.setJobProcessor(odeBpelServer);
BpelServerImpl.PolledRunnableProcessor polledRunnableProcessor =
new BpelServerImpl.PolledRunnableProcessor();
polledRunnableProcessor.setPolledRunnableExecutorService(executorService);
polledRunnableProcessor.setContexts(odeBpelServer.getContexts());
scheduler.setPolledRunnableProcesser(polledRunnableProcessor);
}
/**
 * Builds the JDBC-backed ODE SimpleScheduler for this node, wired to the
 * shared executor service and the JTA transaction manager.
 *
 * @return the configured scheduler instance
 */
private Scheduler createScheduler() {
final JdbcDelegate jdbcDelegate = new JdbcDelegate(db.getDataSource());
final SimpleScheduler simpleScheduler = new SimpleScheduler(
bpelServerConfiguration.getNodeId(),
jdbcDelegate,
odeConfigurationProperties.getProperties());
simpleScheduler.setExecutorService(executorService);
simpleScheduler.setTransactionManager(transactionManager);
return simpleScheduler;
}
/**
 * Returns a factory producing daemon threads named {@code BPELServer-N}.
 * The counter is an {@link java.util.concurrent.atomic.AtomicInteger} because
 * {@code newThread} may be invoked concurrently by the executor; the previous
 * plain {@code int} increment could produce duplicate thread names under races.
 */
private ThreadFactory createThreadFactory() {
return new ThreadFactory() {
private final java.util.concurrent.atomic.AtomicInteger threadNumber =
new java.util.concurrent.atomic.AtomicInteger(0);
public Thread newThread(Runnable r) {
Thread t = new Thread(r, "BPELServer-" + threadNumber.incrementAndGet());
// Daemon threads so a hung pool thread cannot block JVM shutdown.
t.setDaemon(true);
return t;
}
};
}
/**
 * Creates the shared executor: an unbounded cached pool when the configured
 * max pool size is 0, otherwise a fixed pool of that size.
 *
 * @param threadFactory factory used to create the pool's worker threads
 */
private void initExecutorService(ThreadFactory threadFactory) {
final int maxPoolSize = odeConfigurationProperties.getThreadPoolMaxSize();
if (maxPoolSize == 0) {
executorService = Executors.newCachedThreadPool(threadFactory);
} else {
executorService = Executors.newFixedThreadPool(maxPoolSize, threadFactory);
}
}
// Configures the pooled HTTP connection manager (per-host and total limits)
// and starts a background thread that reaps idle connections.
private void initHttpConnectionManager() throws Exception {
httpConnectionManager = new MultiThreadedHttpConnectionManager();
int maxConnectionsPerHost = bpelServerConfiguration.getMaxConnectionsPerHost();
int maxTotalConnections = bpelServerConfiguration.getMaxTotalConnections();
if (log.isDebugEnabled()) {
log.debug(HttpConnectionManagerParams.MAX_HOST_CONNECTIONS + "=" + maxConnectionsPerHost);
log.debug(HttpConnectionManagerParams.MAX_TOTAL_CONNECTIONS + "=" + maxTotalConnections);
}
// Both limits must be positive; fail fast on misconfiguration.
if (maxConnectionsPerHost < 1 || maxTotalConnections < 1) {
String errmsg = HttpConnectionManagerParams.MAX_HOST_CONNECTIONS + " and " +
HttpConnectionManagerParams.MAX_TOTAL_CONNECTIONS
+ " must be positive integers!";
log.error(errmsg);
throw new Exception(errmsg);
}
httpConnectionManager.getParams().setDefaultMaxConnectionsPerHost(maxConnectionsPerHost);
httpConnectionManager.getParams().setMaxTotalConnections(maxTotalConnections);
// TODO: Modify this and move configuration to bps.xml
// Register the connection manager to a idle check thread
idleConnectionTimeoutThread = new IdleConnectionTimeoutThread();
idleConnectionTimeoutThread.setName("Http_Idle_Connection_Timeout_Thread");
// Timeout/check interval come from ODE properties; both default to 30000 ms.
long idleConnectionTimeout = Long.parseLong(
odeConfigurationProperties
.getProperty("http.idle.connection.timeout", "30000"));
long idleConnectionCheckInterval = Long.parseLong(
odeConfigurationProperties
.getProperty("http.idle.connection.check.interval", "30000"));
if (log.isDebugEnabled()) {
log.debug("http.idle.connection.timeout=" + idleConnectionTimeout);
log.debug("http.idle.connection.check.interval=" + idleConnectionCheckInterval);
}
idleConnectionTimeoutThread.setConnectionTimeout(idleConnectionTimeout);
idleConnectionTimeoutThread.setTimeoutInterval(idleConnectionCheckInterval);
idleConnectionTimeoutThread.addConnectionManager(httpConnectionManager);
idleConnectionTimeoutThread.start();
}
/**
 * Registers the always-on debug event listener plus any listeners configured
 * in the server configuration. Listener classes are instantiated reflectively;
 * failures are logged as warnings and do not abort server startup.
 *
 * Changes: redundant {@code isEmpty()} wrapper removed (a for-each over an
 * empty list is a no-op) and the warn-message grammar fixed.
 */
private void registerEventListeners() {
/* let's always register the debugging listener */
odeBpelServer.registerBpelEventListener(new DebugBpelEventListener());
for (String listenerCN : bpelServerConfiguration.getEventListeners()) {
try {
odeBpelServer.registerBpelEventListener(
(BpelEventListener) Class.forName(listenerCN).newInstance());
log.info("Registered custom BPEL event listener: " + listenerCN);
} catch (Exception e) {
log.warn("Couldn't register the event listener " + listenerCN
+ ", the class couldn't be loaded properly: ", e);
}
}
}
/**
 * Registers every message exchange interceptor listed in the server
 * configuration. Classes are instantiated reflectively; a failure for one
 * interceptor is logged as a warning and does not stop the others.
 */
private void registerMexInterceptors() {
final List<String> mexInterceptors = bpelServerConfiguration.getMexInterceptors();
if (mexInterceptors.isEmpty()) {
return;
}
for (String interceptorCN : mexInterceptors) {
try {
final MessageExchangeInterceptor interceptor =
(MessageExchangeInterceptor) Class.forName(interceptorCN).newInstance();
odeBpelServer.registerMessageExchangeInterceptor(interceptor);
log.info("Registered message exchange interceptor: " + interceptorCN);
} catch (Exception e) {
log.warn("Couldn't register the message exchange interceptor " + interceptorCN
+ ", the class couldn't be loaded properly.", e);
}
}
}
/**
 * Registers the built-in E4X and B4P extension bundles plus any bundle runtimes
 * listed in the server configuration. All classes are instantiated reflectively;
 * individual failures are logged and do not abort startup.
 *
 * Fix: the per-bundle warn log now includes the caught exception as its cause
 * (it was previously swallowed, hiding the actual load failure).
 */
private void registerExtensionActivityBundles() {
try {
log.info("Registering E4X Extension...");
odeBpelServer.registerExtensionBundle((ExtensionBundleRuntime) Class.
forName("org.apache.ode.extension.e4x.JSExtensionBundle").newInstance());
} catch (Exception e) {
log.error("Couldn't register e4x extension bundles runtime.", e);
}
try {
log.info("Registering B4P Extension...");
odeBpelServer.registerExtensionBundle((ExtensionBundleRuntime) Class.
forName("org.wso2.carbon.bpel.b4p.extension.BPEL4PeopleExtensionBundle").newInstance());
} catch (Exception e) {
log.error("Couldn't register B4P extension bundles runtime.", e);
}
List<String> extensionBundleRuntimes = bpelServerConfiguration.getExtensionBundleRuntimes();
// List<String> extensionBundleValidators = bpelServerConfiguration.getExtensionBundleValidators();
if (extensionBundleRuntimes != null) {
for (String extension : extensionBundleRuntimes) {
try {
// instantiate bundle
ExtensionBundleRuntime bundleRT =
(ExtensionBundleRuntime) Class.forName(extension).newInstance();
// register extension bundle (BPEL server)
odeBpelServer.registerExtensionBundle(bundleRT);
} catch (Exception e) {
log.warn("Couldn't register the extension bundle runtime " + extension +
", the class couldn't be loaded properly.", e);
}
}
}
//TODO register validators
/*
if (extensionBundleValidators != null) {
Map<QName, ExtensionValidator> validators = new HashMap<QName, ExtensionValidator>();
for (String validator : extensionBundleValidators) {
try {
// instantiate bundle
ExtensionBundleValidation bundleVal =
(ExtensionBundleValidation) Class.forName(validator).newInstance();
//add validators
validators.putAll(bundleVal.getExtensionValidators());
} catch (Exception e) {
log.warn("Couldn't register the extension bundle validator " + validator +
", the class couldn't be " + "loaded properly.");
}
}
// register extension bundle (BPEL store)
store.setExtensionValidators(validators);
}
*/
}
/**
 * Registers the built-in B4P correlation filter plus any correlation filters
 * listed in the server configuration. Classes are instantiated reflectively;
 * individual failures are logged and do not abort startup.
 *
 * Fix: the per-filter warn log now includes the caught exception as its cause
 * (previously swallowed); stale TODO about StringTokenizer removed (no
 * StringTokenizer is used here).
 */
private void registerExtensionCorrelationFilters() {
try {
log.info("Registering B4P Filter...");
odeBpelServer.registerExtensionCorrelationFilter((ExtensionCorrelationFilter) Class.
forName("org.wso2.carbon.bpel.b4p.extension.BPEL4PeopleCorrelationFilter").newInstance());
} catch (Exception e) {
log.error("Couldn't register B4P extension filter.", e);
}
List<String> extensionFilters = bpelServerConfiguration.getExtensionCorrelationFilters();
if (extensionFilters != null) {
for (String filter : extensionFilters) {
try {
// instantiate filter
ExtensionCorrelationFilter filterRT =
(ExtensionCorrelationFilter) Class.forName(filter).newInstance();
// register correlation filter (BPEL server)
odeBpelServer.registerExtensionCorrelationFilter(filterRT);
} catch (Exception e) {
log.warn("Couldn't register the extension correlation filter " + filter + ", the class couldn't be " +
"loaded properly.", e);
}
}
}
}
/**
 * Reacts to process store lifecycle events by (un)registering and cleaning up
 * processes on the ODE server, and by (re)scheduling or cancelling per-process
 * cron jobs. The switch is order-sensitive: cleanup/registration decisions
 * depend on whether a ProcessConf still exists for the event's pid.
 */
private class ProcessStoreListenerImpl implements ProcessStoreListener {
public void onProcessStoreEvent(ProcessStoreEvent processStoreEvent) {
if (log.isDebugEnabled()) {
log.debug("Process store event: " + processStoreEvent);
}
// May be null, e.g. for a process that no longer exists in the store.
ProcessConf pConf = processStore.getProcessConfiguration(processStoreEvent.pid);
switch (processStoreEvent.type) {
case DEPLOYED:
if (pConf != null) {
/*
* If and only if an old process exists with the same pid,
* the old process is cleaned up. The following line is IMPORTANT and
* used for the case when the deployment and store do not have the
* process while the process itself exists in the BPEL_PROCESS table.
* Notice that the new process is actually created on the 'ACTIVATED'
* event.
*/
odeBpelServer.cleanupProcess(pConf);
}
break;
case ACTIVATED:
// bounce the process
odeBpelServer.unregister(processStoreEvent.pid);
if (pConf != null) {
//odeBpelServer.register(pConf);
try {
odeBpelServer.register(pConf);
} catch (BpelEngineException ex) {
String failureCause = "Process registration failed for:" +
pConf.getProcessId() + ". " + ex.getMessage();
//create DeploymentContext in order to persist the error
int tenantID = processStore.getTenantId(pConf.getProcessId());
String bpelRepoRoot = processStore.getLocalDeploymentUnitRepo().getAbsolutePath();
ProcessConfigurationImpl pConfImpl = (ProcessConfigurationImpl) pConf;
File bpelArchive = new File(pConfImpl.getAbsolutePathForBpelArchive());
BPELDeploymentContext deploymentContext =
new BPELDeploymentContext(tenantID,
bpelRepoRoot, bpelArchive, pConf.getVersion());
deploymentContext.setDeploymentFailureCause(failureCause);
deploymentContext.setStackTrace(ex);
deploymentContext.setFailed(true);
TenantProcessStoreImpl store =
(TenantProcessStoreImpl) processStore.getTenantsProcessStore(tenantID);
try {
store.getBPELPackageRepository().handleBPELPackageDeploymentError(deploymentContext);
} catch (Exception e) {
log.error("Unable to persist the failure cause. Failure: " + failureCause, e);
}
// Re-throw so the caller still sees the registration failure.
throw ex;
}
} else {
if (log.isDebugEnabled()) {
log.debug("slightly odd:received event " +
processStoreEvent + " for process not in store!");
}
}
break;
case RETIRED:
// are there instances of this process running?
boolean hasInstances = odeBpelServer.hasActiveInstances(
processStoreEvent.pid);
// Remove the process
odeBpelServer.unregister(processStoreEvent.pid);
// bounce the process if necessary
if (hasInstances) {
if (pConf != null) {
odeBpelServer.register(pConf);
} else {
if (log.isDebugEnabled()) {
log.debug("slightly odd:received event " +
processStoreEvent + " for process not in store!");
}
}
} else {
// we may have potentially created a lot of garbage, so,
// let's hope the garbage collector is configured properly.
if (pConf != null) {
odeBpelServer.cleanupProcess(pConf);
}
}
break;
case DISABLED:
case UNDEPLOYED:
odeBpelServer.unregister(processStoreEvent.pid);
if (pConf != null) {
odeBpelServer.cleanupProcess(pConf);
}
break;
default:
if (log.isDebugEnabled()) {
log.debug("Ignoring store event: " + processStoreEvent);
}
}
if (pConf != null) {
// On undeploy, cancel the process's cron jobs; otherwise re-schedule them.
if (processStoreEvent.type == ProcessStoreEvent.Type.UNDEPLOYED) {
if (log.isDebugEnabled()) {
log.debug("Cancelling all cron scheduled jobs on store event: "
+ processStoreEvent);
}
odeBpelServer.getContexts().cronScheduler.cancelProcessCronJobs(
processStoreEvent.pid, true);
}
// Except for undeploy event, we need to re-schedule process dependent jobs
if (log.isDebugEnabled()) {
log.debug("(Re)scheduling cron scheduled jobs on store event: "
+ processStoreEvent);
}
if (processStoreEvent.type != ProcessStoreEvent.Type.UNDEPLOYED) {
odeBpelServer.getContexts().cronScheduler.scheduleProcessCronJobs(
processStoreEvent.pid, pConf);
}
}
}
}
/** @return the BPEL server configuration loaded during init(); null before init(). */
public BPELServerConfiguration getBpelServerConfiguration() {
return bpelServerConfiguration;
}
/**
 * Checked exception signalling a failure while initializing or operating the
 * BPEL engine (transaction manager, datasource, DAO, ...).
 */
static class BPELEngineException extends Exception {
// Exception is Serializable; pin the serialVersionUID so serialized form
// stays stable across recompilation.
private static final long serialVersionUID = 1L;
public BPELEngineException() {
super();
}
public BPELEngineException(String message) {
super(message);
}
public BPELEngineException(String message, Throwable cause) {
super(message, cause);
}
public BPELEngineException(Throwable cause) {
super(cause);
}
}
/**
 * Registers the Processes, Instance and InstanceStatusMonitor MBeans under the
 * {@code org.wso2.carbon.bpel.core.ode.integration.jmx} domain.
 *
 * The throws clause was reduced to {@code Exception}: the previously listed
 * javax.management exceptions are all subclasses of Exception, so listing them
 * alongside it was redundant (callers may still catch the narrower types).
 *
 * @throws Exception if any MBean registration fails
 */
public void registerMBeans() throws Exception {
log.info("Registering MBeans");
Processes processMBean = new Processes();
Instance instanceMBean = new Instance();
InstanceStatusMonitor statusMonitorMBean = InstanceStatusMonitor.getInstanceStatusMonitor();
MBeanRegistrar.registerMBean(processMBean, "org.wso2.carbon.bpel.core.ode.integration.jmx:type=Process");
MBeanRegistrar.registerMBean(instanceMBean, "org.wso2.carbon.bpel.core.ode.integration.jmx:type=Instance");
MBeanRegistrar.registerMBean(statusMonitorMBean, "org.wso2.carbon.bpel.core.ode.integration.jmx:type=InstanceStatusMonitor");
}
/**
 * Returns the job scheduler used by this BPEL server.
 *
 * @return the active {@code Scheduler}
 */
public Scheduler getScheduler() {
    return this.scheduler;
}
/**
 * Tells whether Axis2 clustering should be treated as enabled.
 * <p>
 * NOTE(review): the real clustering-agent lookup is commented out below and
 * the method is hard-coded to {@code true} — confirm whether this is
 * intentional or a leftover shortcut.
 */
private boolean isAxis2ClusteringEnabled() {
// return BPELServerHolder.getInstance().getConfigCtxService().
// getServerConfigContext().getAxisConfiguration().getClusteringAgent() != null;
return true;
}
/**
 * Observer callback fired when the Hazelcast instance becomes available.
 * <p>
 * Publishes the Hazelcast instance name (and, when configured, the instance
 * state cache flag) as system properties, hands the instance to the ODE
 * server, registers this node in the BPS cluster node map (BPS-675), and
 * installs the {@link ODEClusterImpl} cluster abstraction on the scheduler.
 *
 * @param o the observable that triggered this update (unused here)
 * @param arg the event argument (unused here)
 */
public void update(Observable o, Object arg) {
HazelcastInstance hazelcastInstance = BPELServiceComponent.getHazelcastInstance();
if(hazelcastInstance != null) {
String name = hazelcastInstance.getName();
// Expose the Hazelcast instance name so other components can locate it.
System.setProperty("WSO2_HZ_INSTANCE_NAME", name);
if(bpelServerConfiguration.getUseInstanceStateCache()) {
// Advertise that the distributed instance state cache should be used.
System.setProperty("WSO2_USE_STATE_CACHE", "true");
}
odeBpelServer.setHazelcastInstance(hazelcastInstance);
if(log.isInfoEnabled()) {
log.info("Configured HazelCast instance for BPS cluster");
}
// Register this node in the BPS cluster node map (BPS-675) and listen for
// membership changes so departed nodes get evicted from the map.
hazelcastInstance.getCluster().addMembershipListener(new MemberShipListener());
Member localMember = hazelcastInstance.getCluster().getLocalMember();
String localMemberID = getHazelCastNodeID(localMember);
log.info("Registering HZ localMember ID " + localMemberID
+ " as ODE Node ID " + bpelServerConfiguration.getNodeId());
hazelcastInstance.getMap(BPELConstants.BPS_CLUSTER_NODE_MAP)
.put(localMemberID, bpelServerConfiguration.getNodeId());
}
// NOTE(review): the cluster impl is installed even when no Hazelcast
// instance is present — confirm that is the intended behavior.
((SimpleScheduler) scheduler).setCluster(new ODEClusterImpl());
//scheduler.start();
}
/**
 * Builds the cluster-wide identifier for a Hazelcast member
 * (added to fix BPS-675).
 *
 * @param member the Hazelcast cluster member
 * @return identifier in {@code host:port} form
 */
protected static String getHazelCastNodeID(Member member) {
    final String host = member.getSocketAddress().getHostName();
    final int memberPort = member.getSocketAddress().getPort();
    return host + ":" + memberPort;
}
/**
 * {@link ODECluster} implementation backed by Hazelcast
 * (added to fix BPS-675).
 */
class ODEClusterImpl implements ODECluster {

    /**
     * Clustering is considered enabled only when the distributed lock is
     * configured AND Axis2 clustering is on.
     */
    @Override
    public boolean isClusterEnabled() {
        return bpelServerConfiguration.getUseDistributedLock() && isAxis2ClusteringEnabled();
    }

    /**
     * Checks whether the current node is the cluster leader. The first
     * member in the Hazelcast cluster view is treated as the leader.
     *
     * @return {@code true} if this node is the leader
     */
    @Override
    public boolean isLeader() {
        HazelcastInstance hazelcastInstance = BPELServiceComponent.getHazelcastInstance();
        Member leader = hazelcastInstance.getCluster().getMembers().iterator().next();
        // Simplified from if/return-true/return-false to a direct return.
        return leader.localMember();
    }

    /**
     * Returns the ODE node ids of all BPS nodes currently in the cluster.
     *
     * @return list of ODE node ids
     */
    @Override
    public List<String> getKnownNodes() {
        HazelcastInstance hazelcastInstance = BPELServiceComponent.getHazelcastInstance();
        // Fetch the distributed map once instead of twice per iteration, and
        // read its values directly instead of a get() per key.
        java.util.Map<Object, Object> nodeMap =
                hazelcastInstance.getMap(BPELConstants.BPS_CLUSTER_NODE_MAP);
        List<String> nodeList = new ArrayList<String>();
        for (Object nodeId : nodeMap.values()) {
            nodeList.add((String) nodeId);
        }
        return nodeList;
    }
}
/**
 * Membership listener that keeps the BPS cluster node map in sync when
 * members leave the cluster (added to fix BPS-675).
 */
class MemberShipListener implements MembershipListener {

    /** No action is required when a member joins. */
    @Override
    public void memberAdded(MembershipEvent membershipEvent) {
        // Nothing to do here.
    }

    /**
     * Removes the departed member's entry from the distributed node map.
     * Only the cluster leader performs the removal.
     */
    @Override
    public void memberRemoved(MembershipEvent membershipEvent) {
        HazelcastInstance hazelcastInstance = BPELServiceComponent.getHazelcastInstance();
        Member leader = hazelcastInstance.getCluster().getMembers().iterator().next();
        // Only the leader is allowed to update the distributed map.
        if (leader.localMember()) {
            String departedMemberID = getHazelCastNodeID(membershipEvent.getMember());
            hazelcastInstance.getMap(BPELConstants.BPS_CLUSTER_NODE_MAP).remove(departedMemberID);
        }
    }

    /** Attribute changes are not relevant to the node map. */
    @Override
    public void memberAttributeChanged(MemberAttributeEvent memberAttributeEvent) {
        // Nothing to do here.
    }
}
}
| |
/*
* Copyright (c) 2016 Gridtec. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.gridtec.lambda4j.function.tri.obj;
import at.gridtec.lambda4j.Lambda;
import at.gridtec.lambda4j.consumer.ShortConsumer;
import at.gridtec.lambda4j.consumer.tri.obj.ObjBiFloatConsumer;
import at.gridtec.lambda4j.function.BooleanFunction;
import at.gridtec.lambda4j.function.ByteFunction;
import at.gridtec.lambda4j.function.CharFunction;
import at.gridtec.lambda4j.function.FloatFunction;
import at.gridtec.lambda4j.function.ShortFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiFloatToShortFunction;
import at.gridtec.lambda4j.function.bi.obj.ObjFloatToShortFunction;
import at.gridtec.lambda4j.function.conversion.BooleanToFloatFunction;
import at.gridtec.lambda4j.function.conversion.ByteToFloatFunction;
import at.gridtec.lambda4j.function.conversion.CharToFloatFunction;
import at.gridtec.lambda4j.function.conversion.DoubleToFloatFunction;
import at.gridtec.lambda4j.function.conversion.FloatToShortFunction;
import at.gridtec.lambda4j.function.conversion.IntToFloatFunction;
import at.gridtec.lambda4j.function.conversion.LongToFloatFunction;
import at.gridtec.lambda4j.function.conversion.ShortToByteFunction;
import at.gridtec.lambda4j.function.conversion.ShortToCharFunction;
import at.gridtec.lambda4j.function.conversion.ShortToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.ShortToFloatFunction;
import at.gridtec.lambda4j.function.conversion.ShortToIntFunction;
import at.gridtec.lambda4j.function.conversion.ShortToLongFunction;
import at.gridtec.lambda4j.function.to.ToFloatFunction;
import at.gridtec.lambda4j.function.to.ToShortFunction;
import at.gridtec.lambda4j.function.tri.TriFunction;
import at.gridtec.lambda4j.function.tri.conversion.TriBooleanToShortFunction;
import at.gridtec.lambda4j.function.tri.conversion.TriByteToShortFunction;
import at.gridtec.lambda4j.function.tri.conversion.TriCharToShortFunction;
import at.gridtec.lambda4j.function.tri.conversion.TriDoubleToShortFunction;
import at.gridtec.lambda4j.function.tri.conversion.TriFloatToShortFunction;
import at.gridtec.lambda4j.function.tri.conversion.TriIntToShortFunction;
import at.gridtec.lambda4j.function.tri.conversion.TriLongToShortFunction;
import at.gridtec.lambda4j.function.tri.to.ToShortTriFunction;
import at.gridtec.lambda4j.operator.ternary.ShortTernaryOperator;
import at.gridtec.lambda4j.operator.unary.FloatUnaryOperator;
import at.gridtec.lambda4j.operator.unary.ShortUnaryOperator;
import at.gridtec.lambda4j.predicate.ShortPredicate;
import at.gridtec.lambda4j.predicate.tri.obj.ObjBiFloatPredicate;
import org.apache.commons.lang3.tuple.Triple;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.DoubleFunction;
import java.util.function.Function;
import java.util.function.IntFunction;
import java.util.function.LongFunction;
/**
* Represents an operation that accepts one object-valued and two {@code float}-valued input arguments and produces a
* {@code short}-valued result.
* This is a (reference, float, float) specialization of {@link TriFunction}.
* <p>
* This is a {@link FunctionalInterface} whose functional method is {@link #applyAsShort(Object, float, float)}.
*
* @param <T> The type of the first argument to the function
* @see TriFunction
*/
@SuppressWarnings("unused")
@FunctionalInterface
public interface ObjBiFloatToShortFunction<T> extends Lambda {
/**
 * Returns the given lambda expression or method reference as an
 * {@link ObjBiFloatToShortFunction}, thereby fixing its target type.
 *
 * @param <T> the type of the first argument to the function
 * @param expression a lambda expression or (typically) a method reference
 * @return the expression itself, typed as {@code ObjBiFloatToShortFunction}
 * @implNote A {@code null} argument is allowed and yields {@code null}.
 * @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html#syntax">Lambda
 * Expression</a>
 * @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/methodreferences.html">Method Reference</a>
 */
static <T> ObjBiFloatToShortFunction<T> of(@Nullable final ObjBiFloatToShortFunction<T> expression) {
    // Identity: the cast to the target type happens at the call site.
    return expression;
}
/**
 * Invokes the given {@link ObjBiFloatToShortFunction} with the supplied
 * arguments and returns its result.
 *
 * @param <T> the type of the first argument to the function
 * @param function the function to invoke; must not be {@code null}
 * @param t the first argument
 * @param value1 the second argument
 * @param value2 the third argument
 * @return the result of {@code function}
 * @throws NullPointerException if {@code function} is {@code null}
 */
static <T> short call(@Nonnull final ObjBiFloatToShortFunction<? super T> function, T t, float value1,
        float value2) {
    Objects.requireNonNull(function);
    return function.applyAsShort(t, value1, value2);
}
/**
 * Creates an {@link ObjBiFloatToShortFunction} which ignores both
 * {@code float} parameters and applies the given {@link ToShortFunction}
 * to the {@code first} parameter only.
 *
 * @param <T> the type of the first argument to the function
 * @param function the function applied to the {@code first} parameter
 * @return the adapting {@code ObjBiFloatToShortFunction}
 * @throws NullPointerException if {@code function} is {@code null}
 */
@Nonnull
static <T> ObjBiFloatToShortFunction<T> onlyFirst(@Nonnull final ToShortFunction<? super T> function) {
    Objects.requireNonNull(function);
    return (first, second, third) -> function.applyAsShort(first);
}
/**
 * Creates an {@link ObjBiFloatToShortFunction} which ignores the object and
 * third parameters and applies the given {@link FloatToShortFunction} to the
 * {@code second} parameter only.
 *
 * @param <T> the type of the first argument to the function
 * @param function the function applied to the {@code second} parameter
 * @return the adapting {@code ObjBiFloatToShortFunction}
 * @throws NullPointerException if {@code function} is {@code null}
 */
@Nonnull
static <T> ObjBiFloatToShortFunction<T> onlySecond(@Nonnull final FloatToShortFunction function) {
    Objects.requireNonNull(function);
    return (first, second, third) -> function.applyAsShort(second);
}
/**
 * Creates an {@link ObjBiFloatToShortFunction} which ignores the object and
 * second parameters and applies the given {@link FloatToShortFunction} to
 * the {@code third} parameter only.
 *
 * @param <T> the type of the first argument to the function
 * @param function the function applied to the {@code third} parameter
 * @return the adapting {@code ObjBiFloatToShortFunction}
 * @throws NullPointerException if {@code function} is {@code null}
 */
@Nonnull
static <T> ObjBiFloatToShortFunction<T> onlyThird(@Nonnull final FloatToShortFunction function) {
    Objects.requireNonNull(function);
    return (first, second, third) -> function.applyAsShort(third);
}
/**
 * Creates an {@link ObjBiFloatToShortFunction} which ignores its arguments
 * and always returns the given value.
 *
 * @param <T> the type of the first argument to the function
 * @param ret the constant return value
 * @return a function that always yields {@code ret}
 */
@Nonnull
static <T> ObjBiFloatToShortFunction<T> constant(short ret) {
    return (first, second, third) -> ret;
}
/**
 * Applies this function to the given arguments and returns the
 * {@code short}-valued result. This is the functional method of this
 * interface.
 *
 * @param t The first argument to the function
 * @param value1 The second argument to the function
 * @param value2 The third argument to the function
 * @return The return value from the function, which is its result.
 */
short applyAsShort(T t, float value1, float value2);
/**
 * Partially applies this function by fixing the object argument, producing
 * a {@link BiFloatToShortFunction} over the remaining two arguments.
 *
 * @param t the first argument to fix
 * @return the partially applied {@code BiFloatToShortFunction}
 */
@Nonnull
default BiFloatToShortFunction papplyAsShort(T t) {
    return (second, third) -> applyAsShort(t, second, third);
}
/**
 * Partially applies this function by fixing the object argument and the
 * second argument, producing a {@link FloatToShortFunction} over the
 * remaining argument.
 *
 * @param t the first argument to fix
 * @param value1 the second argument to fix
 * @return the partially applied {@code FloatToShortFunction}
 */
@Nonnull
default FloatToShortFunction papplyAsShort(T t, float value1) {
    return third -> applyAsShort(t, value1, third);
}
/**
 * Partially applies this function by fixing the second argument, producing
 * an {@link ObjFloatToShortFunction} over the remaining two arguments.
 *
 * @param value1 the second argument to fix
 * @return the partially applied {@code ObjFloatToShortFunction}
 */
@Nonnull
default ObjFloatToShortFunction<T> papplyAsShort(float value1) {
    return (first, third) -> applyAsShort(first, value1, third);
}
/**
 * Partially applies this function by fixing both {@code float} arguments,
 * producing a {@link ToShortFunction} over the object argument.
 *
 * @param value1 the second argument to fix
 * @param value2 the third argument to fix
 * @return the partially applied {@code ToShortFunction}
 */
@Nonnull
default ToShortFunction<T> papplyAsShort(float value1, float value2) {
    return first -> applyAsShort(first, value1, value2);
}
/**
 * Returns the number of arguments this function accepts.
 *
 * @return always {@code 3}
 * @implSpec The default implementation always returns {@code 3}.
 */
@Nonnegative
default int arity() {
    return 3;
}
/**
 * Returns a composed {@link ToShortTriFunction} that first transforms each
 * of its three inputs with the corresponding {@code before} function and
 * then applies this function to the transformed values. Exceptions thrown
 * by either operation are relayed to the caller.
 *
 * @param <A> the type of the first input of the composed function
 * @param <B> the type of the second input of the composed function
 * @param <C> the type of the third input of the composed function
 * @param before1 the function applied to the first input
 * @param before2 the function applied to the second input
 * @param before3 the function applied to the third input
 * @return the composed {@code ToShortTriFunction}
 * @throws NullPointerException if any argument is {@code null}
 * @implSpec The input argument of this method is able to handle every type.
 */
@Nonnull
default <A, B, C> ToShortTriFunction<A, B, C> compose(@Nonnull final Function<? super A, ? extends T> before1,
        @Nonnull final ToFloatFunction<? super B> before2, @Nonnull final ToFloatFunction<? super C> before3) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    Objects.requireNonNull(before3);
    return (x, y, z) -> applyAsShort(before1.apply(x), before2.applyAsFloat(y), before3.applyAsFloat(z));
}
/**
 * Returns a composed {@link TriBooleanToShortFunction} that first transforms
 * each {@code boolean} input with the corresponding {@code before} function
 * and then applies this function to the transformed values. Exceptions
 * thrown by either operation are relayed to the caller.
 *
 * @param before1 the function applied to the first input
 * @param before2 the function applied to the second input
 * @param before3 the function applied to the third input
 * @return the composed {@code TriBooleanToShortFunction}
 * @throws NullPointerException if any argument is {@code null}
 * @implSpec The input arguments of this method are able to handle primitive
 * values; in this case {@code boolean}.
 */
@Nonnull
default TriBooleanToShortFunction composeFromBoolean(@Nonnull final BooleanFunction<? extends T> before1,
        @Nonnull final BooleanToFloatFunction before2, @Nonnull final BooleanToFloatFunction before3) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    Objects.requireNonNull(before3);
    return (in1, in2, in3) -> applyAsShort(before1.apply(in1), before2.applyAsFloat(in2),
            before3.applyAsFloat(in3));
}
/**
 * Returns a composed {@link TriByteToShortFunction} that first transforms
 * each {@code byte} input with the corresponding {@code before} function
 * and then applies this function to the transformed values. Exceptions
 * thrown by either operation are relayed to the caller.
 *
 * @param before1 the function applied to the first input
 * @param before2 the function applied to the second input
 * @param before3 the function applied to the third input
 * @return the composed {@code TriByteToShortFunction}
 * @throws NullPointerException if any argument is {@code null}
 * @implSpec The input arguments of this method are able to handle primitive
 * values; in this case {@code byte}.
 */
@Nonnull
default TriByteToShortFunction composeFromByte(@Nonnull final ByteFunction<? extends T> before1,
        @Nonnull final ByteToFloatFunction before2, @Nonnull final ByteToFloatFunction before3) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    Objects.requireNonNull(before3);
    return (in1, in2, in3) -> applyAsShort(before1.apply(in1), before2.applyAsFloat(in2),
            before3.applyAsFloat(in3));
}
/**
 * Returns a composed {@link TriCharToShortFunction} that first transforms
 * each {@code char} input with the corresponding {@code before} function
 * and then applies this function to the transformed values. Exceptions
 * thrown by either operation are relayed to the caller.
 *
 * @param before1 the function applied to the first input
 * @param before2 the function applied to the second input
 * @param before3 the function applied to the third input
 * @return the composed {@code TriCharToShortFunction}
 * @throws NullPointerException if any argument is {@code null}
 * @implSpec The input arguments of this method are able to handle primitive
 * values; in this case {@code char}.
 */
@Nonnull
default TriCharToShortFunction composeFromChar(@Nonnull final CharFunction<? extends T> before1,
        @Nonnull final CharToFloatFunction before2, @Nonnull final CharToFloatFunction before3) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    Objects.requireNonNull(before3);
    return (in1, in2, in3) -> applyAsShort(before1.apply(in1), before2.applyAsFloat(in2),
            before3.applyAsFloat(in3));
}
/**
 * Returns a composed {@link TriDoubleToShortFunction} that first transforms
 * each {@code double} input with the corresponding {@code before} function
 * and then applies this function to the transformed values. Exceptions
 * thrown by either operation are relayed to the caller.
 *
 * @param before1 the function applied to the first input
 * @param before2 the function applied to the second input
 * @param before3 the function applied to the third input
 * @return the composed {@code TriDoubleToShortFunction}
 * @throws NullPointerException if any argument is {@code null}
 * @implSpec The input arguments of this method are able to handle primitive
 * values; in this case {@code double}.
 */
@Nonnull
default TriDoubleToShortFunction composeFromDouble(@Nonnull final DoubleFunction<? extends T> before1,
        @Nonnull final DoubleToFloatFunction before2, @Nonnull final DoubleToFloatFunction before3) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    Objects.requireNonNull(before3);
    return (in1, in2, in3) -> applyAsShort(before1.apply(in1), before2.applyAsFloat(in2),
            before3.applyAsFloat(in3));
}
/**
 * Returns a composed {@link TriFloatToShortFunction} that first transforms
 * each {@code float} input with the corresponding {@code before} function or
 * operator and then applies this function to the transformed values.
 * Exceptions thrown by either operation are relayed to the caller.
 *
 * @param before1 the function applied to the first input
 * @param before2 the operator applied to the second input
 * @param before3 the operator applied to the third input
 * @return the composed {@code TriFloatToShortFunction}
 * @throws NullPointerException if any argument is {@code null}
 * @implSpec The input arguments of this method are able to handle primitive
 * values; in this case {@code float}.
 */
@Nonnull
default TriFloatToShortFunction composeFromFloat(@Nonnull final FloatFunction<? extends T> before1,
        @Nonnull final FloatUnaryOperator before2, @Nonnull final FloatUnaryOperator before3) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    Objects.requireNonNull(before3);
    return (in1, in2, in3) -> applyAsShort(before1.apply(in1), before2.applyAsFloat(in2),
            before3.applyAsFloat(in3));
}
/**
 * Returns a composed {@link TriIntToShortFunction} that first transforms
 * each {@code int} input with the corresponding {@code before} function
 * and then applies this function to the transformed values. Exceptions
 * thrown by either operation are relayed to the caller.
 *
 * @param before1 the function applied to the first input
 * @param before2 the function applied to the second input
 * @param before3 the function applied to the third input
 * @return the composed {@code TriIntToShortFunction}
 * @throws NullPointerException if any argument is {@code null}
 * @implSpec The input arguments of this method are able to handle primitive
 * values; in this case {@code int}.
 */
@Nonnull
default TriIntToShortFunction composeFromInt(@Nonnull final IntFunction<? extends T> before1,
        @Nonnull final IntToFloatFunction before2, @Nonnull final IntToFloatFunction before3) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    Objects.requireNonNull(before3);
    return (in1, in2, in3) -> applyAsShort(before1.apply(in1), before2.applyAsFloat(in2),
            before3.applyAsFloat(in3));
}
/**
 * Returns a composed {@link TriLongToShortFunction} that first transforms
 * each {@code long} input with the corresponding {@code before} function
 * and then applies this function to the transformed values. Exceptions
 * thrown by either operation are relayed to the caller.
 *
 * @param before1 the function applied to the first input
 * @param before2 the function applied to the second input
 * @param before3 the function applied to the third input
 * @return the composed {@code TriLongToShortFunction}
 * @throws NullPointerException if any argument is {@code null}
 * @implSpec The input arguments of this method are able to handle primitive
 * values; in this case {@code long}.
 */
@Nonnull
default TriLongToShortFunction composeFromLong(@Nonnull final LongFunction<? extends T> before1,
        @Nonnull final LongToFloatFunction before2, @Nonnull final LongToFloatFunction before3) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    Objects.requireNonNull(before3);
    return (in1, in2, in3) -> applyAsShort(before1.apply(in1), before2.applyAsFloat(in2),
            before3.applyAsFloat(in3));
}
/**
 * Returns a composed {@link ShortTernaryOperator} that first transforms
 * each {@code short} input with the corresponding {@code before} function
 * and then applies this function to the transformed values. Exceptions
 * thrown by either operation are relayed to the caller.
 *
 * @param before1 the function applied to the first input
 * @param before2 the function applied to the second input
 * @param before3 the function applied to the third input
 * @return the composed {@code ShortTernaryOperator}
 * @throws NullPointerException if any argument is {@code null}
 * @implSpec The input arguments of this method are able to handle primitive
 * values; in this case {@code short}.
 */
@Nonnull
default ShortTernaryOperator composeFromShort(@Nonnull final ShortFunction<? extends T> before1,
        @Nonnull final ShortToFloatFunction before2, @Nonnull final ShortToFloatFunction before3) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    Objects.requireNonNull(before3);
    return (in1, in2, in3) -> applyAsShort(before1.apply(in1), before2.applyAsFloat(in2),
            before3.applyAsFloat(in3));
}
/**
 * Returns a composed {@link ObjBiFloatFunction} that first applies this
 * function and then applies the {@code after} function to the result.
 * Exceptions thrown by either operation are relayed to the caller.
 *
 * @param <S> the return type of {@code after} and of the composed function
 * @param after the function applied to this function's result
 * @return the composed {@code ObjBiFloatFunction}
 * @throws NullPointerException if {@code after} is {@code null}
 * @implSpec The input argument of this method is able to return every type.
 */
@Nonnull
default <S> ObjBiFloatFunction<T, S> andThen(@Nonnull final ShortFunction<? extends S> after) {
    Objects.requireNonNull(after);
    return (first, second, third) -> after.apply(applyAsShort(first, second, third));
}
/**
 * Returns a composed {@link ObjBiFloatPredicate} that first applies this
 * function and then tests the result with the {@code after} predicate.
 * Exceptions thrown by either operation are relayed to the caller.
 *
 * @param after the predicate applied to this function's result
 * @return the composed {@code ObjBiFloatPredicate}
 * @throws NullPointerException if {@code after} is {@code null}
 * @implSpec The input argument of this method is able to return primitive
 * values; in this case {@code boolean}.
 */
@Nonnull
default ObjBiFloatPredicate<T> andThenToBoolean(@Nonnull final ShortPredicate after) {
    Objects.requireNonNull(after);
    return (first, second, third) -> after.test(applyAsShort(first, second, third));
}
/**
 * Returns a composed {@link ObjBiFloatToByteFunction} that first applies
 * this function and then applies the {@code after} function to the result.
 * Exceptions thrown by either operation are relayed to the caller.
 *
 * @param after the function applied to this function's result
 * @return the composed {@code ObjBiFloatToByteFunction}
 * @throws NullPointerException if {@code after} is {@code null}
 * @implSpec The input argument of this method is able to return primitive
 * values; in this case {@code byte}.
 */
@Nonnull
default ObjBiFloatToByteFunction<T> andThenToByte(@Nonnull final ShortToByteFunction after) {
    Objects.requireNonNull(after);
    return (first, second, third) -> after.applyAsByte(applyAsShort(first, second, third));
}
/**
 * Returns a composed {@link ObjBiFloatToCharFunction} that first applies
 * this function and then applies the {@code after} function to the result.
 * Exceptions thrown by either operation are relayed to the caller.
 *
 * @param after the function applied to this function's result
 * @return the composed {@code ObjBiFloatToCharFunction}
 * @throws NullPointerException if {@code after} is {@code null}
 * @implSpec The input argument of this method is able to return primitive
 * values; in this case {@code char}.
 */
@Nonnull
default ObjBiFloatToCharFunction<T> andThenToChar(@Nonnull final ShortToCharFunction after) {
    Objects.requireNonNull(after);
    return (first, second, third) -> after.applyAsChar(applyAsShort(first, second, third));
}
/**
 * Returns a composed {@link ObjBiFloatToDoubleFunction} that first applies this function to its input, and then
 * applies the {@code after} function to the result. If evaluation of either operation throws an exception, it is
 * relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
 * transform this primitive function to an operation returning {@code double}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code ObjBiFloatToDoubleFunction} that first applies this function to its input, and then
 * applies the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * double}.
 */
@Nonnull
default ObjBiFloatToDoubleFunction<T> andThenToDouble(@Nonnull final ShortToDoubleFunction after) {
Objects.requireNonNull(after);
return (t, value1, value2) -> after.applyAsDouble(applyAsShort(t, value1, value2));
}
/**
 * Returns a composed {@link ObjBiFloatToFloatFunction} that first applies this function to its input, and then
 * applies the {@code after} function to the result. If evaluation of either operation throws an exception, it is
 * relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
 * transform this primitive function to an operation returning {@code float}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code ObjBiFloatToFloatFunction} that first applies this function to its input, and then
 * applies the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * float}.
 */
@Nonnull
default ObjBiFloatToFloatFunction<T> andThenToFloat(@Nonnull final ShortToFloatFunction after) {
Objects.requireNonNull(after);
return (t, value1, value2) -> after.applyAsFloat(applyAsShort(t, value1, value2));
}
/**
 * Returns a composed {@link ObjBiFloatToIntFunction} that first applies this function to its input, and then
 * applies the {@code after} function to the result. If evaluation of either operation throws an exception, it is
 * relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
 * transform this primitive function to an operation returning {@code int}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code ObjBiFloatToIntFunction} that first applies this function to its input, and then
 * applies the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * int}.
 */
@Nonnull
default ObjBiFloatToIntFunction<T> andThenToInt(@Nonnull final ShortToIntFunction after) {
Objects.requireNonNull(after);
return (t, value1, value2) -> after.applyAsInt(applyAsShort(t, value1, value2));
}
/**
 * Returns a composed {@link ObjBiFloatToLongFunction} that first applies this function to its input, and then
 * applies the {@code after} function to the result. If evaluation of either operation throws an exception, it is
 * relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
 * transform this primitive function to an operation returning {@code long}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code ObjBiFloatToLongFunction} that first applies this function to its input, and then
 * applies the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * long}.
 */
@Nonnull
default ObjBiFloatToLongFunction<T> andThenToLong(@Nonnull final ShortToLongFunction after) {
Objects.requireNonNull(after);
return (t, value1, value2) -> after.applyAsLong(applyAsShort(t, value1, value2));
}
/**
 * Returns a composed {@link ObjBiFloatToShortFunction} that first applies this function to its input, and then
 * applies the {@code after} operator to the result. If evaluation of either operation throws an exception, it is
 * relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
 * transform this primitive function to an operation returning {@code short}.
 *
 * @param after The operator to apply after this function is applied
 * @return A composed {@code ObjBiFloatToShortFunction} that first applies this function to its input, and then
 * applies the {@code after} operator to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * short}.
 */
@Nonnull
default ObjBiFloatToShortFunction<T> andThenToShort(@Nonnull final ShortUnaryOperator after) {
Objects.requireNonNull(after);
return (t, value1, value2) -> after.applyAsShort(applyAsShort(t, value1, value2));
}
/**
 * Returns a composed {@link ObjBiFloatConsumer} that first applies this function to its input, and then consumes the
 * result using the given {@link ShortConsumer}. If evaluation of either operation throws an exception, it is
 * relayed to the caller of the composed operation.
 *
 * @param consumer The operation which consumes the result from this operation
 * @return A composed {@code ObjBiFloatConsumer} that first applies this function to its input, and then consumes
 * the result using the given {@code ShortConsumer}.
 * @throws NullPointerException If given argument is {@code null}
 */
@Nonnull
default ObjBiFloatConsumer<T> consume(@Nonnull final ShortConsumer consumer) {
Objects.requireNonNull(consumer);
return (t, value1, value2) -> consumer.accept(applyAsShort(t, value1, value2));
}
/**
 * Returns a memoized (caching) version of this {@link ObjBiFloatToShortFunction}. Every distinct triple of input
 * arguments is evaluated at most once; later calls with the same arguments return the cached result instead of
 * computing it again.
 * <p>
 * Unless the function and therefore the used cache will be garbage-collected, it will keep all memoized values
 * forever.
 *
 * @return A memoized (caching) version of this {@code ObjBiFloatToShortFunction}.
 * @implSpec This implementation does not allow the input parameters or return value to be {@code null} for the
 * resulting memoized function, as the cache used internally does not permit {@code null} keys or values.
 * @implNote The returned memoized function can be safely used concurrently from multiple threads which makes it
 * thread-safe.
 */
@Nonnull
default ObjBiFloatToShortFunction<T> memoized() {
    if (isMemoized()) {
        // Already memoized; wrapping again would only add a useless second cache.
        return this;
    }
    final Map<Triple<T, Float, Float>, Short> memo = new ConcurrentHashMap<>();
    final Object mutex = new Object();
    return (ObjBiFloatToShortFunction<T> & Memoized) (t, value1, value2) -> {
        // Serialize cache access so the mapping function never runs twice for the same key.
        synchronized (mutex) {
            return memo.computeIfAbsent(Triple.of(t, value1, value2),
                    key -> applyAsShort(key.getLeft(), key.getMiddle(), key.getRight()));
        }
    };
}
/**
 * Returns a composed {@link TriFunction} which represents this {@link ObjBiFloatToShortFunction}. Thereby the
 * primitive input argument for this function is autoboxed. This method provides the possibility to use this
 * {@code ObjBiFloatToShortFunction} with methods provided by the {@code JDK}.
 *
 * @return A composed {@code TriFunction} which represents this {@code ObjBiFloatToShortFunction}.
 */
@Nonnull
default TriFunction<T, Float, Float, Short> boxed() {
    // Delegates straight to applyAsShort; boxing of the Float arguments and the Short result is implicit.
    return (t, value1, value2) -> applyAsShort(t, value1, value2);
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.codeStyle;
import com.intellij.application.options.IndentOptionsEditor;
import com.intellij.application.options.SmartIndentOptionsEditor;
import com.intellij.lang.Language;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsCustomizable;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.codeStyle.LanguageCodeStyleSettingsProvider;
import com.intellij.ui.components.JBLabel;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.plugins.groovy.GroovyFileType;
import javax.swing.*;
/**
 * Exposes Groovy-specific code style options (wrapping, spacing, blank lines, indentation)
 * to the IDE settings UI and supplies the preview code samples shown beside them.
 *
 * @author Rustam Vishnyakov
 */
public class GroovyLanguageCodeStyleSettingsProvider extends LanguageCodeStyleSettingsProvider {
// Display names for the three label-indentation styles offered in the indent options editor.
public static final String ABSOLUTE = "Absolute";
public static final String RELATIVE = "Indent statements after label";
public static final String RELATIVE_REVERSED = "Indent labels";
@NotNull
@Override
public Language getLanguage() {
return GroovyFileType.GROOVY_LANGUAGE;
}
// Declares which standard code-style options are visible on each settings page and registers
// Groovy-only custom options; the commented-out names are standard options deliberately hidden
// for Groovy. Falls through to showAllStandardOptions() for any settings type not handled below.
@Override
public void customizeSettings(@NotNull CodeStyleSettingsCustomizable consumer,
@NotNull SettingsType settingsType) {
if (settingsType == SettingsType.WRAPPING_AND_BRACES_SETTINGS) {
consumer.showStandardOptions(
"KEEP_LINE_BREAKS",
"KEEP_FIRST_COLUMN_COMMENT",
"KEEP_CONTROL_STATEMENT_IN_ONE_LINE",
"KEEP_MULTIPLE_EXPRESSIONS_IN_ONE_LINE",
"KEEP_SIMPLE_BLOCKS_IN_ONE_LINE",
"KEEP_SIMPLE_METHODS_IN_ONE_LINE",
"KEEP_SIMPLE_CLASSES_IN_ONE_LINE",
"WRAP_LONG_LINES",
"CLASS_BRACE_STYLE",
"METHOD_BRACE_STYLE",
"BRACE_STYLE",
"EXTENDS_LIST_WRAP",
"ALIGN_MULTILINE_EXTENDS_LIST",
"EXTENDS_KEYWORD_WRAP",
"THROWS_LIST_WRAP",
"ALIGN_MULTILINE_THROWS_LIST",
"ALIGN_THROWS_KEYWORD",
"THROWS_KEYWORD_WRAP",
"METHOD_PARAMETERS_WRAP",
"ALIGN_MULTILINE_PARAMETERS",
"METHOD_PARAMETERS_LPAREN_ON_NEXT_LINE",
"METHOD_PARAMETERS_RPAREN_ON_NEXT_LINE",
"CALL_PARAMETERS_WRAP",
"ALIGN_MULTILINE_PARAMETERS_IN_CALLS",
"PREFER_PARAMETERS_WRAP",
"CALL_PARAMETERS_LPAREN_ON_NEXT_LINE",
"CALL_PARAMETERS_RPAREN_ON_NEXT_LINE",
"ALIGN_MULTILINE_METHOD_BRACKETS",
"METHOD_CALL_CHAIN_WRAP",
"ALIGN_MULTILINE_CHAINED_METHODS",
"ALIGN_GROUP_FIELD_DECLARATIONS",
"IF_BRACE_FORCE",
"ELSE_ON_NEW_LINE",
"SPECIAL_ELSE_IF_TREATMENT",
"FOR_STATEMENT_WRAP",
"ALIGN_MULTILINE_FOR",
"FOR_STATEMENT_LPAREN_ON_NEXT_LINE",
"FOR_STATEMENT_RPAREN_ON_NEXT_LINE",
"FOR_BRACE_FORCE",
"WHILE_BRACE_FORCE",
//"DOWHILE_BRACE_FORCE",
//"WHILE_ON_NEW_LINE",
"INDENT_CASE_FROM_SWITCH",
//"RESOURCE_LIST_WRAP",
//"ALIGN_MULTILINE_RESOURCES",
//"RESOURCE_LIST_LPAREN_ON_NEXT_LINE",
//"RESOURCE_LIST_RPAREN_ON_NEXT_LINE",
"CATCH_ON_NEW_LINE",
"FINALLY_ON_NEW_LINE",
"BINARY_OPERATION_WRAP",
"ALIGN_MULTILINE_BINARY_OPERATION",
//"BINARY_OPERATION_SIGN_ON_NEXT_LINE",
//"ALIGN_MULTILINE_PARENTHESIZED_EXPRESSION",
"PARENTHESES_EXPRESSION_LPAREN_WRAP",
"PARENTHESES_EXPRESSION_RPAREN_WRAP",
"ASSIGNMENT_WRAP",
"ALIGN_MULTILINE_ASSIGNMENT",
//"PLACE_ASSIGNMENT_SIGN_ON_NEXT_LINE",
"TERNARY_OPERATION_WRAP",
"ALIGN_MULTILINE_TERNARY_OPERATION",
//"TERNARY_OPERATION_SIGNS_ON_NEXT_LINE",
//"ARRAY_INITIALIZER_WRAP",
//"ALIGN_MULTILINE_ARRAY_INITIALIZER_EXPRESSION",
//"ARRAY_INITIALIZER_LBRACE_ON_NEXT_LINE",
//"ARRAY_INITIALIZER_RBRACE_ON_NEXT_LINE",
"MODIFIER_LIST_WRAP",
"ASSERT_STATEMENT_WRAP",
//"ASSERT_STATEMENT_COLON_ON_NEXT_LINE",
"CLASS_ANNOTATION_WRAP",
"METHOD_ANNOTATION_WRAP",
"FIELD_ANNOTATION_WRAP",
"PARAMETER_ANNOTATION_WRAP",
"VARIABLE_ANNOTATION_WRAP",
"ENUM_CONSTANTS_WRAP"
);
// Groovy-specific wrapping options stored in GroovyCodeStyleSettings.
consumer.showCustomOption(GroovyCodeStyleSettings.class, "USE_FLYING_GEESE_BRACES", "Use flying geese braces",
CodeStyleSettingsCustomizable.WRAPPING_BRACES);
consumer.showCustomOption(GroovyCodeStyleSettings.class, "ALIGN_MULTILINE_LIST_OR_MAP", "Align when multiple", "List and map literals");
consumer.showCustomOption(GroovyCodeStyleSettings.class, "ALIGN_NAMED_ARGS_IN_MAP", "Align multiline named arguments", "List and map literals");
consumer.showCustomOption(GroovyCodeStyleSettings.class, "IMPORT_ANNOTATION_WRAP", "Import annotations", null,
CodeStyleSettingsCustomizable.OptionAnchor.AFTER, "VARIABLE_ANNOTATION_WRAP",
CodeStyleSettingsCustomizable.WRAP_OPTIONS, CodeStyleSettingsCustomizable.WRAP_VALUES);
return;
}
if (settingsType == SettingsType.SPACING_SETTINGS) {
consumer.showStandardOptions("INSERT_FIRST_SPACE_IN_LINE",
"SPACE_AROUND_ASSIGNMENT_OPERATORS",
"SPACE_AROUND_LOGICAL_OPERATORS",
"SPACE_AROUND_EQUALITY_OPERATORS",
"SPACE_AROUND_RELATIONAL_OPERATORS",
"SPACE_AROUND_BITWISE_OPERATORS",
"SPACE_AROUND_ADDITIVE_OPERATORS",
"SPACE_AROUND_MULTIPLICATIVE_OPERATORS",
"SPACE_AROUND_SHIFT_OPERATORS",
"SPACE_AROUND_UNARY_OPERATOR",
"SPACE_AFTER_COMMA",
"SPACE_AFTER_COMMA_IN_TYPE_ARGUMENTS",
"SPACE_BEFORE_COMMA",
"SPACE_AFTER_SEMICOLON",
"SPACE_BEFORE_SEMICOLON",
"SPACE_WITHIN_PARENTHESES",
"SPACE_WITHIN_EMPTY_METHOD_CALL_PARENTHESES",
"SPACE_WITHIN_METHOD_CALL_PARENTHESES",
"SPACE_WITHIN_METHOD_PARENTHESES",
"SPACE_WITHIN_IF_PARENTHESES",
"SPACE_WITHIN_WHILE_PARENTHESES",
"SPACE_WITHIN_FOR_PARENTHESES",
// "SPACE_WITHIN_TRY_PARENTHESES",
"SPACE_WITHIN_CATCH_PARENTHESES",
"SPACE_WITHIN_SWITCH_PARENTHESES",
"SPACE_WITHIN_SYNCHRONIZED_PARENTHESES",
"SPACE_WITHIN_CAST_PARENTHESES",
"SPACE_WITHIN_BRACKETS",
"SPACE_WITHIN_BRACES",
// "SPACE_WITHIN_ARRAY_INITIALIZER_BRACES",
"SPACE_AFTER_TYPE_CAST",
"SPACE_BEFORE_METHOD_CALL_PARENTHESES",
"SPACE_BEFORE_METHOD_PARENTHESES",
"SPACE_BEFORE_IF_PARENTHESES",
"SPACE_BEFORE_WHILE_PARENTHESES",
"SPACE_BEFORE_FOR_PARENTHESES",
// "SPACE_BEFORE_TRY_PARENTHESES",
"SPACE_BEFORE_CATCH_PARENTHESES",
"SPACE_BEFORE_SWITCH_PARENTHESES",
"SPACE_BEFORE_SYNCHRONIZED_PARENTHESES",
"SPACE_BEFORE_CLASS_LBRACE",
"SPACE_BEFORE_METHOD_LBRACE",
"SPACE_BEFORE_IF_LBRACE",
"SPACE_BEFORE_ELSE_LBRACE",
"SPACE_BEFORE_WHILE_LBRACE",
"SPACE_BEFORE_FOR_LBRACE",
// "SPACE_BEFORE_DO_LBRACE",
"SPACE_BEFORE_SWITCH_LBRACE",
"SPACE_BEFORE_TRY_LBRACE",
"SPACE_BEFORE_CATCH_LBRACE",
"SPACE_BEFORE_FINALLY_LBRACE",
"SPACE_BEFORE_SYNCHRONIZED_LBRACE",
// "SPACE_BEFORE_ARRAY_INITIALIZER_LBRACE",
// "SPACE_BEFORE_ANNOTATION_ARRAY_INITIALIZER_LBRACE",
"SPACE_BEFORE_ELSE_KEYWORD",
// "SPACE_BEFORE_WHILE_KEYWORD",
"SPACE_BEFORE_CATCH_KEYWORD",
"SPACE_BEFORE_FINALLY_KEYWORD",
"SPACE_BEFORE_QUEST",
"SPACE_AFTER_QUEST",
"SPACE_BEFORE_COLON",
"SPACE_AFTER_COLON",
"SPACE_BEFORE_TYPE_PARAMETER_LIST",
"SPACE_BEFORE_ANOTATION_PARAMETER_LIST",
"SPACE_WITHIN_ANNOTATION_PARENTHESES"
);
// Groovy-specific spacing options (named arguments, list/map literals, closures, GString injections).
consumer.showCustomOption(GroovyCodeStyleSettings.class, "SPACE_IN_NAMED_ARGUMENT", "In named argument after ':'",
CodeStyleSettingsCustomizable.SPACES_OTHER);
consumer.showCustomOption(GroovyCodeStyleSettings.class, "SPACE_WITHIN_LIST_OR_MAP", "List and maps literals", CodeStyleSettingsCustomizable.SPACES_WITHIN);
consumer.showCustomOption(GroovyCodeStyleSettings.class, "SPACE_BEFORE_CLOSURE_LBRACE", "Closure left brace in method calls", CodeStyleSettingsCustomizable.SPACES_BEFORE_LEFT_BRACE);
consumer.showCustomOption(GroovyCodeStyleSettings.class, "SPACE_WITHIN_GSTRING_INJECTION_BRACES", "GString injection braces", CodeStyleSettingsCustomizable.SPACES_WITHIN);
consumer.showCustomOption(GroovyCodeStyleSettings.class, "SPACE_WITHIN_TUPLE_EXPRESSION", "Tuple assignment expression",
CodeStyleSettingsCustomizable.SPACES_WITHIN);
return;
}
if (settingsType == SettingsType.BLANK_LINES_SETTINGS) {
consumer.showStandardOptions(
"KEEP_BLANK_LINES_IN_DECLARATIONS",
"KEEP_BLANK_LINES_IN_CODE",
"KEEP_BLANK_LINES_BEFORE_RBRACE",
"BLANK_LINES_BEFORE_PACKAGE",
"BLANK_LINES_AFTER_PACKAGE",
"BLANK_LINES_BEFORE_IMPORTS",
"BLANK_LINES_AFTER_IMPORTS",
"BLANK_LINES_AROUND_CLASS",
"BLANK_LINES_AFTER_CLASS_HEADER",
//"BLANK_LINES_AFTER_ANONYMOUS_CLASS_HEADER",
"BLANK_LINES_AROUND_FIELD_IN_INTERFACE",
"BLANK_LINES_AROUND_FIELD",
"BLANK_LINES_AROUND_METHOD_IN_INTERFACE",
"BLANK_LINES_AROUND_METHOD",
"BLANK_LINES_BEFORE_METHOD_BODY"
);
return;
}
consumer.showAllStandardOptions();
}
// Default Groovy style differs from the common defaults: spaces inside braces and
// simple classes/methods kept on one line.
@Override
public CommonCodeStyleSettings getDefaultCommonSettings() {
CommonCodeStyleSettings defaultSettings = new CommonCodeStyleSettings(GroovyFileType.GROOVY_LANGUAGE);
defaultSettings.initIndentOptions();
defaultSettings.SPACE_WITHIN_BRACES = true;
defaultSettings.KEEP_SIMPLE_CLASSES_IN_ONE_LINE = true;
defaultSettings.KEEP_SIMPLE_METHODS_IN_ONE_LINE = true;
return defaultSettings;
}
// Returns the Groovy preview snippet rendered next to each settings page.
@Override
public String getCodeSample(@NotNull SettingsType settingsType) {
switch (settingsType) {
case INDENT_SETTINGS: return INDENT_OPTIONS_SAMPLE;
case SPACING_SETTINGS: return SPACING_SAMPLE;
case WRAPPING_AND_BRACES_SETTINGS: return WRAPPING_CODE_SAMPLE;
case BLANK_LINES_SETTINGS: return BLANK_LINE_SAMPLE;
default:
return "";
}
}
// Extends the standard smart-indent editor with Groovy label-indent controls:
// a size field plus a combo box choosing between the three styles declared above.
@Override
public IndentOptionsEditor getIndentOptionsEditor() {
return new SmartIndentOptionsEditor() {
private JTextField myLabelIndent;
private JLabel myLabelIndentLabel;
private JComboBox myLabelIndentStyle;
private JBLabel myStyleLabel;
protected void addComponents() {
super.addComponents();
myLabelIndent = new JTextField(4);
add(myLabelIndentLabel = new JLabel(ApplicationBundle.message("editbox.indent.label.indent")), myLabelIndent);
myStyleLabel = new JBLabel("Label indent style:");
myLabelIndentStyle = new JComboBox(new Object[] {ABSOLUTE, RELATIVE, RELATIVE_REVERSED});
add(myStyleLabel, myLabelIndentStyle);
}
public boolean isModified(final CodeStyleSettings settings, final CommonCodeStyleSettings.IndentOptions options) {
boolean isModified = super.isModified(settings, options);
isModified |= isFieldModified(myLabelIndent, options.LABEL_INDENT_SIZE);
isModified |= isLabelStyleModified(options.LABEL_INDENT_ABSOLUTE, settings.getCustomSettings(GroovyCodeStyleSettings.class).INDENT_LABEL_BLOCKS);
return isModified;
}
// The two flags encode a three-way choice: absolute wins over relative,
// and neither flag set means the "indent labels" (reversed) style.
private boolean isLabelStyleModified(boolean absolute, boolean relative) {
if (absolute) {
return !ABSOLUTE.equals(myLabelIndentStyle.getSelectedItem());
}
else if (relative) {
return !RELATIVE.equals(myLabelIndentStyle.getSelectedItem());
}
else {
return !RELATIVE_REVERSED.equals(myLabelIndentStyle.getSelectedItem());
}
}
public void apply(final CodeStyleSettings settings, final CommonCodeStyleSettings.IndentOptions options) {
super.apply(settings, options);
// NOTE(review): Integer.MIN_VALUE presumably disables the lower-bound check — confirm getFieldValue contract.
options.LABEL_INDENT_SIZE = getFieldValue(myLabelIndent, Integer.MIN_VALUE, options.LABEL_INDENT_SIZE);
options.LABEL_INDENT_ABSOLUTE = ABSOLUTE.equals(myLabelIndentStyle.getSelectedItem());
settings.getCustomSettings(GroovyCodeStyleSettings.class).INDENT_LABEL_BLOCKS = RELATIVE
.equals(myLabelIndentStyle.getSelectedItem());
}
public void reset(@NotNull final CodeStyleSettings settings, @NotNull final CommonCodeStyleSettings.IndentOptions options) {
super.reset(settings, options);
myLabelIndent.setText(Integer.toString(options.LABEL_INDENT_SIZE));
if (options.LABEL_INDENT_ABSOLUTE) {
myLabelIndentStyle.setSelectedItem(ABSOLUTE);
}
else if(settings.getCustomSettings(GroovyCodeStyleSettings.class).INDENT_LABEL_BLOCKS) {
myLabelIndentStyle.setSelectedItem(RELATIVE);
}
else {
myLabelIndentStyle.setSelectedItem(RELATIVE_REVERSED);
}
}
public void setEnabled(final boolean enabled) {
super.setEnabled(enabled);
myLabelIndent.setEnabled(enabled);
myLabelIndentLabel.setEnabled(enabled);
myStyleLabel.setEnabled(enabled);
myLabelIndentStyle.setEnabled(enabled);
}
};
}
// Preview samples below are Groovy source embedded as Java string literals;
// they are display-only and never executed.
private final static String INDENT_OPTIONS_SAMPLE =
/*
"topLevelLabel:\n" +
"foo(42)\n" +
*/
"def foo(int arg) {\n" +
"  label1:\n" +
"  for (i in 1..10) {\n" +
"    label2:\n" +
"    foo(i)\n" +
"  }\n" +
"  return Math.max(arg,\n" +
"      0)\n" +
"}\n\n" +
"class HelloSpock extends spock.lang.Specification {\n" +
"  def \"length of Spock's and his friends' names\"() {\n" +
"    expect:\n" +
"    name.size() == length\n" +
"\n" +
"    where:\n" +
"    name | length | foo\n" +
"    \"Spock\" | 5\n" +
"    \"Kirk\" | 4 | xxx | yyy\n" +
"    \"Scotty\" | 6 |dddddddddd | fff\n" +
"\n" +
"    //aaa\n" +
"    a | b | c\n" +
"  }\n" +
"}\n";
private final static String SPACING_SAMPLE =
"class Foo {\n" +
"  @Annotation(param=\"foo\")\n"+
"  @Ann([1, 2])\n" +
"  public static <T1, T2> void foo(int x, int y) {\n" +
"    for (int i = 0; i < x; i++) {\n" +
"      y += (y ^ 0x123) << 2\n" +
"    }\n" +
"    \n" +
"    10.times {\n" +
"      print it\n" +
"    }\n" +
"    int j = 0\n" +
"    while (j < 10) {\n" +
"      try {\n" +
"        if (0 < x && x < 10) {\n" +
"          while (x != y) {\n" +
"            x = f(x * 3 + 5)\n" +
"          }\n" +
"        } else {\n" +
"          synchronized (this) {\n" +
"            switch (e.getCode()) {\n" +
"              //...\n" +
"            }\n" +
"          }\n" +
"        }\n" +
"      } catch (MyException e) {\n" +
"        logError(method: \"foo\", exception: e)\n" +
"      } finally {\n" +
"        int[] arr = (int[]) g(y)\n" +
"        x = y >= 0 ? arr[y] : -1\n" +
"        y = [1, 2, 3] ?: 4\n" +
"      }\n" +
"    }\n" +
"    def cl = {Math.sin(it)}\n" +
"    print ckl(2) " +
"  }\n" +
"  \n" +
"  def inject(x) {\"cos($x) = ${Math.cos(x)}\"} \n" +
"\n" +
"}";
private static final String WRAPPING_CODE_SAMPLE =
"/*\n" +
" * This is a sample file.\n" +
" */\n" +
"\n" +
"public class ThisIsASampleClass extends C1 implements I1, I2, I3, I4, I5 {\n" +
"  private int f1 = 1\n" +
"  private String field2 = \"\"\n" +
"  public void foo1(int i1, int i2, int i3, int i4, int i5, int i6, int i7) {}\n" +
"  public static void longerMethod() throws Exception1, Exception2, Exception3 {\n" +
"// todo something\n" +
"    int\n" +
"i = 0\n" +
"    int var1 = 1; int var2 = 2\n" +
"    foo1(0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057)\n" +
"    int x = (3 + 4 + 5 + 6) * (7 + 8 + 9 + 10) * (11 + 12 + 13 + 14 + 0xFFFFFFFF)\n" +
"    String s1, s2, s3\n" +
"    s1 = s2 = s3 = \"012345678901456\"\n" +
"    assert i + j + k + l + n+ m <= 2 : \"assert description\"\n" +
"    int y = 2 > 3 ? 7 + 8 + 9 : 11 + 12 + 13\n" +
"    super.getFoo().foo().getBar().bar()\n" +
"\n" +
"    label: \n" +
"    if (2 < 3) return else if (2 > 3) return else return\n" +
"    for (int i = 0; i < 0xFFFFFF; i += 2) System.out.println(i)\n" +
"    print([\n" +
"        l1: expr1,\n" +
"        label2: expr2\n" +
"    ])\n" +
"    while (x < 50000) x++\n" +
"    switch (a) {\n" +
"      case 0:\n" +
"        doCase0()\n" +
"        break\n" +
"      default:\n" +
"        doDefault()\n" +
"    }\n" +
"    try {\n" +
"      doSomething()\n" +
"    } catch (Exception e) {\n" +
"      processException(e)\n" +
"    } finally {\n" +
"      processFinally()\n" +
"    }\n" +
"  }\n" +
"    public static void test() \n" +
"        throws Exception { \n" +
"        foo.foo().bar(\"arg1\", \n" +
"                      \"arg2\") \n" +
"        new Object() {}\n" +
"    } \n" +
"    class TestInnerClass {}\n" +
"    interface TestInnerInterface {}\n" +
"}\n" +
"\n" +
"enum Breed {\n" +
"    Dalmatian(), Labrador(), Dachshund()\n" +
"}\n" +
"\n" +
"@Annotation1 @Annotation2 @Annotation3(param1=\"value1\", param2=\"value2\") @Annotation4 class Foo {\n" +
"    @Annotation1 @Annotation3(param1=\"value1\", param2=\"value2\") public static void foo(){\n" +
"    }\n" +
"    @Annotation1 @Annotation3(param1=\"value1\", param2=\"value2\") public static int myFoo\n" +
"    public void method(@Annotation1 @Annotation3(param1=\"value1\", param2=\"value2\") final int param){\n" +
"        @Annotation1 @Annotation3(param1=\"value1\", param2=\"value2\") final int localVariable\n" +
"    }\n" +
"}";
private static final String BLANK_LINE_SAMPLE =
"/*\n" +
" * This is a sample file.\n" +
" */\n" +
"package com.intellij.samples\n" +
"\n" +
"import com.intellij.idea.Main\n" +
"\n" +
"import javax.swing.*\n" +
"import java.util.Vector\n" +
"\n" +
"public class Foo {\n" +
"  private int field1\n" +
"  private int field2\n" +
"\n" +
"  public void foo1() {\n" +
"    new Runnable() {\n" +
"      public void run() {\n" +
"      }\n" +
"    }\n" +
"  }\n" +
"\n" +
"  public class InnerClass {\n" +
"  }\n" +
"}\n" +
"class AnotherClass {\n" +
"}\n" +
"interface TestInterface {\n" +
"  int MAX = 10\n" +
"  int MIN = 1\n" +
"  def method1()\n" +
"  void method2()\n" +
"}";
}
| |
package com.linkedin.metadata.resources.dataprocess;
import com.linkedin.common.urn.DataProcessUrn;
import com.linkedin.dataprocess.DataProcess;
import com.linkedin.dataprocess.DataProcessInfo;
import com.linkedin.dataprocess.DataProcessKey;
import com.linkedin.metadata.aspect.DataProcessAspect;
import com.linkedin.metadata.dao.BaseLocalDAO;
import com.linkedin.metadata.dao.BaseSearchDAO;
import com.linkedin.metadata.dao.utils.ModelUtils;
import com.linkedin.metadata.query.AutoCompleteResult;
import com.linkedin.metadata.query.Filter;
import com.linkedin.metadata.query.SearchResultMetadata;
import com.linkedin.metadata.query.SortCriterion;
import com.linkedin.metadata.restli.BackfillResult;
import com.linkedin.metadata.restli.BaseSearchableEntityResource;
import com.linkedin.metadata.search.DataProcessDocument;
import com.linkedin.metadata.snapshot.DataProcessSnapshot;
import com.linkedin.parseq.Task;
import com.linkedin.restli.common.ComplexResourceKey;
import com.linkedin.restli.common.EmptyRecord;
import com.linkedin.restli.server.CollectionResult;
import com.linkedin.restli.server.PagingContext;
import com.linkedin.restli.server.annotations.Action;
import com.linkedin.restli.server.annotations.ActionParam;
import com.linkedin.restli.server.annotations.Finder;
import com.linkedin.restli.server.annotations.Optional;
import com.linkedin.restli.server.annotations.PagingContextParam;
import com.linkedin.restli.server.annotations.QueryParam;
import com.linkedin.restli.server.annotations.RestLiCollection;
import com.linkedin.restli.server.annotations.RestMethod;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.inject.Inject;
import javax.inject.Named;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static com.linkedin.metadata.restli.RestliConstants.*;
@RestLiCollection(name = "dataProcesses", namespace = "com.linkedin.dataprocess", keyName = "dataprocess")
public class DataProcesses extends BaseSearchableEntityResource<
    // @formatter:off
    ComplexResourceKey<DataProcessKey, EmptyRecord>,
    DataProcess,
    DataProcessUrn,
    DataProcessSnapshot,
    DataProcessAspect,
    DataProcessDocument> {
  // @formatter:on

  public DataProcesses() {
    super(DataProcessSnapshot.class, DataProcessAspect.class);
  }

  @Inject
  @Named("dataProcessDAO")
  private BaseLocalDAO<DataProcessAspect, DataProcessUrn> _localDAO;

  // Parameterized (was a raw BaseSearchDAO) so getSearchDAO() no longer performs an
  // unchecked conversion.
  @Inject
  @Named("dataProcessSearchDAO")
  private BaseSearchDAO<DataProcessDocument> _esSearchDAO;

  @Nonnull
  @Override
  protected BaseSearchDAO<DataProcessDocument> getSearchDAO() {
    return _esSearchDAO;
  }

  @Nonnull
  @Override
  protected BaseLocalDAO<DataProcessAspect, DataProcessUrn> getLocalDAO() {
    return _localDAO;
  }

  @Nonnull
  @Override
  protected DataProcessUrn createUrnFromString(@Nonnull String urnString) throws Exception {
    return DataProcessUrn.createFromString(urnString);
  }

  /** Converts a rest.li resource key into the corresponding {@link DataProcessUrn}. */
  @Nonnull
  @Override
  protected DataProcessUrn toUrn(@Nonnull ComplexResourceKey<DataProcessKey, EmptyRecord> key) {
    return new DataProcessUrn(key.getKey().getOrchestrator(), key.getKey().getName(), key.getKey().getOrigin());
  }

  /** Converts a {@link DataProcessUrn} back into a rest.li resource key. */
  @Nonnull
  @Override
  protected ComplexResourceKey<DataProcessKey, EmptyRecord> toKey(@Nonnull DataProcessUrn urn) {
    return new ComplexResourceKey<>(
        new DataProcessKey()
            .setOrchestrator(urn.getOrchestratorEntity())
            .setName(urn.getNameEntity())
            .setOrigin(urn.getOriginEntity()),
        new EmptyRecord());
  }

  /**
   * Builds the value object returned to clients from a snapshot: key fields come from the URN,
   * and a {@link DataProcessInfo} aspect is attached when the snapshot carries one.
   */
  @Nonnull
  @Override
  protected DataProcess toValue(@Nonnull DataProcessSnapshot processSnapshot) {
    final DataProcess value = new DataProcess()
        .setOrchestrator(processSnapshot.getUrn().getOrchestratorEntity())
        .setName(processSnapshot.getUrn().getNameEntity())
        .setOrigin(processSnapshot.getUrn().getOriginEntity());
    ModelUtils.getAspectsFromSnapshot(processSnapshot).forEach(aspect -> {
      if (aspect instanceof DataProcessInfo) {
        // Plain cast is idiomatic here; Class.cast added nothing since the type is statically known.
        value.setDataProcessInfo((DataProcessInfo) aspect);
      }
    });
    return value;
  }

  /**
   * Copies the present fields of the value's {@link DataProcessInfo} into a fresh aspect instance.
   * Assumes the value carries a DataProcessInfo (callers pass fully-populated values);
   * NOTE(review): confirm upstream guarantees this is non-null.
   */
  @Nonnull
  private DataProcessInfo getDataProcessInfoAspect(@Nonnull DataProcess process) {
    // Hoist the repeated getter call instead of re-reading it for every field.
    final DataProcessInfo source = process.getDataProcessInfo();
    final DataProcessInfo processInfo = new DataProcessInfo();
    if (source.hasInputs()) {
      processInfo.setInputs(source.getInputs());
    }
    if (source.hasOutputs()) {
      processInfo.setOutputs(source.getOutputs());
    }
    return processInfo;
  }

  /** Wraps the value's info aspect into a snapshot for the given URN. */
  @Nonnull
  @Override
  protected DataProcessSnapshot toSnapshot(@Nonnull DataProcess process, @Nonnull DataProcessUrn urn) {
    final List<DataProcessAspect> aspects = new ArrayList<>();
    aspects.add(ModelUtils.newAspectUnion(DataProcessAspect.class, getDataProcessInfoAspect(process)));
    return ModelUtils.newSnapshot(DataProcessSnapshot.class, urn, aspects);
  }

  // The rest.li endpoints below simply re-expose the base-class implementations so the
  // framework picks up the annotations; all logic lives in BaseSearchableEntityResource.

  @RestMethod.Get
  @Override
  @Nonnull
  public Task<DataProcess> get(@Nonnull ComplexResourceKey<DataProcessKey, EmptyRecord> key,
      @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) {
    return super.get(key, aspectNames);
  }

  @RestMethod.BatchGet
  @Override
  @Nonnull
  public Task<Map<ComplexResourceKey<DataProcessKey, EmptyRecord>, DataProcess>> batchGet(
      @Nonnull Set<ComplexResourceKey<DataProcessKey, EmptyRecord>> keys,
      @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) {
    return super.batchGet(keys, aspectNames);
  }

  @RestMethod.GetAll
  @Nonnull
  public Task<List<DataProcess>> getAll(@PagingContextParam @Nonnull PagingContext pagingContext,
      @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames,
      @QueryParam(PARAM_FILTER) @Optional @Nullable Filter filter,
      @QueryParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion) {
    return super.getAll(pagingContext, aspectNames, filter, sortCriterion);
  }

  @Finder(FINDER_SEARCH)
  @Override
  @Nonnull
  public Task<CollectionResult<DataProcess, SearchResultMetadata>> search(@QueryParam(PARAM_INPUT) @Nonnull String input,
      @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames,
      @QueryParam(PARAM_FILTER) @Optional @Nullable Filter filter,
      @QueryParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion,
      @PagingContextParam @Nonnull PagingContext pagingContext) {
    return super.search(input, aspectNames, filter, sortCriterion, pagingContext);
  }

  @Action(name = ACTION_AUTOCOMPLETE)
  @Override
  @Nonnull
  public Task<AutoCompleteResult> autocomplete(@ActionParam(PARAM_QUERY) @Nonnull String query,
      @ActionParam(PARAM_FIELD) @Nullable String field, @ActionParam(PARAM_FILTER) @Nullable Filter filter,
      @ActionParam(PARAM_LIMIT) int limit) {
    return super.autocomplete(query, field, filter, limit);
  }

  @Action(name = ACTION_INGEST)
  @Override
  @Nonnull
  public Task<Void> ingest(@ActionParam(PARAM_SNAPSHOT) @Nonnull DataProcessSnapshot snapshot) {
    return super.ingest(snapshot);
  }

  @Action(name = ACTION_GET_SNAPSHOT)
  @Override
  @Nonnull
  public Task<DataProcessSnapshot> getSnapshot(@ActionParam(PARAM_URN) @Nonnull String urnString,
      @ActionParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) {
    return super.getSnapshot(urnString, aspectNames);
  }

  @Action(name = ACTION_BACKFILL)
  @Override
  @Nonnull
  public Task<BackfillResult> backfill(@ActionParam(PARAM_URN) @Nonnull String urnString,
      @ActionParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) {
    return super.backfill(urnString, aspectNames);
  }
}
| |
package ca.uhn.fhir.rest.server;
import static org.junit.Assert.*;
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry;
import ca.uhn.fhir.model.dstu2.resource.OperationOutcome;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.annotation.Transaction;
import ca.uhn.fhir.rest.annotation.TransactionParam;
import ca.uhn.fhir.util.PortUtil;
/**
* Created by dsotnikov on 2/25/2014.
*/
/**
 * Server-side tests for FHIR {@code transaction} handling when the
 * {@code @Transaction} provider method takes the {@link Bundle} resource
 * parameter directly. A local Jetty server hosting {@link DummyProvider} is
 * started once per class; each test POSTs a transaction bundle and checks the
 * per-entry response locations.
 *
 * Created by dsotnikov on 2/25/2014.
 */
public class TransactionWithBundleResourceParamTest {

    // NOTE(review): empty placeholder test — always passes; consider removing
    // or implementing it.
    @Test
    public void testIt() {
    }

    private static CloseableHttpClient ourClient;
    // FhirContext is expensive to build; one shared instance serves all tests.
    private static FhirContext ourCtx = new FhirContext();
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TransactionWithBundleResourceParamTest.class);
    private static int ourPort;
    // When true, DummyProvider prepends an OperationOutcome entry to its response.
    private static boolean ourReturnOperationOutcome;
    private static Server ourServer;

    @Before
    public void before() {
        ourReturnOperationOutcome = false;
    }

    /**
     * Two creates plus one DELETE (addressed by absolute URL) must yield three
     * response entries whose locations follow the provider's 8n/9n id scheme.
     */
    @Test
    public void testTransaction() throws Exception {
        Bundle b = new Bundle();
        Patient p1 = new Patient();
        p1.addName().addFamily("Family1");
        Entry entry = b.addEntry();
        p1.getId().setValue("1");
        entry.setResource(p1);
        Patient p2 = new Patient();
        p2.addName().addFamily("Family2");
        entry = b.addEntry();
        p2.getId().setValue("2");
        entry.setResource(p2);
        Entry deletedEntry = b.addEntry();
        deletedEntry.getTransaction().setMethod(HTTPVerbEnum.DELETE);
        deletedEntry.getTransaction().setUrl("http://base.com/Patient/123");
        String bundleString = ourCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(b);
        ourLog.info(bundleString);

        HttpPost httpPost = new HttpPost("http://localhost:" + ourPort + "/");
        httpPost.addHeader("Accept", Constants.CT_ATOM_XML + "; pretty=true");
        httpPost.setEntity(new StringEntity(bundleString, ContentType.create(Constants.CT_ATOM_XML, "UTF-8")));
        HttpResponse status = ourClient.execute(httpPost);
        String responseContent = IOUtils.toString(status.getEntity().getContent());
        IOUtils.closeQuietly(status.getEntity().getContent());
        assertEquals(200, status.getStatusLine().getStatusCode());
        ourLog.info(responseContent);

        Bundle bundle = ourCtx.newXmlParser().parseResource(Bundle.class, responseContent);
        assertEquals(3, bundle.getEntry().size());
        // Creates get ids 81/82; the DELETE echoes the id from its URL (123);
        // history versions run 91/92/93 in entry order.
        Entry entry0 = bundle.getEntry().get(0);
        assertEquals("Patient/81/_history/91", entry0.getTransactionResponse().getLocation());
        Entry entry1 = bundle.getEntry().get(1);
        assertEquals("Patient/82/_history/92", entry1.getTransactionResponse().getLocation());
        Entry entry2 = bundle.getEntry().get(2);
        assertEquals("Patient/123/_history/93", entry2.getTransactionResponse().getLocation());
    }

    /**
     * With {@link #ourReturnOperationOutcome} set, the response bundle gains a
     * leading OperationOutcome entry, shifting the resource entries by one.
     */
    @Test
    public void testTransactionWithOperationOutcome() throws Exception {
        ourReturnOperationOutcome = true;
        Bundle b = new Bundle();
        Patient p1 = new Patient();
        p1.addName().addFamily("Family1");
        Entry entry = b.addEntry();
        p1.getId().setValue("1");
        entry.setResource(p1);
        Patient p2 = new Patient();
        p2.addName().addFamily("Family2");
        entry = b.addEntry();
        p2.getId().setValue("2");
        entry.setResource(p2);
        Entry deletedEntry = b.addEntry();
        deletedEntry.getTransaction().setMethod(HTTPVerbEnum.DELETE);
        deletedEntry.getTransaction().setUrl(new IdDt("Patient/3"));
        String bundleString = ourCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(b);
        ourLog.info(bundleString);

        String base = "http://localhost:" + ourPort + "/";
        HttpPost httpPost = new HttpPost(base);
        httpPost.addHeader("Accept", Constants.CT_ATOM_XML + "; pretty=true");
        httpPost.setEntity(new StringEntity(bundleString, ContentType.create(Constants.CT_ATOM_XML, "UTF-8")));
        HttpResponse status = ourClient.execute(httpPost);
        String responseContent = IOUtils.toString(status.getEntity().getContent());
        IOUtils.closeQuietly(status.getEntity().getContent());
        assertEquals(200, status.getStatusLine().getStatusCode());
        ourLog.info(responseContent);

        // Reuse the shared context instead of constructing a new FhirContext.
        Bundle bundle = ourCtx.newXmlParser().parseResource(Bundle.class, responseContent);
        assertEquals(4, bundle.getEntry().size());
        assertEquals(OperationOutcome.class, bundle.getEntry().get(0).getResource().getClass());
        Entry entry0 = bundle.getEntry().get(1);
        assertEquals("Patient/81/_history/91", entry0.getTransactionResponse().getLocation());
        Entry entry1 = bundle.getEntry().get(2);
        assertEquals("Patient/82/_history/92", entry1.getTransactionResponse().getLocation());
        Entry entry2 = bundle.getEntry().get(3);
        assertEquals("Patient/3/_history/93", entry2.getTransactionResponse().getLocation());
    }

    @AfterClass
    public static void afterClass() throws Exception {
        ourServer.stop();
    }

    @BeforeClass
    public static void beforeClass() throws Exception {
        // Embedded Jetty server on a free port, serving DummyProvider at "/".
        ourPort = PortUtil.findFreePort();
        ourServer = new Server(ourPort);
        DummyProvider patientProvider = new DummyProvider();
        RestfulServer server = new RestfulServer();
        server.setProviders(patientProvider);
        org.eclipse.jetty.servlet.ServletContextHandler proxyHandler = new org.eclipse.jetty.servlet.ServletContextHandler();
        proxyHandler.setContextPath("/");
        ServletHolder handler = new ServletHolder();
        handler.setServlet(server);
        proxyHandler.addServlet(handler, "/*");
        ourServer.setHandler(proxyHandler);
        ourServer.start();
        PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(500000, TimeUnit.MILLISECONDS);
        HttpClientBuilder builder = HttpClientBuilder.create();
        builder.setConnectionManager(connectionManager);
        ourClient = builder.build();
    }

    /**
     * Transaction provider: one response entry per request entry. Created
     * resources get id {@code 8<n>}, DELETE entries keep the id parsed from
     * their URL, and every entry gets history version {@code 9<n>} where
     * {@code n} is the 1-based entry index.
     *
     * Created by dsotnikov on 2/25/2014.
     */
    public static class DummyProvider {
        @Transaction
        public Bundle transaction(@TransactionParam Bundle theResources) {
            Bundle retVal = new Bundle();
            if (ourReturnOperationOutcome) {
                OperationOutcome oo = new OperationOutcome();
                oo.addIssue().setDetails("AAAAA");
                retVal.addEntry().setResource(oo);
            }
            int index = 1;
            for (Entry nextEntry : theResources.getEntry()) {
                String newId = "8" + Integer.toString(index);
                if (nextEntry.getTransaction().getMethodElement().getValueAsEnum() == HTTPVerbEnum.DELETE) {
                    // Deletes answer with the id taken from the request URL.
                    newId = new IdDt(nextEntry.getTransaction().getUrlElement()).getIdPart();
                }
                IdDt newIdDt = (new IdDt("Patient", newId, "9" + Integer.toString(index)));
                retVal.addEntry().getTransactionResponse().setLocation(newIdDt.getValue());
                index++;
            }
            return retVal;
        }
    }
}
| |
/**
* SIX VULN - http://code.google.com/p/six-vuln/
* Copyright (C) 2006
* National Institute of Advanced Industrial Science and Technology (AIST)
* Registration Number: H20PRO-863
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.go.aist.six.vuln.model.scap.cpe;
import java.util.Arrays;
import java.util.Iterator;
/**
* The CPE Name structure.
*
* @author Akihito Nakamura, AIST
* @version $Id: CpeName.java 536 2013-04-19 02:21:46Z nakamura5akihito@gmail.com $
*/
/**
 * The CPE Name structure: an immutable-prefix ("cpe:/") URI of up to seven
 * colon-separated components (part, vendor, product, version, update,
 * edition, language). Missing trailing components are stored as {@link #EMPTY}.
 *
 * @author Akihito Nakamura, AIST
 */
public final class CpeName
    implements Iterable<String>
{
    /** Maximum number of components in a CPE 2.2 name. */
    public static final int MAX_COMPONENTS = 7;

    /** Placeholder for an absent component. */
    public static final String EMPTY = "";

    /** Mandatory URI scheme prefix of every CPE name. */
    public static final String PREFIX = "cpe:/";

    // Component positions within the colon-separated name.
    private static final int _INDEX_PART_ = 0;
    private static final int _INDEX_VENDOR_ = 1;
    private static final int _INDEX_PRODUCT_ = 2;
    private static final int _INDEX_VERSION_ = 3;
    private static final int _INDEX_UPDATE_ = 4;
    private static final int _INDEX_EDITION_ = 5;
    private static final int _INDEX_LANGUAGE_ = 6;

    // The full name as given to setName(); null until a name is set.
    private String _cpename;

    // Parsed components; unset slots hold null and read back as EMPTY.
    private final String[] _components = new String[MAX_COMPONENTS];

    /**
     * Constructor: creates an empty name.
     */
    public CpeName()
    {
    }

    /**
     * Constructor.
     *
     * @param cpename the full CPE name, e.g. {@code cpe:/o:linux:kernel:2.6}
     * @throws IllegalArgumentException if the name is null, lacks the
     *         {@code cpe:/} prefix, or has more than seven components
     */
    public CpeName(
        final String cpename
        )
    {
        setName( cpename );
    }

    /**
     * Parses and stores the given CPE name, replacing any previous state.
     * Components beyond those present in the name are reset to {@link #EMPTY}.
     * Note: String#split discards trailing empty strings, so trailing colons
     * simply yield EMPTY components.
     *
     * @throws IllegalArgumentException if the name is invalid (see constructor)
     */
    public void setName(
        final String cpename
        )
    {
        if (cpename == null || !cpename.startsWith( PREFIX )) {
            throw new IllegalArgumentException(
                "invalid CPE name: " + cpename );
        }

        // Use PREFIX.length() instead of a magic literal for the scheme length.
        String[] components = cpename.substring( PREFIX.length() ).split( ":" );
        if (components.length > MAX_COMPONENTS) {
            throw new IllegalArgumentException(
                "invalid CPE name: " + cpename );
        }

        _cpename = cpename;

        // Single pass: copy the parsed components and blank out the remainder.
        // (Replaces a redundant pre-loop that duplicated these assignments.)
        for (int i = 0; i < MAX_COMPONENTS; i++) {
            _setComponent( i, i < components.length ? components[i] : EMPTY );
        }
    }

    /**
     * @return the full CPE name, or null if never set
     */
    public String getName()
    {
        return _cpename;
    }

    /**
     * Stores one component at the given index.
     */
    private void _setComponent(
        final int index,
        final String value
        )
    {
        _components[index] = value;
    }

    /**
     * Reads one component; a never-set (null) slot reads as {@link #EMPTY}.
     */
    private String _getComponent(
        final int index
        )
    {
        final String value = _components[index];
        return (value == null ? EMPTY : value);
    }

    /** @return the part component (h, o, or a) */
    public String getPart()
    {
        return _getComponent( _INDEX_PART_ );
    }

    /**
     * Sets the part component, normalized through {@code CpePart}.
     *
     * @throws IllegalArgumentException if {@code part} is not a valid CpePart
     */
    public void setPart(
        final String part
        )
    {
        _setComponent( _INDEX_PART_, CpePart.valueOf( part ).getName() );
    }

    /** @return the vendor component */
    public String getVendor()
    {
        return _getComponent( _INDEX_VENDOR_ );
    }

    /** Sets the vendor component. */
    public void setVendor(
        final String vendor
        )
    {
        _setComponent( _INDEX_VENDOR_, vendor );
    }

    /** @return the product component */
    public String getProduct()
    {
        return _getComponent( _INDEX_PRODUCT_ );
    }

    /** Sets the product component. */
    public void setProduct(
        final String product
        )
    {
        _setComponent( _INDEX_PRODUCT_, product );
    }

    /** @return the version component */
    public String getVersion()
    {
        return _getComponent( _INDEX_VERSION_ );
    }

    /** Sets the version component. */
    public void setVersion(
        final String version
        )
    {
        _setComponent( _INDEX_VERSION_, version );
    }

    /** @return the update component */
    public String getUpdate()
    {
        return _getComponent( _INDEX_UPDATE_ );
    }

    /** Sets the update component. */
    public void setUpdate(
        final String update
        )
    {
        _setComponent( _INDEX_UPDATE_, update );
    }

    /** @return the edition component */
    public String getEdition()
    {
        return _getComponent( _INDEX_EDITION_ );
    }

    /** Sets the edition component. */
    public void setEdition(
        final String edition
        )
    {
        _setComponent( _INDEX_EDITION_, edition );
    }

    /** @return the language component */
    public String getLanguage()
    {
        return _getComponent( _INDEX_LANGUAGE_ );
    }

    /** Sets the language component. */
    public void setLanguage(
        final String language
        )
    {
        _setComponent( _INDEX_LANGUAGE_, language );
    }

    //**************************************************************
    //  Iterable
    //**************************************************************

    /** Iterates the seven component slots in index order (unset slots are null). */
    public Iterator<String> iterator()
    {
        return Arrays.asList( _components ).iterator();
    }

    //**************************************************************
    //  java.lang.Object
    //**************************************************************

    @Override
    public String toString()
    {
        return getName();
    }
}
// CpeNameStructure
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.backends.gwt;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Graphics;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.GL30;
import com.badlogic.gdx.utils.GdxRuntimeException;
import com.google.gwt.canvas.client.Canvas;
import com.google.gwt.dom.client.CanvasElement;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwt.webgl.client.WebGLContextAttributes;
import com.google.gwt.webgl.client.WebGLRenderingContext;
/**
 * GWT/WebGL implementation of {@link Graphics}: renders into an HTML5 canvas
 * attached to the given root panel and exposes the context as GL20.
 * GL30 is not supported (see {@link #isGL30Available()}).
 */
public class GwtGraphics implements Graphics {
CanvasElement canvas;
WebGLRenderingContext context;
GL20 gl;
// Lazily cached result of glGetString(GL_EXTENSIONS); see supportsExtension().
String extensions;
// Frame statistics maintained by update(), called once per frame.
float fps = 0;
long lastTimeStamp = System.currentTimeMillis();
float deltaTime = 0;
float time = 0;
int frames;
GwtApplicationConfiguration config;
// NOTE(review): this flag is never read or written in this class — appears unused.
boolean inFullscreenMode = false;
/**
 * Creates the canvas, adds it to {@code root} and acquires a WebGL context
 * configured from {@code config} (antialiasing, stencil, drawing-buffer
 * preservation; alpha and premultiplied alpha are forced off).
 * Throws GdxRuntimeException if the browser does not support canvas.
 */
public GwtGraphics (Panel root, GwtApplicationConfiguration config) {
Canvas canvasWidget = Canvas.createIfSupported();
if (canvasWidget == null) throw new GdxRuntimeException("Canvas not supported");
canvas = canvasWidget.getCanvasElement();
root.add(canvasWidget);
canvas.setWidth(config.width);
canvas.setHeight(config.height);
this.config = config;
WebGLContextAttributes attributes = WebGLContextAttributes.create();
attributes.setAntialias(config.antialiasing);
attributes.setStencil(config.stencil);
attributes.setAlpha(false);
attributes.setPremultipliedAlpha(false);
attributes.setPreserveDrawingBuffer(config.preserveDrawingBuffer);
context = WebGLRenderingContext.getContext(canvas, attributes);
context.viewport(0, 0, config.width, config.height);
// Optionally wrap the GL calls in the argument-checking debug implementation.
this.gl = config.useDebugGL ? new GwtGL20Debug(context) : new GwtGL20(context);
}
/** @return the raw WebGL rendering context backing this Graphics. */
public WebGLRenderingContext getContext () {
return context;
}
@Override
public GL20 getGL20 () {
return gl;
}
@Override
public int getWidth () {
return canvas.getWidth();
}
@Override
public int getHeight () {
return canvas.getHeight();
}
@Override
public float getDeltaTime () {
return deltaTime;
}
@Override
public int getFramesPerSecond () {
return (int)fps;
}
@Override
public GraphicsType getType () {
return GraphicsType.WebGL;
}
// Browsers do not expose real DPI; 96 dpi is the conventional CSS assumption.
@Override
public float getPpiX () {
return 96;
}
@Override
public float getPpiY () {
return 96;
}
// Pixels per centimeter derived from the 96 dpi assumption (2.54 cm per inch).
@Override
public float getPpcX () {
return 96 / 2.54f;
}
@Override
public float getPpcY () {
return 96 / 2.54f;
}
@Override
public boolean supportsDisplayModeChange () {
return true;
}
// Only one mode is reported: the full screen size at a nominal 60 Hz / 8 bpp.
// The anonymous subclass is used because DisplayMode's constructor is protected.
@Override
public DisplayMode[] getDisplayModes () {
return new DisplayMode[] {new DisplayMode(getScreenWidthJSNI(), getScreenHeightJSNI(), 60, 8) {}};
}
private native int getScreenWidthJSNI () /*-{
return $wnd.screen.width;
}-*/;
private native int getScreenHeightJSNI () /*-{
return $wnd.screen.height;
}-*/;
// Checks the WebKit and Mozilla fullscreen flags; other engines report false.
private native boolean isFullscreenJSNI () /*-{
if("webkitIsFullScreen" in $doc) {
return $doc.webkitIsFullScreen;
}
if("mozFullScreen" in $doc) {
return $doc.mozFullScreen;
}
return false
}-*/;
// Fullscreen-change callback (invoked from the JSNI listeners below):
// restores the configured canvas size when fullscreen is exited.
private void fullscreenChanged () {
if (!isFullscreen()) {
canvas.setWidth(config.width);
canvas.setHeight(config.height);
}
}
// Requests fullscreen via the vendor-prefixed WebKit or Mozilla API, resizes
// the canvas to the screen and registers a change listener that calls back
// into fullscreenChanged(). Returns false if neither API is available.
private native boolean setFullscreenJSNI (GwtGraphics graphics, CanvasElement element) /*-{
if(element.webkitRequestFullScreen) {
element.width = $wnd.screen.width;
element.height = $wnd.screen.height;
element.webkitRequestFullScreen(Element.ALLOW_KEYBOARD_INPUT);
$doc.addEventListener("webkitfullscreenchange", function() {
graphics.@com.badlogic.gdx.backends.gwt.GwtGraphics::fullscreenChanged()();
}, false);
return true;
}
if(element.mozRequestFullScreen) {
element.width = $wnd.screen.width;
element.height = $wnd.screen.height;
element.mozRequestFullScreen();
$doc.addEventListener("mozfullscreenchange", function() {
graphics.@com.badlogic.gdx.backends.gwt.GwtGraphics::fullscreenChanged()();
}, false);
return true;
}
return false;
}-*/;
private native void exitFullscreen () /*-{
if($doc.webkitExitFullscreen) $doc.webkitExitFullscreen();
if($doc.mozExitFullscreen) $doc.mozExitFullscreen();
}-*/;
@Override
public DisplayMode getDesktopDisplayMode () {
return new DisplayMode(getScreenWidthJSNI(), getScreenHeightJSNI(), 60, 8) {};
}
// NOTE(review): the && below rejects only modes where BOTH dimensions differ
// from the screen size — a mode matching just one dimension is accepted.
// Possibly intended to be ||; confirm before changing.
@Override
public boolean setDisplayMode (DisplayMode displayMode) {
if (displayMode.width != getScreenWidthJSNI() && displayMode.height != getScreenHeightJSNI()) return false;
return setFullscreenJSNI(this, canvas);
}
@Override
public boolean setDisplayMode (int width, int height, boolean fullscreen) {
if (fullscreen) {
// Same &&-condition caveat as in setDisplayMode(DisplayMode) above.
if (width != getScreenWidthJSNI() && height != getScreenHeightJSNI()) return false;
return setFullscreenJSNI(this, canvas);
} else {
if (isFullscreenJSNI()) exitFullscreen();
canvas.setWidth(width);
canvas.setHeight(height);
return true;
}
}
// RGBA8 with a 16-bit depth buffer; stencil bits depend on the configuration.
@Override
public BufferFormat getBufferFormat () {
return new BufferFormat(8, 8, 8, 0, 16, config.stencil ? 8 : 0, 0, false);
}
@Override
public boolean supportsExtension (String extension) {
if (extensions == null) extensions = Gdx.gl.glGetString(GL20.GL_EXTENSIONS);
return extensions.contains(extension);
}
/**
 * Advances the frame clock: recomputes deltaTime from wall-clock time and
 * refreshes the FPS counter once per elapsed second.
 */
public void update () {
long currTimeStamp = System.currentTimeMillis();
deltaTime = (currTimeStamp - lastTimeStamp) / 1000.0f;
lastTimeStamp = currTimeStamp;
time += deltaTime;
frames++;
if (time > 1) {
this.fps = frames;
time = 0;
frames = 0;
}
}
// No-op: the page title is not controlled by the backend.
@Override
public void setTitle (String title) {
}
// No-op: vsync is governed by the browser's animation loop.
@Override
public void setVSync (boolean vsync) {
}
// Density relative to the 160 dpi baseline, using the 96 dpi assumption above.
@Override
public float getDensity () {
return 96.0f / 160;
}
// Non-continuous rendering is not supported on this backend; these are no-ops.
@Override
public void setContinuousRendering (boolean isContinuous) {
}
@Override
public boolean isContinuousRendering () {
return false;
}
@Override
public void requestRendering () {
}
@Override
public float getRawDeltaTime () {
return getDeltaTime();
}
@Override
public boolean isFullscreen () {
return isFullscreenJSNI();
}
@Override
public boolean isGL30Available () {
return false;
}
@Override
public GL30 getGL30 () {
return null;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.balancer;
import static org.apache.hadoop.hbase.ServerName.NON_STARTCODE;
import static org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper.FAVORED_NODES_NUM;
import static org.apache.hadoop.hbase.favored.FavoredNodesPlan.Position.PRIMARY;
import static org.apache.hadoop.hbase.favored.FavoredNodesPlan.Position.SECONDARY;
import static org.apache.hadoop.hbase.favored.FavoredNodesPlan.Position.TERTIARY;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.ServerMetrics;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper;
import org.apache.hadoop.hbase.favored.FavoredNodesManager;
import org.apache.hadoop.hbase.favored.FavoredNodesPlan;
import org.apache.hadoop.hbase.favored.FavoredNodesPlan.Position;
import org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
/**
* An implementation of the {@link org.apache.hadoop.hbase.master.LoadBalancer} that
* assigns favored nodes for each region. There is a Primary RegionServer that hosts
* the region, and then there is Secondary and Tertiary RegionServers. Currently, the
* favored nodes information is used in creating HDFS files - the Primary RegionServer
* passes the primary, secondary, tertiary node addresses as hints to the
* DistributedFileSystem API for creating files on the filesystem. These nodes are
* treated as hints by the HDFS to place the blocks of the file. This alleviates the
* problem to do with reading from remote nodes (since we can make the Secondary
* RegionServer as the new Primary RegionServer) after a region is recovered. This
* should help provide consistent read latencies for the regions even when their
* primary region servers die. This provides two
* {@link CandidateGenerator}
*
*/
@InterfaceAudience.Private
public class FavoredStochasticBalancer extends StochasticLoadBalancer implements
FavoredNodesPromoter {
private static final Logger LOG = LoggerFactory.getLogger(FavoredStochasticBalancer.class);
private FavoredNodesManager fnm;
/** Injects the manager used to look up and persist favored-node plans. */
@Override
public void setFavoredNodesManager(FavoredNodesManager fnm) {
this.fnm = fnm;
}
/**
 * Replaces the default stochastic candidate generators with the two
 * favored-node-aware pickers (load-based first, locality-based second).
 */
@Override
protected List<CandidateGenerator> createCandidateGenerators() {
  return Lists.newArrayList(new FavoredNodeLoadPicker(), new FavoredNodeLocalityPicker());
}
/**
 * Picks one of the registered candidate generators uniformly at random.
 *
 * @return a randomly chosen candidate generator
 */
@Override
protected CandidateGenerator getRandomGenerator() {
  int pick = ThreadLocalRandom.current().nextInt(candidateGenerators.size());
  return candidateGenerators.get(pick);
}
/**
 * Round robin assignment: Segregate the regions into two types:
 *
 * 1. The regions that have favored node assignment where at least one of the favored node
 * is still alive. In this case, try to adhere to the current favored nodes assignment as
 * much as possible - i.e., if the current primary is gone, then make the secondary or
 * tertiary as the new host for the region (based on their current load). Note that we don't
 * change the favored node assignments here (even though one or more favored node is
 * currently down). That will be done by the admin operations.
 *
 * 2. The regions that currently don't have favored node assignments. Generate favored nodes
 * for them and then assign. Generate the primary fn in round robin fashion and generate
 * secondary and tertiary as per favored nodes constraints.
 *
 * @param regions regions to assign (system regions are handled separately)
 * @param servers currently online servers to assign onto
 * @return map of server to the regions it should host; never null
 * @throws HBaseIOException if favored-node generation or persistence fails
 */
@Override
@NonNull
public Map<ServerName, List<RegionInfo>> roundRobinAssignment(List<RegionInfo> regions,
List<ServerName> servers) throws HBaseIOException {
metricsBalancer.incrMiscInvocations();
Map<ServerName, List<RegionInfo>> assignmentMap = new HashMap<>();
if (regions.isEmpty()) {
return assignmentMap;
}
Set<RegionInfo> regionSet = new HashSet<>(regions);
try {
FavoredNodeAssignmentHelper helper = new FavoredNodeAssignmentHelper(servers, rackManager);
helper.initialize();
// System regions are not favored-node applicable; route them through the
// plain superclass round-robin.
Set<RegionInfo> systemRegions = FavoredNodesManager.filterNonFNApplicableRegions(regionSet);
regionSet.removeAll(systemRegions);
// Assign all system regions
Map<ServerName, List<RegionInfo>> systemAssignments =
super.roundRobinAssignment(Lists.newArrayList(systemRegions), servers);
// Segregate favored and non-favored nodes regions and assign accordingly.
Pair<Map<ServerName,List<RegionInfo>>, List<RegionInfo>> segregatedRegions =
segregateRegionsAndAssignRegionsWithFavoredNodes(regionSet, servers);
Map<ServerName, List<RegionInfo>> regionsWithFavoredNodesMap = segregatedRegions.getFirst();
Map<ServerName, List<RegionInfo>> regionsWithoutFN =
generateFNForRegionsWithoutFN(helper, segregatedRegions.getSecond());
// merge the assignment maps
mergeAssignmentMaps(assignmentMap, systemAssignments);
mergeAssignmentMaps(assignmentMap, regionsWithFavoredNodesMap);
mergeAssignmentMaps(assignmentMap, regionsWithoutFN);
} catch (Exception ex) {
// Any failure aborts the whole favored-nodes assignment attempt.
throw new HBaseIOException("Encountered exception while doing favored-nodes assignment "
+ ex + " Falling back to regular assignment", ex);
}
return assignmentMap;
}
/**
 * Folds {@code otherAssignments} into {@code assignmentMap}, concatenating
 * region lists for servers present in both maps. A null or empty source map
 * is a no-op. The target map always ends up owning mutable lists.
 */
private void mergeAssignmentMaps(Map<ServerName, List<RegionInfo>> assignmentMap,
    Map<ServerName, List<RegionInfo>> otherAssignments) {
  if (otherAssignments == null || otherAssignments.isEmpty()) {
    return;
  }
  for (Map.Entry<ServerName, List<RegionInfo>> entry : otherAssignments.entrySet()) {
    // computeIfAbsent replaces the previous null-check-then-put double lookup.
    assignmentMap.computeIfAbsent(entry.getKey(), k -> Lists.newArrayList())
        .addAll(entry.getValue());
  }
}
/**
 * Generates favored nodes (round-robin primaries) for regions that have none,
 * persists the new plans through the favored-nodes manager, and returns the
 * resulting server-to-regions assignment (empty map when no regions given).
 */
private Map<ServerName, List<RegionInfo>> generateFNForRegionsWithoutFN(
    FavoredNodeAssignmentHelper helper, List<RegionInfo> regions) throws IOException {
  Map<ServerName, List<RegionInfo>> assignments = Maps.newHashMap();
  if (!regions.isEmpty()) {
    // The helper fills `assignments` as a side effect and returns the new plans.
    fnm.updateFavoredNodes(helper.generateFavoredNodesRoundRobin(assignments, regions));
  }
  return assignments;
}
/**
 * Return a pair - one with assignments when favored nodes are present and another with regions
 * without favored nodes.
 *
 * For each region with a favored-node plan, the plan entries are matched
 * against the online servers (ignoring start code) and classified as
 * primary/secondary/tertiary; the region is then assigned by preference via
 * assignRegionToAvailableFavoredNode. Regions with no plan go into the
 * second element of the pair for later plan generation.
 */
private Pair<Map<ServerName, List<RegionInfo>>, List<RegionInfo>>
segregateRegionsAndAssignRegionsWithFavoredNodes(Collection<RegionInfo> regions,
List<ServerName> onlineServers) throws HBaseIOException {
// Since we expect FN to be present most of the time, lets create map with same size
Map<ServerName, List<RegionInfo>> assignmentMapForFavoredNodes =
new HashMap<>(onlineServers.size());
List<RegionInfo> regionsWithNoFavoredNodes = new ArrayList<>();
for (RegionInfo region : regions) {
List<ServerName> favoredNodes = fnm.getFavoredNodes(region);
ServerName primaryHost = null;
ServerName secondaryHost = null;
ServerName tertiaryHost = null;
if (favoredNodes != null && !favoredNodes.isEmpty()) {
for (ServerName s : favoredNodes) {
// Resolve the plan entry (start code -1) to a live server with a real start code.
ServerName serverWithLegitStartCode = getServerFromFavoredNode(onlineServers, s);
if (serverWithLegitStartCode != null) {
FavoredNodesPlan.Position position =
FavoredNodesPlan.getFavoredServerPosition(favoredNodes, s);
if (Position.PRIMARY.equals(position)) {
primaryHost = serverWithLegitStartCode;
} else if (Position.SECONDARY.equals(position)) {
secondaryHost = serverWithLegitStartCode;
} else if (Position.TERTIARY.equals(position)) {
tertiaryHost = serverWithLegitStartCode;
}
}
}
assignRegionToAvailableFavoredNode(assignmentMapForFavoredNodes, region, primaryHost,
secondaryHost, tertiaryHost);
} else {
regionsWithNoFavoredNodes.add(region);
}
}
return new Pair<>(assignmentMapForFavoredNodes, regionsWithNoFavoredNodes);
}
/**
 * Appends {@code region} to {@code host}'s list in the assignment map,
 * creating the list on first use.
 */
private void addRegionToMap(Map<ServerName, List<RegionInfo>> assignmentMapForFavoredNodes,
    RegionInfo region, ServerName host) {
  // computeIfAbsent replaces the previous get/null-check/put sequence.
  assignmentMapForFavoredNodes.computeIfAbsent(host, k -> Lists.newArrayList()).add(region);
}
/**
 * Resolves a favored-node entry (whose start code is always -1) to the
 * matching live server — same host and port — from the provided list.
 *
 * @return the matching server with its real start code, or null if none match
 */
private ServerName getServerFromFavoredNode(List<ServerName> servers, ServerName fn) {
  return servers.stream()
      .filter(candidate -> ServerName.isSameAddress(fn, candidate))
      .findFirst()
      .orElse(null);
}
/**
 * Assign the region to primary if its available. If both secondary and tertiary are available,
 * assign to the host which has less load. Else assign to secondary or tertiary whichever is
 * available (in that order). If none of the favored nodes is online, the region is parked on
 * the BOGUS server placeholder.
 */
private void assignRegionToAvailableFavoredNode(
Map<ServerName, List<RegionInfo>> assignmentMapForFavoredNodes, RegionInfo region,
ServerName primaryHost, ServerName secondaryHost, ServerName tertiaryHost) {
if (primaryHost != null) {
addRegionToMap(assignmentMapForFavoredNodes, region, primaryHost);
} else if (secondaryHost != null && tertiaryHost != null) {
// Assign the region to the one with a lower load (both have the desired hdfs blocks)
ServerName s;
ServerMetrics tertiaryLoad = provider.getLoad(tertiaryHost);
ServerMetrics secondaryLoad = provider.getLoad(secondaryHost);
if (secondaryLoad != null && tertiaryLoad != null) {
// Load is compared by region count; ties go to the tertiary.
if (secondaryLoad.getRegionMetrics().size() < tertiaryLoad.getRegionMetrics().size()) {
s = secondaryHost;
} else {
s = tertiaryHost;
}
} else {
// We don't have one/more load, lets just choose a random node
s = ThreadLocalRandom.current().nextBoolean() ? secondaryHost : tertiaryHost;
}
addRegionToMap(assignmentMapForFavoredNodes, region, s);
} else if (secondaryHost != null) {
addRegionToMap(assignmentMapForFavoredNodes, region, secondaryHost);
} else if (tertiaryHost != null) {
addRegionToMap(assignmentMapForFavoredNodes, region, tertiaryHost);
} else {
// No favored nodes are online, lets assign to BOGUS server
addRegionToMap(assignmentMapForFavoredNodes, region, BOGUS_SERVER_NAME);
}
}
/**
 * If we have favored nodes for a region, we will return one of the FN as destination. If
 * favored nodes are not present for a region, we will generate and return one of the FN as
 * destination. If we can't generate anything, lets fallback.
 *
 * @return the chosen destination server; may be null when no favored node is
 *         online and FAVORED_ALWAYS_ASSIGN_REGIONS is disabled
 * @throws HBaseIOException if favored-node generation or persistence fails
 */
@Override
public ServerName randomAssignment(RegionInfo regionInfo, List<ServerName> servers)
throws HBaseIOException {
ServerName destination = null;
if (!FavoredNodesManager.isFavoredNodeApplicable(regionInfo)) {
// System/meta-style regions bypass favored nodes entirely.
return super.randomAssignment(regionInfo, servers);
}
metricsBalancer.incrMiscInvocations();
Configuration conf = getConf();
List<ServerName> favoredNodes = fnm.getFavoredNodes(regionInfo);
if (favoredNodes == null || favoredNodes.isEmpty()) {
// Generate new favored nodes and return primary
FavoredNodeAssignmentHelper helper = new FavoredNodeAssignmentHelper(servers, conf);
helper.initialize();
try {
favoredNodes = helper.generateFavoredNodes(regionInfo);
updateFavoredNodesForRegion(regionInfo, favoredNodes);
} catch (IOException e) {
LOG.warn("Encountered exception while doing favored-nodes (random)assignment " + e);
throw new HBaseIOException(e);
}
}
// Pick uniformly among the favored nodes that are currently online.
List<ServerName> onlineServers = getOnlineFavoredNodes(servers, favoredNodes);
if (onlineServers.size() > 0) {
destination = onlineServers.get(ThreadLocalRandom.current().nextInt(onlineServers.size()));
}
// Optionally fall back to the default balancer rather than leaving the
// region unassigned (controlled by FAVORED_ALWAYS_ASSIGN_REGIONS, default true).
boolean alwaysAssign = conf.getBoolean(FAVORED_ALWAYS_ASSIGN_REGIONS, true);
if (destination == null && alwaysAssign) {
LOG.warn("Can't generate FN for region: " + regionInfo + " falling back");
destination = super.randomAssignment(regionInfo, servers);
}
return destination;
}
/**
 * Persists a new favored-node plan for a single region through the
 * favored-nodes manager.
 */
private void updateFavoredNodesForRegion(RegionInfo regionInfo, List<ServerName> newFavoredNodes)
    throws IOException {
  Map<RegionInfo, List<ServerName>> singleRegionPlan = Maps.newHashMap();
  singleRegionPlan.put(regionInfo, newFavoredNodes);
  fnm.updateFavoredNodes(singleRegionPlan);
}
/**
* Reuse BaseLoadBalancer's retainAssignment, but generate favored nodes when its missing.
*/
@Override
@NonNull
public Map<ServerName, List<RegionInfo>> retainAssignment(Map<RegionInfo, ServerName> regions,
List<ServerName> servers) throws HBaseIOException {
Map<ServerName, List<RegionInfo>> assignmentMap = Maps.newHashMap();
Map<ServerName, List<RegionInfo>> result = super.retainAssignment(regions, servers);
if (result.isEmpty()) {
LOG.warn("Nothing to assign to, probably no servers or no regions");
return result;
}
// Lets check if favored nodes info is in META, if not generate now.
FavoredNodeAssignmentHelper helper = new FavoredNodeAssignmentHelper(servers, getConf());
helper.initialize();
LOG.debug("Generating favored nodes for regions missing them.");
Map<RegionInfo, List<ServerName>> regionFNMap = Maps.newHashMap();
try {
for (Map.Entry<ServerName, List<RegionInfo>> entry : result.entrySet()) {
ServerName sn = entry.getKey();
ServerName primary = ServerName.valueOf(sn.getHostname(), sn.getPort(), NON_STARTCODE);
for (RegionInfo hri : entry.getValue()) {
if (FavoredNodesManager.isFavoredNodeApplicable(hri)) {
List<ServerName> favoredNodes = fnm.getFavoredNodes(hri);
if (favoredNodes == null || favoredNodes.size() < FAVORED_NODES_NUM) {
LOG.debug("Generating favored nodes for: " + hri + " with primary: " + primary);
ServerName[] secondaryAndTertiaryNodes = helper.getSecondaryAndTertiary(hri, primary);
if (secondaryAndTertiaryNodes != null && secondaryAndTertiaryNodes.length == 2) {
List<ServerName> newFavoredNodes = Lists.newArrayList();
newFavoredNodes.add(primary);
newFavoredNodes.add(ServerName.valueOf(secondaryAndTertiaryNodes[0].getHostname(),
secondaryAndTertiaryNodes[0].getPort(), NON_STARTCODE));
newFavoredNodes.add(ServerName.valueOf(secondaryAndTertiaryNodes[1].getHostname(),
secondaryAndTertiaryNodes[1].getPort(), NON_STARTCODE));
regionFNMap.put(hri, newFavoredNodes);
addRegionToMap(assignmentMap, hri, sn);
} else {
throw new HBaseIOException("Cannot generate secondary/tertiary FN for " + hri
+ " generated "
+ (secondaryAndTertiaryNodes != null ? secondaryAndTertiaryNodes : " nothing"));
}
} else {
List<ServerName> onlineFN = getOnlineFavoredNodes(servers, favoredNodes);
if (onlineFN.isEmpty()) {
// All favored nodes are dead, lets assign it to BOGUS
addRegionToMap(assignmentMap, hri, BOGUS_SERVER_NAME);
} else {
// Is primary not on FN? Less likely, but we can still take care of this.
if (FavoredNodesPlan.getFavoredServerPosition(favoredNodes, sn) != null) {
addRegionToMap(assignmentMap, hri, sn);
} else {
ServerName destination =
onlineFN.get(ThreadLocalRandom.current().nextInt(onlineFN.size()));
LOG.warn("Region: " + hri + " not hosted on favored nodes: " + favoredNodes
+ " current: " + sn + " moving to: " + destination);
addRegionToMap(assignmentMap, hri, destination);
}
}
}
} else {
addRegionToMap(assignmentMap, hri, sn);
}
}
}
if (!regionFNMap.isEmpty()) {
LOG.debug("Updating FN in meta for missing regions, count: " + regionFNMap.size());
fnm.updateFavoredNodes(regionFNMap);
}
} catch (IOException e) {
throw new HBaseIOException("Cannot generate/update FN for regions: " + regionFNMap.keySet());
}
return assignmentMap;
}
/**
* Return list of favored nodes that are online.
*/
private List<ServerName> getOnlineFavoredNodes(List<ServerName> onlineServers,
List<ServerName> serversWithoutStartCodes) {
if (serversWithoutStartCodes == null) {
return null;
} else {
List<ServerName> result = Lists.newArrayList();
for (ServerName sn : serversWithoutStartCodes) {
for (ServerName online : onlineServers) {
if (ServerName.isSameAddress(sn, online)) {
result.add(online);
}
}
}
return result;
}
}
  @Override
  public List<ServerName> getFavoredNodes(RegionInfo regionInfo) {
    // Straight delegation to the favored-nodes manager; per the null checks elsewhere
    // in this class, the result may be null when no favored nodes are recorded.
    return this.fnm.getFavoredNodes(regionInfo);
  }
/**
* Generate Favored Nodes for daughters during region split.
* <p/>
* If the parent does not have FN, regenerates them for the daughters.
* <p/>
* If the parent has FN, inherit two FN from parent for each daughter and generate the remaining.
* The primary FN for both the daughters should be the same as parent. Inherit the secondary FN
* from the parent but keep it different for each daughter. Choose the remaining FN randomly. This
* would give us better distribution over a period of time after enough splits.
*/
@Override
public void generateFavoredNodesForDaughter(List<ServerName> servers, RegionInfo parent,
RegionInfo regionA, RegionInfo regionB) throws IOException {
Map<RegionInfo, List<ServerName>> result = new HashMap<>();
FavoredNodeAssignmentHelper helper = new FavoredNodeAssignmentHelper(servers, rackManager);
helper.initialize();
List<ServerName> parentFavoredNodes = fnm.getFavoredNodes(parent);
if (parentFavoredNodes == null) {
LOG.debug("Unable to find favored nodes for parent, " + parent
+ " generating new favored nodes for daughter");
result.put(regionA, helper.generateFavoredNodes(regionA));
result.put(regionB, helper.generateFavoredNodes(regionB));
} else {
// Lets get the primary and secondary from parent for regionA
Set<ServerName> regionAFN =
getInheritedFNForDaughter(helper, parentFavoredNodes, PRIMARY, SECONDARY);
result.put(regionA, Lists.newArrayList(regionAFN));
// Lets get the primary and tertiary from parent for regionB
Set<ServerName> regionBFN =
getInheritedFNForDaughter(helper, parentFavoredNodes, PRIMARY, TERTIARY);
result.put(regionB, Lists.newArrayList(regionBFN));
}
fnm.updateFavoredNodes(result);
}
private Set<ServerName> getInheritedFNForDaughter(FavoredNodeAssignmentHelper helper,
List<ServerName> parentFavoredNodes, Position primary, Position secondary)
throws IOException {
Set<ServerName> daughterFN = Sets.newLinkedHashSet();
if (parentFavoredNodes.size() >= primary.ordinal()) {
daughterFN.add(parentFavoredNodes.get(primary.ordinal()));
}
if (parentFavoredNodes.size() >= secondary.ordinal()) {
daughterFN.add(parentFavoredNodes.get(secondary.ordinal()));
}
while (daughterFN.size() < FAVORED_NODES_NUM) {
ServerName newNode = helper.generateMissingFavoredNode(Lists.newArrayList(daughterFN));
daughterFN.add(newNode);
}
return daughterFN;
}
  /**
   * Generate favored nodes for a region during merge. Choose the FN from one of the sources to
   * keep it simple.
   */
  @Override
  public void generateFavoredNodesForMergedRegion(RegionInfo merged, RegionInfo [] mergeParents)
    throws IOException {
    // The merged region simply reuses the first parent's favored nodes.
    // NOTE(review): fnm.getFavoredNodes may return null if the first parent has no FN
    // recorded — confirm updateFavoredNodes tolerates a null list for a region.
    updateFavoredNodesForRegion(merged, fnm.getFavoredNodes(mergeParents[0]));
  }
  /**
   * Pick favored nodes with the highest locality for a region with lowest locality.
   */
  private class FavoredNodeLocalityPicker extends CandidateGenerator {
    /**
     * Propose a move: take the lowest-locality region on a randomly picked server and
     * move it to the favored node with the highest locality (or to a random other server
     * if the region is exempt from favored-node placement).
     */
    @Override
    protected BalanceAction generate(BalancerClusterState cluster) {
      int thisServer = pickRandomServer(cluster);
      int thisRegion;
      if (thisServer == -1) {
        LOG.trace("Could not pick lowest local region server");
        return BalanceAction.NULL_ACTION;
      } else {
        // Pick lowest local region on this server
        thisRegion = pickLowestLocalRegionOnServer(cluster, thisServer);
      }
      if (thisRegion == -1) {
        if (cluster.regionsPerServer[thisServer].length > 0) {
          LOG.trace("Could not pick lowest local region even when region server held "
            + cluster.regionsPerServer[thisServer].length + " regions");
        }
        return BalanceAction.NULL_ACTION;
      }
      RegionInfo hri = cluster.regions[thisRegion];
      List<ServerName> favoredNodes = fnm.getFavoredNodes(hri);
      int otherServer;
      if (favoredNodes == null) {
        if (!FavoredNodesManager.isFavoredNodeApplicable(hri)) {
          // Region is exempt from FN placement (e.g. not applicable): any other server works.
          otherServer = pickOtherRandomServer(cluster, thisServer);
        } else {
          // No FN, ignore
          LOG.trace("Ignoring, no favored nodes for region: " + hri);
          return BalanceAction.NULL_ACTION;
        }
      } else {
        // Pick other favored node with the highest locality
        otherServer = getDifferentFavoredNode(cluster, favoredNodes, thisServer);
      }
      // NOTE(review): getDifferentFavoredNode can return -1 when no other FN is online;
      // presumably getAction treats a -1 destination as a no-op — confirm.
      return getAction(thisServer, thisRegion, otherServer, -1);
    }
    /**
     * Among the favored nodes present in the cluster (excluding the current server), return
     * the index of the one with the highest per-server locality, or -1 if none qualifies.
     */
    private int getDifferentFavoredNode(BalancerClusterState cluster, List<ServerName> favoredNodes,
      int currentServer) {
      List<Integer> fnIndex = new ArrayList<>();
      for (ServerName sn : favoredNodes) {
        if (cluster.serversToIndex.containsKey(sn.getAddress())) {
          fnIndex.add(cluster.serversToIndex.get(sn.getAddress()));
        }
      }
      float locality = 0;
      int highestLocalRSIndex = -1;
      for (Integer index : fnIndex) {
        if (index != currentServer) {
          float temp = cluster.localityPerServer[index];
          // '>=' keeps the last of equally-local candidates.
          if (temp >= locality) {
            locality = temp;
            highestLocalRSIndex = index;
          }
        }
      }
      return highestLocalRSIndex;
    }
    /** Delegate to the cluster state for the lowest-locality region hosted on the server. */
    private int pickLowestLocalRegionOnServer(BalancerClusterState cluster, int server) {
      return cluster.getLowestLocalityRegionOnServer(server);
    }
  }
  /*
   * This is like LoadCandidateGenerator, but we choose appropriate FN for the region on the
   * most loaded server.
   */
  class FavoredNodeLoadPicker extends CandidateGenerator {
    /**
     * Propose a move: take a random region off the most-loaded server and move it to its
     * least-loaded favored node (or simply the least-loaded server when the region is
     * exempt from favored-node placement).
     */
    @Override
    BalanceAction generate(BalancerClusterState cluster) {
      cluster.sortServersByRegionCount();
      int thisServer = pickMostLoadedServer(cluster);
      int thisRegion = pickRandomRegion(cluster, thisServer, 0);
      RegionInfo hri = cluster.regions[thisRegion];
      int otherServer;
      List<ServerName> favoredNodes = fnm.getFavoredNodes(hri);
      if (favoredNodes == null) {
        if (!FavoredNodesManager.isFavoredNodeApplicable(hri)) {
          otherServer = pickLeastLoadedServer(cluster, thisServer);
        } else {
          // FN-applicable region without FN recorded: nothing sensible to do here.
          return BalanceAction.NULL_ACTION;
        }
      } else {
        otherServer = pickLeastLoadedFNServer(cluster, favoredNodes, thisServer);
      }
      return getAction(thisServer, thisRegion, otherServer, -1);
    }
    /**
     * Walk the region-count-sorted server list from the least-loaded end and return the
     * first server that is not the current one.
     * NOTE(review): if every entry is null or equals thisServer the loop runs off the end
     * and servers[index] throws ArrayIndexOutOfBoundsException — presumably unreachable
     * with >= 2 live servers; confirm.
     */
    private int pickLeastLoadedServer(final BalancerClusterState cluster, int thisServer) {
      Integer[] servers = cluster.serverIndicesSortedByRegionCount;
      int index;
      for (index = 0; index < servers.length ; index++) {
        if ((servers[index] != null) && servers[index] != thisServer) {
          break;
        }
      }
      return servers[index];
    }
    /**
     * Among the favored nodes present in the cluster (excluding the current server), return
     * the index of the one hosting the fewest regions, or -1 if none qualifies.
     */
    private int pickLeastLoadedFNServer(final BalancerClusterState cluster,
      List<ServerName> favoredNodes, int currentServerIndex) {
      List<Integer> fnIndex = new ArrayList<>();
      for (ServerName sn : favoredNodes) {
        if (cluster.serversToIndex.containsKey(sn.getAddress())) {
          fnIndex.add(cluster.serversToIndex.get(sn.getAddress()));
        }
      }
      int leastLoadedFN = -1;
      int load = Integer.MAX_VALUE;
      for (Integer index : fnIndex) {
        if (index != currentServerIndex) {
          int temp = cluster.getNumRegions(index);
          if (temp < load) {
            load = temp;
            leastLoadedFN = index;
          }
        }
      }
      return leastLoadedFN;
    }
    /** Walk the region-count-sorted server list from the most-loaded end to the first non-null entry. */
    private int pickMostLoadedServer(final BalancerClusterState cluster) {
      Integer[] servers = cluster.serverIndicesSortedByRegionCount;
      int index;
      for (index = servers.length - 1; index > 0 ; index--) {
        if (servers[index] != null) {
          break;
        }
      }
      return servers[index];
    }
  }
  /**
   * For all regions correctly assigned to favored nodes, we just use the stochastic balancer
   * implementation. For the misplaced regions, we assign a bogus server to it and AM takes care.
   */
  @Override
  protected List<RegionPlan> balanceTable(TableName tableName,
    Map<ServerName, List<RegionInfo>> loadOfOneTable) {
    List<RegionPlan> regionPlans = Lists.newArrayList();
    // Regions already hosted on one of their favored nodes (or exempt from FN placement);
    // these are handed to the stochastic balancer below.
    Map<ServerName, List<RegionInfo>> correctAssignments = new HashMap<>();
    int misplacedRegions = 0;
    for (Map.Entry<ServerName, List<RegionInfo>> entry : loadOfOneTable.entrySet()) {
      ServerName current = entry.getKey();
      List<RegionInfo> regions = Lists.newArrayList();
      correctAssignments.put(current, regions);
      for (RegionInfo hri : entry.getValue()) {
        List<ServerName> favoredNodes = fnm.getFavoredNodes(hri);
        if (FavoredNodesPlan.getFavoredServerPosition(favoredNodes, current) != null ||
          !FavoredNodesManager.isFavoredNodeApplicable(hri)) {
          regions.add(hri);
        } else {
          // No favored nodes, lets unassign.
          LOG.warn("Region not on favored nodes, unassign. Region: " + hri + " current: " +
            current + " favored nodes: " + favoredNodes);
          try {
            provider.unassign(hri);
          } catch (IOException e) {
            // Best effort: leave the region where it is if unassign fails.
            LOG.warn("Failed unassign", e);
            continue;
          }
          // A plan with a null destination signals the assignment manager to re-place it.
          RegionPlan rp = new RegionPlan(hri, null, null);
          regionPlans.add(rp);
          misplacedRegions++;
        }
      }
    }
    LOG.debug("Found misplaced regions: " + misplacedRegions + ", not on favored nodes.");
    List<RegionPlan> regionPlansFromBalance = super.balanceTable(tableName, correctAssignments);
    if (regionPlansFromBalance != null) {
      regionPlans.addAll(regionPlansFromBalance);
    }
    return regionPlans;
  }
}
| |
package com.taxisurfr.server.entity;
import com.googlecode.objectify.annotation.Entity;
import com.googlecode.objectify.annotation.Id;
import com.taxisurfr.shared.Currency;
import com.taxisurfr.shared.OrderStatus;
import com.taxisurfr.shared.OrderType;
import com.taxisurfr.shared.model.BookingInfo;
import com.taxisurfr.shared.model.FinanceInfo;
import com.taxisurfr.shared.model.Info;
import com.taxisurfr.shared.model.RouteInfo;
import java.io.Serializable;
import java.util.Date;
@Entity
public class Booking<T extends Info, K extends ArugamEntity> extends ArugamEntity implements Serializable, Comparable<Booking>
{
    private static final long serialVersionUID = 1L;

    @Id Long id;

    // --- Trip details captured from the customer at booking time ---
    private Date date;
    private String dateText;
    private String name;
    private String email;
    private String flightNo;
    private String landingTime;
    private int pax;
    private int surfboards;
    private String requirements;
    private Boolean shareWanted;

    // --- Order bookkeeping ---
    private OrderStatus orderStatus;
    private OrderType orderType;
    // Creation timestamp of this entity ("instanziated" spelling kept for stored data).
    private Date instanziated;
    private Long parentId;
    // Id of the booked route entity.
    private Long route;
    private Boolean rated;
    private String client;

    // --- Payment ---
    private String tx;
    private String stripeRefusal;
    private Currency currency = Currency.USD;
    private int paidPrice;

    // deprecated order reference, kept for legacy data
    private String ref;
    private String parentRef;

    /**
     * Creates a booking in BOOKED state of type BOOKING, stamped with the current
     * time and not yet rated.
     */
    public Booking()
    {
        orderStatus = OrderStatus.BOOKED;
        orderType = OrderType.BOOKING;
        instanziated = new Date();
        rated = false;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Date getDate()
    {
        return date;
    }

    public void setDate(Date date)
    {
        this.date = date;
    }

    public int getPaidPrice()
    {
        return paidPrice;
    }

    public void setPaidPrice(int paidPrice)
    {
        this.paidPrice = paidPrice;
    }

    public Long getRoute()
    {
        return route;
    }

    public void setRoute(Long route)
    {
        this.route = route;
    }

    public Boolean getRated()
    {
        return rated;
    }

    public void setRated(boolean rated)
    {
        this.rated = rated;
    }

    public Boolean getShareWanted()
    {
        return shareWanted;
    }

    public void setShareWanted(Boolean shareWanted)
    {
        this.shareWanted = shareWanted;
    }

    public Long getParentId()
    {
        return parentId;
    }

    public void setParentId(Long parentId)
    {
        this.parentId = parentId;
    }

    public OrderType getOrderType()
    {
        return orderType;
    }

    public void setOrderType(OrderType orderType)
    {
        this.orderType = orderType;
    }

    public Date getInstanziated()
    {
        return instanziated;
    }

    public void setInstanziated(Date instanziated)
    {
        this.instanziated = instanziated;
    }

    public void setStatus(OrderStatus status)
    {
        this.orderStatus = status;
    }

    public OrderStatus getStatus()
    {
        return orderStatus;
    }

    public String getTx()
    {
        return tx;
    }

    public void setTx(String tx)
    {
        this.tx = tx;
    }

    public String getClient()
    {
        return client;
    }

    public void setClient(String client)
    {
        this.client = client;
    }

    public String getName()
    {
        return name;
    }

    public void setName(String name)
    {
        this.name = name;
    }

    public String getEmail()
    {
        return email;
    }

    public void setEmail(String email)
    {
        this.email = email;
    }

    public int getPax()
    {
        return pax;
    }

    public void setPax(int pax)
    {
        this.pax = pax;
    }

    public String getRequirements()
    {
        return requirements;
    }

    public void setRequirements(String requirements)
    {
        this.requirements = requirements;
    }

    public int getSurfboards()
    {
        return surfboards;
    }

    public void setSurfboards(int surfboards)
    {
        this.surfboards = surfboards;
    }

    /**
     * Static factory: builds a Booking entity from the client-supplied transfer
     * object. Defaults the order type to BOOKING when the info carries none.
     */
    public static Booking getBooking(BookingInfo bookingInfo, String client)
    {
        Booking booking = new Booking();
        booking.setDate(bookingInfo.getDate());
        booking.setDateText(bookingInfo.getDateText());
        booking.setEmail(bookingInfo.getEmail());
        booking.setName(bookingInfo.getName());
        booking.setFlightNo(bookingInfo.getFlightNo());
        booking.setLandingTime(bookingInfo.getLandingTime());
        booking.setPax(bookingInfo.getPax());
        booking.setSurfboards(bookingInfo.getSurfboards());
        booking.setRequirements(bookingInfo.getRequirements());
        booking.setClient(client);
        booking.setShareWanted(bookingInfo.getShareWanted());
        booking.setParentId(bookingInfo.getParentId());
        booking.setOrderType(bookingInfo.getOrderType() != null ? bookingInfo.getOrderType() : OrderType.BOOKING);
        booking.setRoute(bookingInfo.getRouteId());
        booking.setCurrency(bookingInfo.getCurrency());
        booking.setPaidPrice(bookingInfo.getPaidPrice());
        return booking;
    }

    /**
     * Copies this booking into an ArchivedBooking entity (id, payment and client
     * fields are intentionally not carried over).
     */
    public ArchivedBooking getArchivedBooking()
    {
        ArchivedBooking booking = new ArchivedBooking();
        booking.setDate(date);
        booking.setEmail(email);
        booking.setName(name);
        booking.setFlightNo(flightNo);
        booking.setLandingTime(landingTime);
        booking.setPax(pax);
        booking.setSurfboards(surfboards);
        booking.setRequirements(requirements);
        booking.setShareWanted(shareWanted);
        booking.setOrderType(orderType);
        booking.setStatus(orderStatus);
        booking.setRoute(route);
        booking.setRef(ref);
        return booking;
    }

    @Override
    public BookingInfo getInfo()
    {
        return getBookingInfo(null);
    }

    /**
     * Builds the transfer object for this booking, optionally attaching the
     * already-resolved route info.
     *
     * @param routeInfo resolved route details, or null when not available
     */
    public BookingInfo getBookingInfo(RouteInfo routeInfo)
    {
        BookingInfo bookingInfo = new BookingInfo();
        bookingInfo.setId(id);
        bookingInfo.setDate(getDate());
        bookingInfo.setDateText(getDateText());
        bookingInfo.setEmail(getEmail());
        bookingInfo.setName(getName());
        bookingInfo.setFlightNo(getFlightNo());
        bookingInfo.setLandingTime(getLandingTime());
        bookingInfo.setPax(getPax());
        bookingInfo.setSurfboards(getSurfboards());
        bookingInfo.setRequirements(getRequirements());
        bookingInfo.setParentId(getParentId());
        bookingInfo.setStatus(getStatus());
        bookingInfo.setOrderType(getOrderType());
        bookingInfo.setShareWanted(getShareWanted());
        bookingInfo.setRouteInfo(routeInfo);
        bookingInfo.setRouteId(route);
        bookingInfo.setOrderRef(ref);
        bookingInfo.setCurrency(currency);
        bookingInfo.setPaidPrice(paidPrice);
        return bookingInfo;
    }

    public String getDateText()
    {
        return dateText;
    }

    public void setDateText(String dateText)
    {
        this.dateText = dateText;
    }

    public String getFlightNo()
    {
        return flightNo;
    }

    public void setFlightNo(String flightNo)
    {
        this.flightNo = flightNo;
    }

    public String getLandingTime()
    {
        return landingTime;
    }

    public void setLandingTime(String landingTime)
    {
        this.landingTime = landingTime;
    }

    // deprecated
    public String getRef()
    {
        return ref;
    }

    public void setRef(String ref)
    {
        this.ref = ref;
    }

    public String getParentRef()
    {
        return parentRef;
    }

    public void setParentRef(String parentRef)
    {
        this.parentRef = parentRef;
    }

    /**
     * Orders bookings newest-first by creation timestamp.
     * <p>
     * The previous implementation compared {@code instanziated} with itself and
     * returned -1 from both branches, which violated the Comparable contract
     * (never 0, asymmetric) and made sort results unstable. Assumes
     * {@code instanziated} is non-null, as guaranteed by the constructor.
     */
    @Override
    public int compareTo(Booking other)
    {
        return other.instanziated.compareTo(this.instanziated);
    }

    public FinanceInfo getFinanceInfo()
    {
        FinanceInfo info = new FinanceInfo();
        info.setName(name);
        info.setDate(date);
        info.setId(id);
        return info;
    }

    private void setCurrency(Currency currency)
    {
        this.currency = currency;
    }

    public Currency getCurrency() {
        return currency;
    }

    public void setStripeRefusal(String stripeRefusal) {
        this.stripeRefusal = stripeRefusal;
    }

    public String getStripeRefusal(){
        return stripeRefusal;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.chinamobile.bcbsp.graph;
import com.chinamobile.bcbsp.BSPConfiguration;
import com.chinamobile.bcbsp.Constants;
import com.chinamobile.bcbsp.api.BSP;
import com.chinamobile.bcbsp.api.Edge;
import com.chinamobile.bcbsp.api.Vertex;
import com.chinamobile.bcbsp.bspstaff.BSPStaffContext;
import com.chinamobile.bcbsp.bspstaff.Staff;
import com.chinamobile.bcbsp.comm.CommunicatorInterface;
import com.chinamobile.bcbsp.comm.GraphStaffHandler;
import com.chinamobile.bcbsp.io.RecordWriter;
import com.chinamobile.bcbsp.subgraph.SGAGraphdataStore;
import com.chinamobile.bcbsp.util.BSPJob;
import com.chinamobile.bcbsp.util.BSPJobID;
import com.chinamobile.bcbsp.util.ObjectSizer;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;
import java.util.ArrayList;
import java.util.HashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Text;
/**
* GraphDataForDisk Graph data manager for disk supported.
*/
public class GraphDataForDisk implements GraphDataInterface {
  /** Class logger. */
  private static final Log LOG = LogFactory.getLog(GraphDataForDisk.class);
  /** Converter byte to MByte. */
  private static final long MB_SIZE = 1048576;
  /** Converter byte to KByte. */
  private static final long KB_SIZE = 1024;
  /** Accumulated time for writing graph-data files to disk. */
  private long writeDiskTime = 0;
  /** Accumulated time for reading graph-data files from disk. */
  private long readDiskTime = 0;
  /** Concrete Vertex class of the job, used to size and instantiate vertices. */
  private Class<? extends Vertex<?, ?, ?>> vertexClass;
  /** Concrete Edge class of the job, used to size and instantiate edges. */
  private Class<? extends Edge<?, ?>> edgeClass;
  /**
   * The beta parameter for the proportion of data memory for the graph data,
   * 1-beta for messages data.
   */
  private float beta;
  /**
   * The parameter for the percentage of the heap memory for the data memory
   * (graph & messages).
   */
  private float dataPercent;
  /** Hash bucket number. */
  private int hashBucketNumber;
  /** Hash buckets of the headnode list. */
  private ArrayList<ArrayList<Vertex>> hashBuckets;
  /**
   * Bitmaps for each hash bucket, the bits is sorted(from right) the same as
   * the list of nodes in each bucket. A int(4 bytes) represents 32 head nodes.
   */
  private ArrayList<ArrayList<Integer>> bitmaps;
  /** Meta data table for hash buckets. */
  private ArrayList<BucketMeta> metaTable;
  /** Object sizer used for all in-memory size accounting. */
  private final ObjectSizer sizer;
  /** Size of Vertex type instance(Bytes). */
  private int sizeOfVertex;
  /** Size of Edge type instance(Bytes). */
  private int sizeOfEdge;
  /** Total size of Vertex(Bytes). */
  private long totalSizeOfVertex;
  /** Total count of Vertex. */
  private int totalCountOfVertex;
  /** Total size of Edge.(Bytes). */
  @SuppressWarnings("unused")
  private long totalSizeOfEdge;
  /** Total count of Edge. */
  private int totalCountOfEdge;
  /** Size of a reference, per the JVM word size chosen in the constructor. */
  private final int sizeOfRef;
  /** Size of an Integer instance, used when growing the bitmaps. */
  private final int sizeOfInteger;
  /** The total space for graph data(Bytes). */
  private long sizeOfGraphSpace;
  /** The threshold size for graph data(Bytes); currently set equal to sizeOfGraphSpace. */
  private long sizeThreshold;
  /** The current size of graph data(Bytes). */
  private long sizeOfGraphDataInMem;
  /** The size of bitmaps in memory(Bytes). */
  private long sizeOfBitmapsInMem;
  /** The size of metaTable in memory(Bytes). NOTE(review): never assigned — its accounting is commented out in initialize(). */
  private long sizeOfMetaTable;
  /** BSP job ID. */
  private BSPJobID jobID;
  /** Graph data partition ID. */
  private int partitionID;
  /** Root directory of graph data on disk. */
  private File fileRoot;
  /** Graph data file on disk. */
  private File graphDataFile;
  /** Graph data bucket file on disk. */
  private File graphDataFileBucket;
  /** Reader for the file of graph data on disk. */
  private FileReader frGraphData;
  /** Buffered reader for the file of graph data on disk. */
  private BufferedReader brGraphData;
  /** Writer to write graph data on disk. */
  private FileWriter fwGraphData;
  /** Buffered writer to write graph data on disk. */
  private BufferedWriter bwGraphData;
  /** Total number of head nodes ever added (active or not). */
  private int sizeForAll;
  /** The sorted hash bucket index list used for traversal order. */
  private int[] sortedBucketIndexList;
  /** Current pointer of bucket index for traversal. */
  private int currentBucketIndex;
  /** Current pointer of node index in bucket for traversal. */
  private int currentNodeIndex;
  /** Handle of the owning BSP staff. */
  private Staff staff;
/** The meta data for a bucket. */
public class BucketMeta {
/** Have been accessed by which number of super step. */
public int superStepCount;
/** flag of on disk. */
public boolean onDiskFlag;
/** The length of the bucket by Bytes. */
public long length;
/** The length of the part of the bucket still in memory by Bytes. */
public long lengthInMemory;
/** Number of all nodes in the bucket. */
public int count;
/** Number of active nodes in the bucket. */
public int activeCount;
}
/**
* Constructor of MessageQueuesForDisk. Initialize some Constants variables.
*/
public GraphDataForDisk() {
BSPConfiguration conf = new BSPConfiguration();
if (conf.getInt(Constants.BC_BSP_JVM_VERSION, 32) == 64) {
sizer = ObjectSizer.forSun64BitsVM();
} else {
sizer = ObjectSizer.forSun32BitsVM();
}
this.sizeOfRef = sizer.sizeOfRef();
this.sizeOfInteger = sizer.sizeOf(new Integer(0));
}
/** Initialize the Job information. */
@Override
public void initialize() {
BSPJob job = this.staff.getConf();
int partitionIDNow = this.staff.getPartition();
initialize(job, partitionIDNow);
}
  /**
   * Initialize the graph data disk manager with job and partitionID. Get the
   * parameter of graph on disk.
   * <p>
   * Sets up the hash buckets, per-bucket bitmaps and metadata, measures the
   * default sizes of a Vertex/Edge instance, and derives the in-memory graph
   * space budget from the JVM heap (heap * dataPercent * beta).
   * @param job the BSP job carrying vertex/edge classes and memory parameters
   * @param partitionID the graph partition this manager owns
   */
  public void initialize(BSPJob job, int partitionID) {
    vertexClass = job.getVertexClass();
    edgeClass = job.getEdgeClass();
    LOG.info("========== Initializing Graph Data For Disk ==========");
    this.dataPercent = job.getMemoryDataPercent(); // Default 0.8
    this.jobID = job.getJobID();
    this.partitionID = partitionID;
    this.beta = job.getBeta();
    this.hashBucketNumber = job.getHashBucketNumber();
    LOG.info("[beta] = " + this.beta);
    LOG.info("[hashBucketNumber] = " + this.hashBucketNumber);
    this.hashBuckets = new ArrayList<ArrayList<Vertex>>(hashBucketNumber);
    // So the bitmaps's length decides the maximum nodes of a bucket is
    // 320*32.
    this.bitmaps = new ArrayList<ArrayList<Integer>>(hashBucketNumber);
    this.metaTable = new ArrayList<BucketMeta>(hashBucketNumber);
    // Initialize the meta table and bitmaps.
    for (int i = 0; i < hashBucketNumber; i++) {
      // Buckets are created lazily in addForAll; start with a null slot.
      this.hashBuckets.add(null);
      // init the meta table.
      BucketMeta meta = new BucketMeta();
      meta.superStepCount = -1;
      meta.onDiskFlag = false;
      meta.length = 0;
      meta.lengthInMemory = 0;
      meta.count = 0;
      meta.activeCount = 0;
      metaTable.add(meta);
      // init the bitmapsCache.
      ArrayList<Integer> bitmap = new ArrayList<Integer>(
          Constants.GRAPH_BITMAP_BUCKET_NUM_BYTES);
      for (int j = 0; j < Constants.GRAPH_BITMAP_BUCKET_NUM_BYTES; j++) {
        bitmap.add(0);
      }
      this.bitmaps.add(bitmap);
    }
    // Initialize the size of objects and data structures.
    // NOTE(review): metaTable size accounting is disabled; sizeOfMetaTable stays 0.
    //int sizeOfMetaBucket = sizer.sizeOf(new BucketMeta());
    //this.sizeOfMetaTable = (sizeOfMetaBucket + sizeOfRef) * hashBucketNumber;
    int sizeOfBitmap = sizer.sizeOf(new ArrayList<Integer>());
    this.sizeOfBitmapsInMem = (sizeOfBitmap + sizeOfRef) * hashBucketNumber;
    // Measure default instance sizes of the job's vertex/edge classes.
    Vertex<?, ?, ?> tmpVertex = null;
    Edge<?, ?> tmpEdge = null;
    try {
      tmpVertex = this.vertexClass.newInstance();
      tmpEdge = this.edgeClass.newInstance();
    } catch (InstantiationException e) {
      throw new RuntimeException("[GraphDataForDisk] caught:", e);
    } catch (IllegalAccessException e) {
      throw new RuntimeException("[GraphDataForDisk] caught:", e);
    }
    this.sizeOfVertex = sizer.sizeOf(tmpVertex);
    this.sizeOfEdge = sizer.sizeOf(tmpEdge);
    LOG.info("[Default initial size of Vertex] = " + this.sizeOfVertex + "B");
    LOG.info("[Default initial size of Edge] = " + this.sizeOfEdge + "B");
    // Get the memory mxBean.
    MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
    // Get the heap memory usage.
    MemoryUsage memoryUsage = memoryMXBean.getHeapMemoryUsage();
    long maxHeapSize = memoryUsage.getMax();
    LOG.info("[JVM max Heap size] = " + maxHeapSize / MB_SIZE + "MB");
    // Graph budget = heap * dataPercent * beta; the rest of the data memory
    // is reserved for messages (1 - beta).
    this.sizeOfGraphSpace = (long) (maxHeapSize * dataPercent * beta);
    this.sizeThreshold = (sizeOfGraphSpace);
    this.sizeOfGraphDataInMem = 0;
    this.sizeForAll = 0;
    this.totalSizeOfVertex = 0;
    this.totalCountOfVertex = 0;
    this.totalSizeOfEdge = 0;
    this.totalCountOfEdge = 0;
    this.sortedBucketIndexList = new int[hashBucketNumber];
    this.fileRoot = new File("/tmp/bcbsp/" + this.jobID.toString() + "/"
        + "partition-" + this.partitionID);
    this.graphDataFile = new File(this.fileRoot + "/" + "GraphData");
    LOG.info("[size of Graph Data Space] = " + this.sizeOfGraphSpace / MB_SIZE
        + "MB");
    LOG.info("[threshold of Graph Data] = " + this.sizeThreshold / MB_SIZE
        + "MB");
    LOG.info("======================================================");
  }
  /**
   * Adds a vertex to its hash bucket (chosen by the string hash of its vertex
   * ID), updates all size/count accounting and marks the vertex active in the
   * bucket's bitmap. Synchronized because load-time callers may add
   * concurrently.
   */
  @Override
  public synchronized void addForAll(Vertex vertex) {
    this.sizeForAll++;
    String vertexID = String.valueOf(vertex.getVertexID());
    int hashCode = vertexID.hashCode();
    int hashIndex = hashCode % this.hashBucketNumber; // bucket index
    // Java's % can be negative; shift into [0, hashBucketNumber).
    hashIndex = (hashIndex < 0 ? hashIndex + this.hashBucketNumber : hashIndex);
    /* Add the vertex to the right hash bucket. */
    ArrayList<Vertex> hashBucket = this.hashBuckets.get(hashIndex);
    // When the hash bucket reference is null, create a bucket.
    if (hashBucket == null) {
      hashBucket = new ArrayList<Vertex>();
    }
    hashBucket.add(vertex);
    this.hashBuckets.set(hashIndex, hashBucket);
    // Evaluate the memory size of the new Vertex.
    int newVertexSize = this.sizeOfRef + this.sizer.sizeOf(vertex);
    this.totalSizeOfVertex += newVertexSize;
    this.totalCountOfVertex++;
    this.totalCountOfEdge += vertex.getEdgesNum();
    /* Add the vertex's size to the length of the bucket's meta. */
    BucketMeta meta = this.metaTable.get(hashIndex);
    meta.length = meta.length + newVertexSize;
    meta.lengthInMemory = meta.lengthInMemory + newVertexSize;
    meta.count = meta.count + 1;
    meta.activeCount = meta.activeCount + 1;
    this.metaTable.set(hashIndex, meta);
    /* Add the headnode's size to the graph data's size. */
    this.sizeOfGraphDataInMem = this.sizeOfGraphDataInMem + newVertexSize;
    /* Set the bitmap's refered bit to 1 */
    // NOTE(review): the bit index is activeCount - 1, which matches the list
    // position only while every loaded vertex is active — confirm for reload paths.
    setBitmapTrue(hashIndex, meta.activeCount - 1, this.bitmaps);
    // Account a new Integer in the bitmaps every 32 nodes added (the old
    // comment said 8, but each Integer bitmap word covers 32 nodes).
    if (this.sizeForAll % 32 == 1) {
      this.sizeOfBitmapsInMem = this.sizeOfBitmapsInMem + this.sizeOfInteger
          + this.sizeOfRef;
    }
    onVertexAdded();
  }
  /**
   * Returns the index-th ACTIVE vertex in traversal order, swapping its bucket
   * in from disk first if needed (evicting earlier in-memory buckets to make
   * room). Also records the bucket/node position for a following set() call.
   * Callers are expected to iterate from index 0, which (re)sorts the buckets.
   */
  @Override
  public Vertex get(int index) {
    // When it's the first time to access head node,
    // sort the buckets into an order.
    if (index == 0) {
      sortBucketsForAll();
    }
    // Locate the bucket for the index: walk buckets in sorted order,
    // subtracting each bucket's active count until the index falls inside one.
    int activeCount = 0;
    int i = 0;
    while ((activeCount = this.metaTable.get(this.sortedBucketIndexList[i])
        .activeCount) < index + 1) {
      index = index - activeCount;
      i++;
    }
    int bucketIndex = this.sortedBucketIndexList[i];
    // Locate the nodeIndex in bucket for the index: the bitmap maps the
    // active-vertex ordinal to the physical list position.
    int nodeIndex = 0; // physical node index.
    int counter = 0; // active counter.
    while (counter < index + 1) {
      // If the bit for the nodeIndex is active
      if (getBitmap(bucketIndex, nodeIndex, this.bitmaps)) {
        counter++;
      }
      nodeIndex++;
    }
    nodeIndex = nodeIndex - 1;
    // If the bucket is on disk, evict earlier in-memory buckets until there
    // is room, then load it.
    if (this.metaTable.get(bucketIndex).onDiskFlag) {
      long bucketLength = this.metaTable.get(bucketIndex).length;
      long idleLength = 0; // Idle space that can be swap out.
      // NOTE(review): unlike getForAll, this starts idleLength at 0 instead of
      // (sizeThreshold - sizeOfGraphDataInMem) — confirm the asymmetry is intended.
      for (int j = 0; j < i; j++) {
        int m = this.sortedBucketIndexList[j];
        // If the m bucket is in memory, swap it out,
        // and add the length to the idleLength.
        if (!this.metaTable.get(m).onDiskFlag) {
          idleLength = idleLength + this.metaTable.get(m).length;
          try {
            saveBucket(m);
          } catch (IOException e) {
            throw new RuntimeException("[GraphDataForDisk] caught:", e);
          }
        }
        // When idle is enough, break the for.
        if (idleLength >= bucketLength) {
          break;
        }
      }
      /* Load bucket */
      try {
        loadBucket(bucketIndex);
      } catch (IOException e) {
        throw new RuntimeException("[GraphDataForDisk] caught:", e);
      }
    }
    // For set method use.
    this.currentBucketIndex = bucketIndex;
    this.currentNodeIndex = nodeIndex;
    return this.hashBuckets.get(bucketIndex).get(nodeIndex);
  }
  /**
   * Returns the index-th vertex (active or not) in traversal order, swapping
   * its bucket in from disk first if needed. Unlike get(), the index counts
   * every vertex, so no bitmap lookup is required. Records the bucket/node
   * position for a following set() call.
   */
  @Override
  public Vertex getForAll(int index) {
    // When it's the first time to access head node,
    // sort the buckets into an order.
    if (index == 0) {
      sortBucketsForAll();
    }
    // Locate the bucket for the index by subtracting whole-bucket counts.
    int count = 0;
    int i = 0;
    while ((count = this.metaTable.get(this.sortedBucketIndexList[i])
        .count) < index + 1) {
      index = index - count;
      // Set the accessed bucket's referrence to null.
      // this.hashBuckets.set(this.sortedBucketIndexList[i], null);
      i++;
    }
    int bucketIndex = this.sortedBucketIndexList[i];
    // If the bucket is on disk, free enough memory and load it.
    if (this.metaTable.get(bucketIndex).onDiskFlag) {
      long bucketLength = this.metaTable.get(bucketIndex).length;
      // Start from the space already free below the threshold.
      long idleLength = this.sizeThreshold - this.sizeOfGraphDataInMem;
      // Idle space that can be swap out.
      // Look up the bucket before the i bucket for in-memory bucket's space.
      // Swap the in-memory buckets out on disk until the idleLength
      // enough for the on-disk bucket to swap in.
      for (int j = 0; j < i; j++) {
        // When idle is enough, break the for.
        if (idleLength >= bucketLength) {
          break;
        }
        int m = this.sortedBucketIndexList[j];
        // If the m bucket is in memory, swap it out,
        // and add the length to the idleLength.
        if (!this.metaTable.get(m).onDiskFlag) {
          idleLength = idleLength + this.metaTable.get(m).length;
          try {
            saveBucket(m);
          } catch (IOException e) {
            throw new RuntimeException("[GraphDataForDisk] caught:", e);
          }
        }
      }
      /* Load bucket */
      try {
        loadBucket(bucketIndex);
      } catch (IOException e) {
        throw new RuntimeException("[GraphDataForDisk] caught:", e);
      }
    }
    // For set method use.
    this.currentBucketIndex = bucketIndex;
    this.currentNodeIndex = index;
    return this.hashBuckets.get(bucketIndex).get(index);
  }
/**
 * Stores the vertex and its active flag at the position located by the
 * most recent getForAll()/get() call, keeping the bucket's activeCount
 * and the activity bitmap in sync.
 *
 * @param index       logical vertex index (unused; the cached
 *                    currentBucketIndex/currentNodeIndex are used)
 * @param vertex      vertex value to store
 * @param activeState whether the vertex is active after this update
 */
@Override
public void set(int index, Vertex vertex, boolean activeState) {
int bucket = this.currentBucketIndex;
int node = this.currentNodeIndex;
this.hashBuckets.get(bucket).set(node, vertex);
boolean wasActive = this.getBitmap(bucket, node, this.bitmaps);
// Only touch the bitmap and the counter when the flag actually flips.
if (activeState && !wasActive) {
this.setBitmapTrue(bucket, node, this.bitmaps);
this.metaTable.get(bucket).activeCount++;
} else if (!activeState && wasActive) {
this.setBitmapFalse(bucket, node, this.bitmaps);
this.metaTable.get(bucket).activeCount--;
}
}
/**
 * @return the number of currently active vertices, summed over every
 *         bucket's activeCount
 */
@Override
public int size() {
int total = 0;
for (int i = 0; i < this.metaTable.size(); i++) {
total += this.metaTable.get(i).activeCount;
}
return total;
}
/**
 * @return the total number of vertices (active and inactive) held by
 *         this graph data store
 */
@Override
public int sizeForAll() {
return this.sizeForAll;
}
/**
 * Hook invoked after a vertex is added: when the in-memory footprint
 * (graph data + bitmaps) reaches the threshold, spills the longest
 * still-in-memory bucket to disk.
 */
private void onVertexAdded() {
// When the graph data's size exceeds the threshold.
if (this.sizeOfGraphDataInMem + this.sizeOfBitmapsInMem >=
this.sizeThreshold) {
// If the root dir does not exist, create it.
// NOTE(review): mkdirs()/mkdir() results are ignored; a failure here
// only surfaces later as an IOException from saveBucket().
if (!this.fileRoot.exists()) {
this.fileRoot.mkdirs();
}
// If the graph data dir does not exist, create it.
if (!this.graphDataFile.exists()) {
this.graphDataFile.mkdir();
}
int bucketIndex = findLongestBucket();
try {
saveBucket(bucketIndex);
} catch (IOException e) {
throw new RuntimeException("[GraphDataForDisk] caught:", e);
}
}
}
/**
 * Finds the bucket with the greatest total length among buckets that
 * still hold data in memory (lengthInMemory != 0).
 *
 * @return index of that bucket, or 0 if no bucket qualifies
 */
private int findLongestBucket() {
int longestIndex = 0;
long longest = 0;
for (int idx = 0; idx < this.hashBucketNumber; idx++) {
BucketMeta candidate = metaTable.get(idx);
// Skip buckets that already live entirely on disk.
if (candidate.lengthInMemory != 0 && candidate.length > longest) {
longest = candidate.length;
longestIndex = idx;
}
}
return longestIndex;
}
/**
 * Spills the given in-memory bucket to its on-disk file and releases the
 * in-memory copy.
 *
 * <p>Vertices are appended line by line (one {@code intoString()} record
 * per line), so a bucket saved several times accumulates all of its
 * records in the same file; {@link #loadBucket(int)} reads them all back
 * and deletes the file. The writers are now closed in finally blocks so
 * a failed write no longer leaks the file handle.
 *
 * @param index index of the bucket to save
 * @throws IOException if the bucket file cannot be created or written
 */
private void saveBucket(int index) throws IOException {
long start = System.currentTimeMillis(); // clock the disk write
this.graphDataFileBucket = new File(this.graphDataFile + "/" + "bucket-"
+ index);
boolean isNewFile = false;
// The bucket file does not exist yet, create it.
if (!this.graphDataFileBucket.exists()) {
this.graphDataFileBucket.createNewFile();
isNewFile = true;
}
// Append to the bucket file by line; close the writers even if a write
// fails, so the descriptor is not leaked.
this.fwGraphData = new FileWriter(graphDataFileBucket, true);
try {
this.bwGraphData = new BufferedWriter(fwGraphData, 65536);
try {
if (isNewFile) {
// Write the file header.
this.bwGraphData.write(Constants.GRAPH_BUCKET_FILE_HEADER + "-" + index);
}
ArrayList<Vertex> hashBucket = this.hashBuckets.get(index);
for (int i = 0; i < hashBucket.size(); i++) {
this.bwGraphData.newLine();
this.bwGraphData.write(hashBucket.get(i).intoString());
}
} finally {
this.bwGraphData.close();
}
} finally {
// Closing the BufferedWriter also closes this; the second close is a
// harmless no-op but guards the path where the BufferedWriter
// constructor itself failed.
this.fwGraphData.close();
}
// Update the meta data for the bucket.
BucketMeta meta = metaTable.get(index);
// Update the size of graph data held in memory.
this.sizeOfGraphDataInMem = this.sizeOfGraphDataInMem - meta.lengthInMemory;
meta.onDiskFlag = true; // Set the on disk flag true.
meta.lengthInMemory = 0; // Set the length in memory to 0.
metaTable.set(index, meta);
// Drop the in-memory copy so it can be garbage collected.
this.hashBuckets.set(index, null);
this.writeDiskTime = this.writeDiskTime
+ (System.currentTimeMillis() - start);
}
/**
 * Loads the given bucket from its disk file into the in-memory array
 * list, then deletes the file.
 *
 * <p>The readers are now closed in finally blocks so a parse or read
 * failure no longer leaks the file handle. The "does not exit" typo in
 * the missing-file message is also fixed.
 *
 * @param index index of the bucket to load
 * @throws IOException if the bucket file is missing, unreadable, or
 *                     cannot be deleted afterwards
 */
private void loadBucket(int index) throws IOException {
long start = System.currentTimeMillis(); // clock the disk read
this.graphDataFileBucket = new File(this.graphDataFile + "/" + "bucket-"
+ index);
if (!this.graphDataFileBucket.exists()) {
throw new IOException("Bucket file does not exist!");
}
// Reuse the partially-loaded bucket list if one is present.
ArrayList<Vertex> hashBucket = this.hashBuckets.get(index);
if (hashBucket == null) {
hashBucket = new ArrayList<Vertex>();
}
// Open the readers; close them even if parsing fails.
this.frGraphData = new FileReader(this.graphDataFileBucket);
try {
this.brGraphData = new BufferedReader(this.frGraphData);
try {
// Read (and discard) the file header written by saveBucket().
this.brGraphData.readLine();
String vertexData;
while ((vertexData = this.brGraphData.readLine()) != null) {
Vertex vertex = this.vertexClass.newInstance();
vertex.fromString(vertexData);
hashBucket.add(vertex);
}
} catch (Exception e) {
// newInstance()/fromString() failures are wrapped, as before.
throw new RuntimeException("[GraphDataForDisk] caught:", e);
} finally {
this.brGraphData.close();
}
} finally {
this.frGraphData.close();
}
// Update the meta data for the bucket.
BucketMeta meta = metaTable.get(index);
// Update the size of graph data held in memory.
this.sizeOfGraphDataInMem = this.sizeOfGraphDataInMem
+ (meta.length - meta.lengthInMemory);
meta.onDiskFlag = false;
meta.lengthInMemory = meta.length;
metaTable.set(index, meta);
this.hashBuckets.set(index, hashBucket);
// The bucket now lives fully in memory, so remove the disk copy.
if (!this.graphDataFileBucket.delete()) {
throw new IOException("Bucket file delete failed!");
}
this.readDiskTime = this.readDiskTime
+ (System.currentTimeMillis() - start);
}
/**
 * Sets the bit for the nodeIndex head node of the hashIndex bucket to 1.
 *
 * <p>Bug fix: the bitmap is grown with a while loop until it contains
 * the target word. The old code appended at most ONE word, which threw
 * IndexOutOfBoundsException if nodeIndex was more than 32 bits past the
 * current end of the bitmap.
 *
 * @param hashIndex bucket index
 * @param nodeIndex node index inside the bucket
 * @param aBitmaps  per-bucket activity bitmaps
 */
private void setBitmapTrue(int hashIndex, int nodeIndex,
ArrayList<ArrayList<Integer>> aBitmaps) {
// Get the referred bitmap of the bucket.
ArrayList<Integer> bitmap = aBitmaps.get(hashIndex);
// The node bit belongs to the word-th int element of the array.
int word = nodeIndex / 32;
// Grow the bitmap until the target word exists.
while (word >= bitmap.size()) {
bitmap.add(Integer.valueOf(0));
}
// Bit 0 of a word is its most significant bit.
int mask = 1 << (31 - nodeIndex % 32);
// OR the int element with the mask to set the bit to 1.
bitmap.set(word, bitmap.get(word) | mask);
// Set the bitmap back into the bucket list.
aBitmaps.set(hashIndex, bitmap);
}
/**
 * Sets the bit for the nodeIndex head node of the hashIndex bucket to 0.
 *
 * <p>Bug fix: the bitmap is grown with a while loop until it contains
 * the target word (the old code appended at most one word — see
 * setBitmapTrue). Clearing a bit in a freshly-added zero word is a
 * no-op, matching the intended state.
 *
 * @param hashIndex bucket index
 * @param nodeIndex node index inside the bucket
 * @param aBitmaps  per-bucket activity bitmaps
 */
private void setBitmapFalse(int hashIndex, int nodeIndex,
ArrayList<ArrayList<Integer>> aBitmaps) {
// Get the referred bitmap of the bucket.
ArrayList<Integer> bitmap = aBitmaps.get(hashIndex);
// The node bit belongs to the word-th int element of the array.
int word = nodeIndex / 32;
// Grow the bitmap until the target word exists.
while (word >= bitmap.size()) {
bitmap.add(Integer.valueOf(0));
}
// Bit 0 of a word is its most significant bit.
int mask = 1 << (31 - nodeIndex % 32);
// AND the int element with the inverted mask to clear the bit.
bitmap.set(word, bitmap.get(word) & ~mask);
// Set the bitmap back into the bucket list.
aBitmaps.set(hashIndex, bitmap);
}
/**
 * Reads the activity bit for the nodeIndex head node of the hashIndex
 * bucket.
 *
 * @param hashIndex bucket index
 * @param nodeIndex node index inside the bucket
 * @param aBitmaps  per-bucket activity bitmaps
 * @return true when the bit is 1 (vertex active), false otherwise
 */
private boolean getBitmap(int hashIndex, int nodeIndex,
ArrayList<ArrayList<Integer>> aBitmaps) {
ArrayList<Integer> bitmap = aBitmaps.get(hashIndex);
// The bit lives in the word-th int; bit 0 of a word is its MSB.
int word = nodeIndex / 32;
int mask = 1 << (31 - nodeIndex % 32);
return (bitmap.get(word) & mask) != 0;
}
/**
 * Orders sortedBucketIndexList for a full traversal: in-memory buckets
 * first, descending by length, followed by on-disk buckets ascending by
 * length.
 */
private void sortBucketsForAll() {
int memPos = 0;
int diskPos = this.hashBucketNumber - 1;
// Pass 1: partition — in-memory buckets fill from the front,
// on-disk buckets sink to the back.
for (int idx = 0; idx < this.hashBucketNumber; idx++) {
if (this.metaTable.get(idx).onDiskFlag) {
this.sortedBucketIndexList[diskPos] = idx;
diskPos--;
} else {
this.sortedBucketIndexList[memPos] = idx;
memPos++;
}
}
// Here diskPos is the last in-memory slot and memPos the first
// on-disk slot.
// Pass 2: sort the in-memory part descending by length.
for (int a = 0; a < diskPos; a++) {
for (int b = a + 1; b <= diskPos; b++) {
if (this.metaTable.get(this.sortedBucketIndexList[a]).length
< this.metaTable.get(this.sortedBucketIndexList[b]).length) {
int swap = this.sortedBucketIndexList[a];
this.sortedBucketIndexList[a] = this.sortedBucketIndexList[b];
this.sortedBucketIndexList[b] = swap;
}
}
}
// Pass 3: sort the on-disk part ascending by length.
for (int a = memPos; a < this.hashBucketNumber - 1; a++) {
for (int b = a + 1; b <= this.hashBucketNumber - 1; b++) {
if (this.metaTable.get(this.sortedBucketIndexList[a]).length
> this.metaTable.get(this.sortedBucketIndexList[b]).length) {
int swap = this.sortedBucketIndexList[a];
this.sortedBucketIndexList[a] = this.sortedBucketIndexList[b];
this.sortedBucketIndexList[b] = swap;
}
}
}
}
/**
 * Removes every bucket spill file from disk, then the spill directory
 * itself, logging a warning for anything that cannot be deleted.
 */
@Override
public void clean() {
for (int bucket = 0; bucket < this.hashBucketNumber; bucket++) {
this.graphDataFileBucket = new File(this.graphDataFile + "/" + "bucket-"
+ bucket);
if (this.graphDataFileBucket.exists()
&& !this.graphDataFileBucket.delete()) {
LOG.warn("[File] Delete file:" + this.graphDataFileBucket
+ " failed!");
}
}
if (this.graphDataFile.exists() && !this.graphDataFile.delete()) {
LOG.warn("[File] Delete directory:" + this.graphDataFile + " failed!");
}
}
/**
 * Called once data loading completes: records the average vertex size,
 * shrinks the memory budget by the bitmap footprint, sorts the buckets,
 * and forces every bucket to be either fully in memory or fully on disk
 * (a half-spilled bucket is loaded if it fits, otherwise spilled
 * entirely).
 */
@Override
public void finishAdd() {
if (this.totalCountOfVertex > 0) {
// Average serialized vertex size, used for later estimates.
this.sizeOfVertex = (int) (this.totalSizeOfVertex /
this.totalCountOfVertex);
}
LOG.info("[Graph Data For Disk] Finish loading data!!!");
// showMemoryInfo();
// Reserve the bitmap footprint out of the graph-data budget.
this.sizeThreshold = this.sizeThreshold - this.sizeOfBitmapsInMem;
// Sort the buckets: all-in-memory buckets on the left descending by
// length, buckets with a part on disk on the right ascending by length.
sortBucketsForAll();
// Traverse from right to left; prefer loading a partially-on-disk
// bucket fully into memory, and if memory cannot contain it, write
// the whole bucket out to disk instead.
for (int i = this.hashBucketNumber - 1; i >= 0; i--) {
int bucketIndex = this.sortedBucketIndexList[i];
BucketMeta meta = this.metaTable.get(bucketIndex);
// If the bucket has part on disk and part in memory.
if (meta.lengthInMemory > 0 && meta.lengthInMemory < meta.length) {
long sizeOnDisk = meta.length - meta.lengthInMemory;
// The memory still has enough space for the part on disk.
if (sizeOnDisk < (this.sizeThreshold - this.sizeOfGraphDataInMem)) {
try {
loadBucket(bucketIndex);
} catch (IOException e) {
throw new RuntimeException("[GraphDataForDisk] caught:", e);
}
} else { // The memory cannot contain the size on disk.
try {
saveBucket(bucketIndex);
} catch (IOException e) {
throw new RuntimeException("[GraphDataForDisk] caught:", e);
}
}
}
}
this.showSortedHashBucketsInfo();
}
/**
 * @return total number of active vertices across all buckets. The
 *         accumulator is now a long to match the return type — the old
 *         int accumulator could overflow on very large graphs.
 */
@Override
public long getActiveCounter() {
long count = 0;
for (BucketMeta meta : this.metaTable) {
count = count + meta.activeCount;
}
return count;
}
/**
 * Returns the active flag of the vertex located by the most recent
 * getForAll() call.
 *
 * <p>NOTE(review): the {@code index} parameter is ignored — the flag is
 * read via currentBucketIndex/currentNodeIndex, so this is only valid
 * immediately after the matching getForAll(index) call.
 *
 * @param index logical vertex index (unused, see note)
 * @return true if the most recently located vertex is active
 */
@Override
public boolean getActiveFlagForAll(int index) {
return getBitmap(this.currentBucketIndex, this.currentNodeIndex,
this.bitmaps);
}
/**
 * Logs real (JVM heap) and estimated memory statistics for the graph
 * data, plus cumulative disk I/O timings, then resets the timers.
 * Fixes the "size fo" typos in the log output.
 */
@Override
public void showMemoryInfo() {
LOG.info("----------------- Memory Info of Graph -----------------");
MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
MemoryUsage memoryUsage = memoryMXBean.getHeapMemoryUsage();
long used = memoryUsage.getUsed();
long committed = memoryUsage.getCommitted();
LOG.info("<Real> [Memory used] = " + used / MB_SIZE + "MB");
LOG.info("<Real> [Memory committed] = " + committed / MB_SIZE + "MB");
LOG.info("<Evaluate> [size of Vertex] = " + this.sizeOfVertex + "B");
LOG.info("<Evaluate> [total size of Vertex] = " + this.totalSizeOfVertex
/ MB_SIZE + "MB");
LOG.info("<Evaluate> [total count of Vertex] = " + this.totalCountOfVertex);
LOG.info("<Evaluate> [total count of Edge] = " + this.totalCountOfEdge);
LOG.info("<Evaluate> [size of MetaTable In Memory] = "
+ this.sizeOfMetaTable / KB_SIZE + "KB");
LOG.info("<Evaluate> [size of Graph Data In Memory] = "
+ this.sizeOfGraphDataInMem / MB_SIZE + "MB");
LOG.info("<Evaluate> [size of Bitmaps In Memory] = "
+ this.sizeOfBitmapsInMem / KB_SIZE + "KB");
LOG.info("<Evaluate> [size of Graph Data Threshold] = "
+ this.sizeThreshold / MB_SIZE + "MB");
LOG.info("----------------- -------------------- -----------------");
// this.showHashBucketsInfo();
LOG.info("[==>Clock<==] <GraphDataForDisk: save bucket> totally used "
+ this.writeDiskTime / 1000f + " seconds");
LOG.info("[==>Clock<==] <GraphDataForDisk: load bucket> totally used "
+ this.readDiskTime / 1000f + " seconds");
LOG.info("[==>Clock<==] <GraphDataForDisk: Disk I/O> totally used "
+ (this.writeDiskTime + this.readDiskTime) / 1000f + " seconds");
// Reset the clocks so the next report covers only the next interval.
this.writeDiskTime = 0;
this.readDiskTime = 0;
}
/**
 * Logs a bar-chart overview of each bucket in table order: on-disk
 * flag, MB in memory vs. total MB, and a bar scaled to the longest
 * bucket ('-' = in-memory part, '*' = on-disk part).
 * Uses a StringBuilder instead of repeated String concatenation.
 */
private void showHashBucketsInfo() {
LOG.info("------------ Buckets Info of Graph ------------");
long maxLength = 0;
for (int i = 0; i < this.metaTable.size(); i++) {
BucketMeta meta = this.metaTable.get(i);
if (meta.length > maxLength) {
maxLength = meta.length;
}
}
for (int i = 0; i < this.metaTable.size(); i++) {
BucketMeta meta = this.metaTable.get(i);
StringBuilder out = new StringBuilder("[Bucket-").append(i).append("] ");
if (meta.onDiskFlag) {
out.append("OnDisk ");
} else {
out.append("       ");
}
out.append(meta.lengthInMemory / MB_SIZE).append("MB - ")
.append(meta.length / MB_SIZE).append("MB ");
int nMax = 30;
int nAll = (int) (nMax * ((float) meta.length / (float) maxLength));
int nMem = (int) (nAll * ((float) meta.lengthInMemory /
(float) meta.length));
int nDisk = nAll - nMem;
for (int j = 0; j < nMem; j++) {
out.append('-');
}
for (int j = 0; j < nDisk; j++) {
out.append('*');
}
LOG.info(out.toString());
}
LOG.info("------------ --------------------- ------------");
}
/**
 * Logs the same bar-chart overview as showHashBucketsInfo(), but in the
 * traversal order given by sortedBucketIndexList.
 * Uses a StringBuilder instead of repeated String concatenation.
 */
private void showSortedHashBucketsInfo() {
LOG.info("------------ Buckets Info of Graph ------------");
long maxLength = 0;
for (int i = 0; i < this.metaTable.size(); i++) {
BucketMeta meta = this.metaTable.get(i);
if (meta.length > maxLength) {
maxLength = meta.length;
}
}
for (int i = 0; i < this.sortedBucketIndexList.length; i++) {
int p = this.sortedBucketIndexList[i];
BucketMeta meta = this.metaTable.get(p);
StringBuilder out = new StringBuilder("[Bucket-").append(p).append("] ");
if (meta.onDiskFlag) {
out.append("OnDisk ");
} else {
out.append("       ");
}
out.append(meta.lengthInMemory / MB_SIZE).append("MB - ")
.append(meta.length / MB_SIZE).append("MB ");
int nMax = 30;
int nAll = (int) (nMax * ((float) meta.length / (float) maxLength));
int nMem = (int) (nAll * ((float) meta.lengthInMemory /
(float) meta.length));
int nDisk = nAll - nMem;
for (int j = 0; j < nMem; j++) {
out.append('-');
}
for (int j = 0; j < nDisk; j++) {
out.append('*');
}
LOG.info(out.toString());
}
LOG.info("------------ --------------------- ------------");
}
/**
 * @return the total number of edges counted while loading the graph
 */
@Override
public int getEdgeSize() {
return this.totalCountOfEdge;
}
/**
 * Runs one superstep's compute over every vertex in traversal order,
 * writing the (possibly updated) vertex and its active flag back
 * through set(). Null vertices are logged and skipped.
 * The unused {@code tmpStart} timing local has been removed.
 *
 * @param graphStaffHandler callback that performs the per-vertex compute
 * @param bsp               the BSP program being executed
 * @param job               current job configuration
 * @param superStepCounter  current superstep number
 * @param context           carries the updated vertex/flag back out
 * @throws IOException if the handler fails on I/O
 */
@Override
public void processingByBucket(GraphStaffHandler graphStaffHandler, BSP bsp,
BSPJob job, int superStepCounter, BSPStaffContext context)
throws IOException {
int totalVertices = sizeForAll();
Vertex vertex;
boolean activeFlag;
for (int i = 0; i < totalVertices; i++) {
vertex = getForAll(i);
if (vertex == null) {
org.mortbay.log.Log.info("[ERROR]Fail to get the HeadNode of index["
+ i + "] " + "and the system will skip the record");
continue;
}
activeFlag = this.getActiveFlagForAll(i);
graphStaffHandler.vertexProcessing(vertex, bsp, job, superStepCounter,
context, activeFlag);
this.set(i, context.getVertex(), context.getActiveFLag());
}
}
/**
 * Writes every vertex through the handler's saveResultOfVertex hook.
 * Null vertices are logged and skipped.
 * The unused {@code tmpStart} timing local has been removed.
 *
 * @param graphStaffHandler callback that serializes each vertex
 * @param output            destination record writer
 * @throws IOException          if writing fails
 * @throws InterruptedException if the writer is interrupted
 */
@Override
public void saveAllVertices(GraphStaffHandler graphStaffHandler,
RecordWriter output) throws IOException, InterruptedException {
int totalVertices = sizeForAll();
Vertex vertex;
for (int i = 0; i < totalVertices; i++) {
vertex = getForAll(i);
if (vertex == null) {
org.mortbay.log.Log.info("[ERROR]Fail to save the HeadNode of index["
+ i + "] " + "and the system will skip the record");
continue;
}
graphStaffHandler.saveResultOfVertex(vertex, output);
}
}
/**
 * Writes every vertex and its outgoing edges to the record writer in
 * the textual "id SPLIT value <tab> edge SPLIT value SPACE ..." format.
 *
 * <p>Bug fix: the old {@code outEdges.delete(j - 1, j - 1)} call was a
 * no-op (start == end deletes nothing), so the trailing edge separator
 * was never stripped; deleteCharAt() now removes it. Also switched the
 * single-threaded StringBuffer to StringBuilder.
 *
 * @param output destination record writer
 * @throws IOException          if writing fails
 * @throws InterruptedException if the writer is interrupted
 */
@Override
public void saveAllVertices(RecordWriter output) throws IOException,
InterruptedException {
int totalVertices = sizeForAll();
for (int i = 0; i < totalVertices; i++) {
Vertex<?, ?, Edge> vertex = getForAll(i);
StringBuilder outEdges = new StringBuilder();
for (Edge edge : vertex.getAllEdges()) {
outEdges.append(edge.getVertexID() + Constants.SPLIT_FLAG
+ edge.getEdgeValue() + Constants.SPACE_SPLIT_FLAG);
}
if (outEdges.length() > 0) {
// Strip the trailing separator appended after the last edge.
outEdges.deleteCharAt(outEdges.length() - 1);
}
output.write(new Text(vertex.getVertexID() + Constants.SPLIT_FLAG
+ vertex.getVertexValue()), new Text(outEdges.toString()));
}
}
/**
 * Sets the BSP staff this graph data store belongs to.
 *
 * @param staff the owning staff instance
 */
@Override
public void setStaff(Staff staff) {
this.staff = staff;
}
/**
 * Not supported by this disk-backed implementation; always returns 0.
 *
 * @return 0
 */
@Override
public int getVertexSize() {
// TODO Auto-generated method stub
return 0;
}
/**
 * Not supported by this disk-backed implementation; does nothing.
 */
@Override
public void getAllVertex(GraphStaffHandler graphStaffHandler, CommunicatorInterface communicator,
RecordWriter output) throws IOException,
InterruptedException {
// TODO Auto-generated method stub
}
/**
 * Not supported by this disk-backed implementation; does nothing.
 */
@Override
public void setMigratedStaffFlag(boolean flag) {
// TODO Auto-generated method stub
}
/**
 * Not supported by this disk-backed implementation; does nothing.
 * (Note: "Recovry" typo is part of the interface and cannot be renamed
 * here without breaking callers.)
 */
@Override
public void setRecovryFlag(boolean recovery) {
// TODO Auto-generated method stub
}
/** @return the per-bucket vertex lists (null entries are on disk) */
public ArrayList<ArrayList<Vertex>> getHashBuckets() {
return hashBuckets;
}
/** @return the per-bucket metadata table */
public ArrayList<BucketMeta> getMetaTable() {
return metaTable;
}
/** Sets the accumulated serialized size of all vertices, in bytes. */
public void setTotalSizeOfVertex(long totalSizeOfVertex) {
this.totalSizeOfVertex = totalSizeOfVertex;
}
/** Sets the total edge count of the loaded graph. */
public void setTotalCountOfEdge(int totalCountOfEdge) {
this.totalCountOfEdge = totalCountOfEdge;
}
/** Sets the total vertex count reported by sizeForAll(). */
public void setSizeForAll(int sizeForAll) {
this.sizeForAll = sizeForAll;
}
/** Sets the total vertex count of the loaded graph. */
public void setTotalCountOfVertex(int totalCountOfVertex) {
this.totalCountOfVertex = totalCountOfVertex;
}
/**
 * Not supported by this disk-backed implementation; does nothing.
 */
@Override
public void processMirrorSync() {
// TODO Auto-generated method stub
}
/**
 * Not supported by this disk-backed implementation; always returns null.
 *
 * @return null
 */
@Override
public SGAGraphdataStore getVertexStore() {
// TODO Auto-generated method stub
return null;
}
/**
 * Not supported by this disk-backed implementation; always returns null.
 *
 * @return null
 */
@Override
public HashMap<String, Integer> chooseMigrateVertex(int maxMigrateNum,
float minMigrateFactor, byte[] bs) {
// TODO Auto-generated method stub
return null;
}
}
| |
package org.apache.solr.core;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.store.Directory;
import org.apache.solr.update.SolrIndexWriter;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A wrapper for an IndexDeletionPolicy instance.
* <p/>
* Provides features for looking up IndexCommit given a version. Allows reserving index
* commit points for certain amounts of time to support features such as index replication
* or snapshooting directly out of a live index directory.
*
*
* @see org.apache.lucene.index.IndexDeletionPolicy
*/
/**
 * A wrapper for an {@link IndexDeletionPolicy} instance.
 * <p/>
 * Provides features for looking up an {@link IndexCommit} given a
 * generation, and for reserving commit points (by time or by explicit
 * save count) so they are not deleted by the underlying policy — used by
 * index replication and snapshooting directly out of a live index
 * directory.
 * <p/>
 * Thread-safety: the commit map is replaced atomically (volatile),
 * reserves live in a ConcurrentHashMap, and save/release are
 * synchronized.
 *
 * @see org.apache.lucene.index.IndexDeletionPolicy
 */
public class IndexDeletionPolicyWrapper implements IndexDeletionPolicy {
  /** The wrapped (user-configured) deletion policy. */
  private final IndexDeletionPolicy deletionPolicy;
  /** Generation -> commit point; rebuilt on every onInit/onCommit. */
  private volatile Map<Long, IndexCommit> solrVersionVsCommits = new ConcurrentHashMap<Long, IndexCommit>();
  /** Generation -> wall-clock time (ms) until which it is reserved. */
  private final Map<Long, Long> reserves = new ConcurrentHashMap<Long, Long>();
  private volatile IndexCommit latestCommit;
  /** Generation -> save count for commits pinned via saveCommitPoint(). */
  private final ConcurrentHashMap<Long, AtomicInteger> savedCommits = new ConcurrentHashMap<Long, AtomicInteger>();

  public IndexDeletionPolicyWrapper(IndexDeletionPolicy deletionPolicy) {
    this.deletionPolicy = deletionPolicy;
  }

  /**
   * Gets the most recent commit point.
   * <p/>
   * It is recommended to reserve a commit point for the duration of usage so that
   * it is not deleted by the underlying deletion policy.
   *
   * @return the most recent commit point, or null before the first commit
   */
  public IndexCommit getLatestCommit() {
    return latestCommit;
  }

  /** @return the wrapped deletion policy */
  public IndexDeletionPolicy getWrappedDeletionPolicy() {
    return deletionPolicy;
  }

  /**
   * Set the duration for which a commit point is to be reserved by the
   * deletion policy.
   *
   * @param indexGen    generation of the commit point to be reserved
   * @param reserveTime time in milliseconds for which the commit point is
   *                    to be reserved
   */
  public void setReserveDuration(Long indexGen, long reserveTime) {
    long timeToSet = System.currentTimeMillis() + reserveTime;
    for (;;) {
      Long previousTime = reserves.put(indexGen, timeToSet);
      // Common success case: no older reservation, or the older one
      // expires before the new time.
      if (previousTime == null || previousTime <= timeToSet) break;
      // We overwrote a longer reservation, so restore the older one.
      // An even longer reservation may arrive concurrently, so keep
      // retrying with the maximum time seen so far.
      timeToSet = previousTime;
    }
  }

  /** Drops reservations whose expiry time has already passed. */
  private void cleanReserves() {
    long currentTime = System.currentTimeMillis();
    for (Map.Entry<Long, Long> entry : reserves.entrySet()) {
      if (entry.getValue() < currentTime) {
        reserves.remove(entry.getKey());
      }
    }
  }

  /** Wraps each commit so delete() honors reserves and saved commits. */
  private List<IndexCommitWrapper> wrap(List<IndexCommit> list) {
    List<IndexCommitWrapper> result = new ArrayList<IndexCommitWrapper>();
    for (IndexCommit indexCommit : list) result.add(new IndexCommitWrapper(indexCommit));
    return result;
  }

  /** Permanently prevent this commit point from being deleted.
   * A counter is used to allow a commit point to be correctly saved and released
   * multiple times. */
  public synchronized void saveCommitPoint(Long indexCommitGen) {
    AtomicInteger reserveCount = savedCommits.get(indexCommitGen);
    if (reserveCount == null) reserveCount = new AtomicInteger();
    reserveCount.incrementAndGet();
    savedCommits.put(indexCommitGen, reserveCount);
  }

  /** Release a previously saved commit point. */
  public synchronized void releaseCommitPoint(Long indexCommitGen) {
    AtomicInteger reserveCount = savedCommits.get(indexCommitGen);
    if (reserveCount == null) return; // this should not happen
    if (reserveCount.decrementAndGet() <= 0) {
      savedCommits.remove(indexCommitGen);
    }
  }

  /**
   * Internal use for Lucene... do not explicitly call.
   */
  public void onInit(List list) throws IOException {
    List<IndexCommitWrapper> wrapperList = wrap(list);
    deletionPolicy.onInit(wrapperList);
    updateCommitPoints(wrapperList);
    cleanReserves();
  }

  /**
   * Internal use for Lucene... do not explicitly call.
   */
  public void onCommit(List list) throws IOException {
    List<IndexCommitWrapper> wrapperList = wrap(list);
    deletionPolicy.onCommit(wrapperList);
    updateCommitPoints(wrapperList);
    cleanReserves();
  }

  /**
   * Delegating IndexCommit whose delete() is a no-op while the commit's
   * generation is reserved (time-based) or saved (count-based).
   */
  private class IndexCommitWrapper extends IndexCommit {
    IndexCommit delegate;

    IndexCommitWrapper(IndexCommit delegate) {
      this.delegate = delegate;
    }

    @Override
    public String getSegmentsFileName() {
      return delegate.getSegmentsFileName();
    }

    @Override
    public Collection getFileNames() throws IOException {
      return delegate.getFileNames();
    }

    @Override
    public Directory getDirectory() {
      return delegate.getDirectory();
    }

    @Override
    public void delete() {
      Long gen = delegate.getGeneration();
      Long reserve = reserves.get(gen);
      // Still inside a time-based reservation: refuse to delete.
      if (reserve != null && System.currentTimeMillis() < reserve) return;
      // Explicitly saved (e.g. for replication/snapshot): refuse.
      if (savedCommits.containsKey(gen)) return;
      delegate.delete();
    }

    @Override
    public int getSegmentCount() {
      return delegate.getSegmentCount();
    }

    @Override
    public boolean equals(Object o) {
      return delegate.equals(o);
    }

    @Override
    public int hashCode() {
      return delegate.hashCode();
    }

    @Override
    public long getGeneration() {
      return delegate.getGeneration();
    }

    @Override
    public boolean isDeleted() {
      return delegate.isDeleted();
    }

    @Override
    public Map getUserData() throws IOException {
      return delegate.getUserData();
    }
  }

  /**
   * @param gen the generation of the commit point
   * @return the commit point with that generation, or null if unknown
   */
  public IndexCommit getCommitPoint(Long gen) {
    return solrVersionVsCommits.get(gen);
  }

  /**
   * Gets the commit points for the index.
   * This map instance may change between commits and commit points may be deleted.
   * It is recommended to reserve a commit point for the duration of usage.
   *
   * @return a Map of generation to commit points
   */
  public Map<Long, IndexCommit> getCommits() {
    return solrVersionVsCommits;
  }

  /** Rebuilds the generation->commit map and records the newest commit. */
  private void updateCommitPoints(List<IndexCommitWrapper> list) {
    Map<Long, IndexCommit> map = new ConcurrentHashMap<Long, IndexCommit>();
    for (IndexCommitWrapper wrapper : list) {
      if (!wrapper.isDeleted())
        map.put(wrapper.delegate.getGeneration(), wrapper.delegate);
    }
    solrVersionVsCommits = map;
    // Guard against an empty commit list: the old unguarded
    // list.get(list.size() - 1) threw IndexOutOfBoundsException.
    if (!list.isEmpty()) {
      latestCommit = list.get(list.size() - 1).delegate;
    }
  }

  /**
   * @param commit the commit point to inspect
   * @return the commit timestamp recorded in the commit's user data, or
   *         0 when absent
   * @throws IOException if the user data cannot be read
   */
  public static long getCommitTimestamp(IndexCommit commit) throws IOException {
    final Map<String, String> commitData = commit.getUserData();
    String commitTime = commitData.get(SolrIndexWriter.COMMIT_TIME_MSEC_KEY);
    if (commitTime != null) {
      return Long.parseLong(commitTime);
    } else {
      return 0;
    }
  }
}
| |
package org.diorite.impl.client;
import static org.lwjgl.glfw.Callbacks.errorCallbackPrint;
import static org.lwjgl.glfw.GLFW.*;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.system.MemoryUtil.NULL;
import java.net.InetAddress;
import java.net.Proxy;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.Collections;
import org.lwjgl.Sys;
import org.lwjgl.glfw.GLFWErrorCallback;
import org.lwjgl.glfw.GLFWKeyCallback;
import org.lwjgl.glfw.GLFWWindowSizeCallback;
import org.lwjgl.glfw.GLFWvidmode;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GLContext;
import org.diorite.impl.CoreMain;
import org.diorite.impl.DioriteCore;
import org.diorite.impl.client.connection.ClientConnection;
import org.diorite.utils.math.DioriteRandomUtils;
import joptsimple.OptionSet;
public class Main
{
static
{
SharedLibraryLoader.load();
System.out.println("Loaded LWJGL library.");
}
// We need to strongly reference callback instances.
private GLFWErrorCallback errorCallback;
private GLFWKeyCallback keyCallback;
private GLFWWindowSizeCallback resizeCallback;
private volatile int width;
private volatile int height;
private volatile boolean stop = false;
// The window handle
private long window;
/**
 * Starts the client: spawns the render ("GL") thread and then boots the
 * Diorite client core on the calling thread. Whatever happens to the
 * core (normal exit or any Throwable), the render loop is asked to stop
 * via the volatile {@code stop} flag.
 *
 * @param options parsed command-line options; must contain integer
 *                "width" and "height" values
 */
public void run(final OptionSet options)
{
    System.out.println("Hello LWJGL " + Sys.getVersion() + "!");
    // Window size comes from the command-line options (defaults are set
    // where the option parser is configured, see main()).
    this.width = (int) options.valueOf("width");
    this.height = (int) options.valueOf("height");
    final Thread thread = new Thread(() -> {
        try
        {
            this.init();
            this.loop();
            // Release window and window callbacks
            glfwDestroyWindow(this.window);
            DioriteCore.getInstance().stop();
        } finally
        {
            this.cleanup();
        }
    }, "GL");
    thread.setDaemon(false);
    thread.start();
    try
    {
        new DioriteClient(Proxy.NO_PROXY, options).start(options);
    } catch (final Throwable e)
    {
        e.printStackTrace();
    } finally
    {
        // Ask the render loop (GL thread) to shut down.
        this.stop = true;
    }
}
/**
 * Releases the GLFW callbacks and terminates GLFW.
 *
 * <p>Each callback is now null-checked: if init() failed before a
 * callback was created (e.g. glfwInit() threw), the old code raised a
 * NullPointerException on keyCallback.release() and glfwTerminate()
 * never ran.
 */
private void cleanup()
{
    if (this.keyCallback != null)
    {
        this.keyCallback.release();
    }
    glfwTerminate();
    if (this.errorCallback != null)
    {
        this.errorCallback.release();
    }
    if (this.resizeCallback != null)
    {
        this.resizeCallback.release();
    }
}
/**
 * Creates the GLFW window, registers the error/key/resize callbacks,
 * makes the OpenGL context current with v-sync, centers the window on
 * the primary monitor and shows it. Must run on the GL thread.
 *
 * @throws IllegalStateException if GLFW cannot be initialized
 * @throws RuntimeException      if the window cannot be created
 */
private void init()
{
    // Setup an error callback. The default implementation
    // will print the error message in System.err.
    glfwSetErrorCallback(this.errorCallback = errorCallbackPrint(System.err));
    // Initialize GLFW. Most GLFW functions will not work before doing this.
    if (glfwInit() != GL11.GL_TRUE)
    {
        throw new IllegalStateException("Unable to initialize GLFW");
    }
    // Configure our window
    glfwDefaultWindowHints(); // optional, the current window hints are already the default
    glfwWindowHint(GLFW_VISIBLE, GL_FALSE); // the window will stay hidden after creation
    glfwWindowHint(GLFW_RESIZABLE, GL_TRUE); // the window will be resizable
    // Create the window
    this.window = glfwCreateWindow(this.width, this.height, "DioritOS", NULL, NULL);
    if (this.window == NULL)
    {
        throw new RuntimeException("Failed to create the GLFW window");
    }
    // NOTE(review): presumably the static factory from
    // org.lwjgl.glfw.Callbacks (early LWJGL 3); only errorCallbackPrint is
    // statically imported above — verify this call resolves at compile time.
    this.resizeCallback = GLFWWindowSizeCallback(this::handleResize);
    // Setup a key callback. It will be called every time a key is pressed, repeated or released.
    glfwSetKeyCallback(this.window, this.keyCallback = new MyGLFWResizeKeyCallback());
    // Get the resolution of the primary monitor
    final ByteBuffer vidmode = glfwGetVideoMode(glfwGetPrimaryMonitor());
    // Center our window
    glfwSetWindowPos(this.window, (GLFWvidmode.width(vidmode) - this.width) / 2, (GLFWvidmode.height(vidmode) - this.height) / 2);
    // Make the OpenGL context current
    glfwMakeContextCurrent(this.window);
    // Enable v-sync
    glfwSwapInterval(1);
    // Make the window visible
    glfwShowWindow(this.window);
    // Register the resize callback created above.
    glfwSetWindowSizeCallback(this.window, this.resizeCallback);
}
/**
 * GLFW window-size callback: stores the new size, re-centers the window
 * on the primary monitor, and updates the GL viewport.
 *
 * @param window handle of the resized window (unused — the field
 *               {@code this.window} is used instead; it shadows this
 *               parameter and should refer to the same window)
 * @param width  new window width in pixels
 * @param height new window height in pixels
 */
private void handleResize(final long window, final int width, final int height)
{
    // Only react to a real size change.
    if ((width != this.width) || (height != this.height))
    {
        Main.this.width = width;
        Main.this.height = height;
        // Get the resolution of the primary monitor
        final ByteBuffer vidmode = glfwGetVideoMode(glfwGetPrimaryMonitor());
        // Center our window
        glfwSetWindowPos(this.window, (GLFWvidmode.width(vidmode) - this.width) / 2, (GLFWvidmode.height(vidmode) - this.height) / 2);
        glViewport(0, 0, width, height);
    }
}
/**
 * Render loop: clears the frame, draws a random batch of colored
 * rectangles roughly every 300 ms, and polls events, until the user
 * closes the window or run() sets the {@code stop} flag.
 */
private void loop()
{
    // This line is critical for LWJGL's interoperation with GLFW's
    // OpenGL context, or any context that is managed externally.
    // LWJGL detects the context that is current in the current thread,
    // creates the ContextCapabilities instance and makes the OpenGL
    // bindings available for use.
    GLContext.createFromCurrent();
    // Set the clear color
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    // Run the rendering loop until the user has attempted to close
    // the window or has pressed the ESCAPE key.
    while (glfwWindowShouldClose(this.window) == GL_FALSE)
    {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // clear the framebuffer
        // useless code, deal with it.
        for (int i = 0, k = DioriteRandomUtils.nextInt(50); i < k; i++)
        {
            rect(DioriteRandomUtils.getRandDouble(- 1, 1), DioriteRandomUtils.getRandDouble(- 1, 1), DioriteRandomUtils.getRandDouble(- 1, 1), DioriteRandomUtils.getRandDouble(- 1, 1), DioriteRandomUtils.getRandDouble(0, 5));
        }
        glfwSwapBuffers(this.window); // swap the color buffers
        // Poll for window events. The key callback above will only be
        // invoked during this call.
        glfwPollEvents();
        // run() requested shutdown: tell GLFW to close and leave the loop.
        if (this.stop)
        {
            glfwSetWindowShouldClose(this.window, GL_TRUE);
            break;
        }
        try
        {
            Thread.sleep(300);
        } catch (InterruptedException e)
        {
            // NOTE(review): swallows the interrupt without restoring the
            // flag (Thread.currentThread().interrupt()) — confirm whether
            // the GL thread is ever interrupted deliberately.
            e.printStackTrace();
        }
    }
}
public static void rect(double x, double y, double width, double height, double r)
{
glPushMatrix();
glTranslated(x, y, 0);
glRotated(r, 0, 0, 1);
glColor4f(DioriteRandomUtils.nextFloat(), DioriteRandomUtils.nextFloat(), DioriteRandomUtils.nextFloat(), DioriteRandomUtils.nextFloat());
glBegin(GL_QUADS);
glVertex2d(0, 0);
glVertex2d(0, height);
glVertex2d(width, height);
glVertex2d(width, 0);
glEnd();
glPopMatrix();
}
public static void main(final String[] args)
{
DioriteCore.getInitPipeline().addLast("Diorite|initConnection", (s, p, data) -> {
s.setHostname(data.options.has("hostname") ? data.options.valueOf("hostname").toString() : s.getConfig().getHostname());
s.setPort(data.options.has("port") ? (int) data.options.valueOf("port") : s.getConfig().getPort());
s.setConnectionHandler(new ClientConnection(s));
s.getConnectionHandler().start();
});
DioriteCore.getStartPipeline().addLast("DioriteCore|Run", (s, p, options) -> {
System.out.println("Started Diorite v" + s.getVersion() + " core!");
s.run();
});
//
// // TODO: remove that, client should be able to select server.
DioriteCore.getStartPipeline().addBefore("DioriteCore|Run", "Diorite|bindConnection", (s, p, options) -> {
try
{
System.setProperty("io.netty.eventLoopThreads", options.has("netty") ? options.valueOf("netty").toString() : Integer.toString(s.getConfig().getNettyThreads()));
System.out.println("Starting connecting on " + s.getHostname() + ":" + s.getPort());
s.getConnectionHandler().init(InetAddress.getByName(s.getHostname()), s.getPort(), s.getConfig().isUseNativeTransport());
System.out.println("Connected to " + s.getHostname() + ":" + s.getPort());
} catch (final UnknownHostException e)
{
e.printStackTrace();
}
});
final OptionSet options = CoreMain.main(args, false, p -> {
p.acceptsAll(Collections.singletonList("width"), "width of screen").withRequiredArg().ofType(int.class).describedAs("width").defaultsTo(854);
p.acceptsAll(Collections.singletonList("height"), "height of screen").withRequiredArg().ofType(int.class).describedAs("width").defaultsTo(480);
});
new Main().run(options);
}
    /**
     * Key callback that requests window close when ESCAPE is released.
     *
     * <p>NOTE(review): despite the "Resize" in the name, this is a key
     * callback (extends {@code GLFWKeyCallback}), not a window-size callback —
     * consider renaming it.
     */
    private static class MyGLFWResizeKeyCallback extends GLFWKeyCallback
    {
        @Override
        public void invoke(final long window, final int key, final int scancode, final int action, final int mods)
        {
            // Trigger on release (not press) so holding ESC does not spam the close flag.
            if ((key == GLFW_KEY_ESCAPE) && (action == GLFW_RELEASE))
            {
                glfwSetWindowShouldClose(window, GL_TRUE); // We will detect this in our rendering loop
            }
        }
    }
}
| |
/**
* Copyright 2017 IBM Corp. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.ibm.watson.developer_cloud.document_conversion.v1;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import com.google.gson.JsonObject;
import com.ibm.watson.developer_cloud.WatsonServiceUnitTest;
import com.ibm.watson.developer_cloud.document_conversion.v1.model.Answers;
import com.ibm.watson.developer_cloud.document_conversion.v1.model.IndexConfiguration;
import com.ibm.watson.developer_cloud.document_conversion.v1.model.IndexDocumentOptions;
import com.ibm.watson.developer_cloud.document_conversion.v1.model.IndexFields;
import com.ibm.watson.developer_cloud.document_conversion.v1.util.ConversionUtils;
import com.ibm.watson.developer_cloud.http.HttpMediaType;
import okhttp3.HttpUrl;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.RecordedRequest;
import okio.Buffer;
/**
* Document Conversion unit tests.
*/
@SuppressWarnings("resource")
public class DocumentConversionTest extends WatsonServiceUnitTest {
private static final String VERSION = "version";
private static final String CONVERT_DOCUMENT_PATH = "/v1/convert_document";
private static final String INDEX_DOCUMENT_PATH = "/v1/index_document";
private static final String RESOURCE = "src/test/resources/document_conversion/";
private DocumentConversion service;
private final File html;
private InputStream expAnswer;
private InputStream expIndexResponse;
private InputStream expIndexDryRunResponse;
private IndexConfiguration indexConfiguration;
private IndexConfiguration indexConfigWithFields;
private IndexConfiguration indexConfigWithFieldsForDryRun;
/**
* Instantiates a new document conversion test.
*
* @throws Exception the exception
*/
public DocumentConversionTest() throws Exception {
html = new File(RESOURCE + "html-with-extra-content-input.htm");
}
/*
* (non-Javadoc)
*
* @see com.ibm.watson.developer_cloud.WatsonServiceUnitTest#setUp()
*/
@Override
@Before
public void setUp() throws Exception {
super.setUp();
service = new DocumentConversion(DocumentConversion.VERSION_DATE_2015_12_01);
service.setApiKey("");
service.setEndPoint(getMockWebServerUrl());
expAnswer = new FileInputStream(RESOURCE + "html-with-extra-content-input-to-answer.json");
expIndexResponse = new ByteArrayInputStream("{\"status\": \"success\"}".getBytes());
expIndexDryRunResponse = new FileInputStream(RESOURCE + "html-with-extra-content-input-index-dry-run.json");
indexConfiguration = new IndexConfiguration("serviceInstanceId", "clusterId", "searchCollectionName");
IndexFields fields = new IndexFields.Builder().mappings("Author", "Created By")
.mappings("Date Created", "Created On").include("SomeMetadataName").include("id").include("Created By")
.include("Created On").exclude("Category").build();
indexConfigWithFields = new IndexConfiguration("serviceInstanceId", "clusterId", "searchCollectionName", fields);
indexConfigWithFieldsForDryRun = new IndexConfiguration(null, null, null, fields);
}
private RecordedRequest checkRequest(String requestPath) throws InterruptedException {
final RecordedRequest request = server.takeRequest();
final HttpUrl url = HttpUrl.parse(getMockWebServerUrl() + request.getPath());
assertEquals(requestPath, url.encodedPath());
assertEquals(DocumentConversion.VERSION_DATE_2015_12_01, url.queryParameter(VERSION));
assertEquals(POST, request.getMethod());
return request;
}
/**
* Test convert document.
*
* @throws URISyntaxException the URI syntax exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws InterruptedException the interrupted exception
*/
@Test
public void testConvertDocument() throws URISyntaxException, IOException, InterruptedException {
server.enqueue(new MockResponse().setBody(new Buffer().readFrom(expAnswer)));
Answers convertedDoc = service.convertDocumentToAnswer(html, null).execute();
checkRequest(CONVERT_DOCUMENT_PATH);
assertNotNull(convertedDoc);
}
/**
* Test convert document.
*
* @throws URISyntaxException the URI syntax exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws InterruptedException the interrupted exception
*/
@Test
public void testConvertDocumentWithMediaType() throws URISyntaxException, IOException, InterruptedException {
// Convert document with a specified media type
server.enqueue(new MockResponse().setBody(new Buffer().readFrom(expAnswer)));
Answers convertedDoc = service.convertDocumentToAnswer(html, HttpMediaType.TEXT_HTML).execute();
checkRequest(CONVERT_DOCUMENT_PATH);
assertNotNull(convertedDoc);
}
/**
* Test convert document_with_custom_config.
*
* @throws Exception the exception
*/
@Test
public void testConvertDocumentWithCustomConfig() throws Exception {
JsonObject customConfig = ConversionUtils.loadCustomConfig(new FileInputStream(RESOURCE + "custom_config.json"));
server.enqueue(new MockResponse().setBody(new Buffer().readFrom(expAnswer)));
service.convertDocumentToAnswer(html, null, customConfig).execute();
checkRequest(CONVERT_DOCUMENT_PATH);
}
/**
* Test convert document_with_version_date.
*
* @throws Exception the exception
*/
@Test
public void testConvertDocumentWithVersionDate() throws Exception {
server.enqueue(new MockResponse().setBody(new Buffer().readFrom(expAnswer)));
service.convertDocumentToAnswer(html).execute();
checkRequest(CONVERT_DOCUMENT_PATH);
}
/**
* Test index document.
*
* @throws Exception the exception
*/
@Test
public void testIndexDocument() throws Exception {
IndexDocumentOptions indexDocumentOptions =
new IndexDocumentOptions.Builder().document(html).dryRun(false).indexConfiguration(indexConfiguration).build();
server.enqueue(new MockResponse().setBody(new Buffer().readFrom(expIndexResponse)));
service.indexDocument(indexDocumentOptions).execute();
checkRequest(INDEX_DOCUMENT_PATH);
}
/**
* Test index document with fields.
*
* @throws Exception the exception
*/
@Test
public void testIndexDocumentWithFields() throws Exception {
IndexDocumentOptions indexDocumentOptions = new IndexDocumentOptions.Builder().document(html).dryRun(false)
.indexConfiguration(indexConfigWithFields).build();
server.enqueue(new MockResponse().setBody(new Buffer().readFrom(expIndexResponse)));
service.indexDocument(indexDocumentOptions).execute();
checkRequest(INDEX_DOCUMENT_PATH);
}
/**
* Test a dry run of index document with metadata.
*
* @throws Exception the exception
*/
@Test
public void testIndexDocumentDryRun() throws Exception {
Map<String, String> metadata = new HashMap<String, String>();
metadata.put("id", "123");
metadata.put("SomeMetadataName", "SomeMetadataValue");
IndexDocumentOptions indexDocumentOptions =
new IndexDocumentOptions.Builder().document(html).metadata(metadata).dryRun(true).build();
server.enqueue(new MockResponse().setBody(new Buffer().readFrom(expIndexDryRunResponse)));
service.indexDocument(indexDocumentOptions).execute();
checkRequest(INDEX_DOCUMENT_PATH);
}
/**
* Test a dry run of index document with metadata and index fields.
*
* @throws Exception the exception
*/
@Test
public void testIndexDocumentWithFieldsDryRun() throws Exception {
Map<String, String> metadata = new HashMap<String, String>();
metadata.put("id", "123");
metadata.put("SomeMetadataName", "SomeMetadataValue");
IndexDocumentOptions indexDocumentOptions = new IndexDocumentOptions.Builder().document(html).metadata(metadata)
.dryRun(true).indexConfiguration(indexConfigWithFieldsForDryRun).build();
server.enqueue(new MockResponse().setBody(new Buffer().readFrom(expIndexDryRunResponse)));
service.indexDocument(indexDocumentOptions).execute();
RecordedRequest request = checkRequest(INDEX_DOCUMENT_PATH);
String body = request.getBody().readUtf8();
// config
assertTrue(body.contains("Content-Disposition: form-data; name=\"config\""));
assertTrue(body.contains("{\"retrieve_and_rank\":{\"dry_run\":true,\"fields\":{\"include\":"
+ "[\"SomeMetadataName\",\"id\",\"Created By\",\"Created On\"]}}}"));
// file
assertTrue(body.contains("Content-Disposition: form-data; name=\"file\""));
assertTrue(body.contains("Content-Type: text/html"));
// metadata
assertTrue(body.contains("Content-Disposition: form-data; name=\"metadata\""));
assertTrue(body.contains("{\"metadata\""));
assertTrue(body.contains("{\"name\":\"id\",\"value\":\"123\"}"));
assertTrue(body.contains("{\"name\":\"SomeMetadataName\",\"value\":\"SomeMetadataValue\"}"));
}
}
| |
package org.apache.helix.integration.manager;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Date;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.apache.helix.HelixDataAccessor;
import org.apache.helix.PreConnectCallback;
import org.apache.helix.PropertyKey;
import org.apache.helix.TestHelper;
import org.apache.helix.ZNRecord;
import org.apache.helix.ZkTestHelper;
import org.apache.helix.ZkUnitTestBase;
import org.apache.helix.manager.zk.CallbackHandler;
import org.apache.helix.manager.zk.ZKHelixDataAccessor;
import org.apache.helix.manager.zk.ZkBaseDataAccessor;
import org.apache.helix.mock.participant.MockMSModelFactory;
import org.apache.helix.model.LiveInstance;
import org.apache.helix.tools.ClusterVerifiers.ClusterStateVerifier;
import org.apache.helix.tools.ClusterVerifiers.ClusterStateVerifier.BestPossAndExtViewZkVerifier;
import org.apache.log4j.Logger;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Verifies that Helix managers recover when a ZooKeeper session expires again
 * while {@code HelixManager#handleNewSession()} is still processing the
 * previous expiry (i.e. two consecutive session expiries).
 */
public class TestConsecutiveZkSessionExpiry extends ZkUnitTestBase {
  private static Logger LOG = Logger.getLogger(TestConsecutiveZkSessionExpiry.class);

  /**
   * make use of PreConnectCallback to insert session expiry during HelixManager#handleNewSession()
   */
  class PreConnectTestCallback implements PreConnectCallback {
    // Instance whose reconnect is intercepted (used for logging only).
    final String instanceName;
    // Counted down when the second connect starts, so the test can expire the session again.
    final CountDownLatch startCountDown;
    // Awaited here; the test counts it down after triggering the second expiry.
    final CountDownLatch endCountDown;
    // Number of times onPreConnect() has been invoked so far.
    int count = 0;

    public PreConnectTestCallback(String instanceName, CountDownLatch startCountdown,
        CountDownLatch endCountdown) {
      this.instanceName = instanceName;
      this.startCountDown = startCountdown;
      this.endCountDown = endCountdown;
    }

    @Override
    public void onPreConnect() {
      LOG.info("handleNewSession for instance: " + instanceName + ", count: " + count);
      // On the second connect (the reconnect after the first expiry), signal the
      // test and then block until it has expired the session a second time.
      if (count++ == 1) {
        startCountDown.countDown();
        LOG.info("wait session expiry to happen");
        try {
          endCountDown.await();
        } catch (Exception e) {
          LOG.error("interrupted in waiting", e);
        }
      }
    }
  }

  /**
   * Participant flavor: the cluster must converge after the first participant's
   * ZK session is expired twice in a row.
   */
  @Test
  public void testParticipant() throws Exception {
    // Logger.getRootLogger().setLevel(Level.INFO);
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String clusterName = className + "_" + methodName;
    final int n = 2;
    System.out.println("START " + clusterName + " at " + new Date(System.currentTimeMillis()));
    TestHelper.setupCluster(clusterName, ZK_ADDR, 12918, // participant port
        "localhost", // participant name prefix
        "TestDB", // resource name prefix
        1, // resources
        32, // partitions per resource
        n, // number of nodes
        2, // replicas
        "MasterSlave", true); // do rebalance
    // start controller
    final ClusterControllerManager controller =
        new ClusterControllerManager(ZK_ADDR, clusterName, "controller");
    controller.syncStart();
    // start participants; only the first gets the expiry-injecting callback
    CountDownLatch startCountdown = new CountDownLatch(1);
    CountDownLatch endCountdown = new CountDownLatch(1);
    MockParticipantManager[] participants = new MockParticipantManager[n];
    for (int i = 0; i < n; i++) {
      final String instanceName = "localhost_" + (12918 + i);
      participants[i] = new MockParticipantManager(ZK_ADDR, clusterName, instanceName);
      if (i == 0) {
        participants[i].addPreConnectCallback(new PreConnectTestCallback(instanceName,
            startCountdown, endCountdown));
      }
      participants[i].syncStart();
    }
    boolean result =
        ClusterStateVerifier
            .verifyByZkCallback(new ClusterStateVerifier.BestPossAndExtViewZkVerifier(ZK_ADDR,
                clusterName));
    Assert.assertTrue(result);
    // expire the session of participant
    LOG.info("1st Expiring participant session...");
    String oldSessionId = participants[0].getSessionId();
    ZkTestHelper.asyncExpireSession(participants[0].getZkClient());
    String newSessionId = participants[0].getSessionId();
    LOG.info("Expried participant session. oldSessionId: " + oldSessionId + ", newSessionId: "
        + newSessionId);
    // expire zk session again during HelixManager#handleNewSession()
    startCountdown.await();
    LOG.info("2nd Expiring participant session...");
    oldSessionId = participants[0].getSessionId();
    ZkTestHelper.asyncExpireSession(participants[0].getZkClient());
    newSessionId = participants[0].getSessionId();
    LOG.info("Expried participant session. oldSessionId: " + oldSessionId + ", newSessionId: "
        + newSessionId);
    // release the blocked onPreConnect() and verify the cluster converges
    endCountdown.countDown();
    result =
        ClusterStateVerifier.verifyByPolling(new ClusterStateVerifier.BestPossAndExtViewZkVerifier(
            ZK_ADDR, clusterName));
    Assert.assertTrue(result);
    // clean up
    controller.syncStop();
    for (int i = 0; i < n; i++) {
      participants[i].syncStop();
    }
    System.out.println("END " + clusterName + " at " + new Date(System.currentTimeMillis()));
  }

  /**
   * Distributed-controller flavor: after two consecutive expiries on the first
   * controller, leadership must move to the second one and handlers must be
   * cleaned up on the demoted controller.
   */
  @Test
  public void testDistributedController() throws Exception {
    // Logger.getRootLogger().setLevel(Level.INFO);
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String clusterName = className + "_" + methodName;
    int n = 2;
    System.out.println("START " + clusterName + " at " + new Date(System.currentTimeMillis()));
    TestHelper.setupCluster(clusterName, ZK_ADDR, 12918, // participant port
        "localhost", // participant name prefix
        "TestDB", // resource name prefix
        1, // resources
        4, // partitions per resource
        n, // number of nodes
        2, // replicas
        "MasterSlave", true); // do rebalance
    // start distributed controllers; only the first gets the expiry-injecting callback
    ClusterDistributedController[] distributedControllers = new ClusterDistributedController[n];
    CountDownLatch startCountdown = new CountDownLatch(1);
    CountDownLatch endCountdown = new CountDownLatch(1);
    for (int i = 0; i < n; i++) {
      String contrllerName = "localhost_" + (12918 + i);
      distributedControllers[i] =
          new ClusterDistributedController(ZK_ADDR, clusterName, contrllerName);
      distributedControllers[i].getStateMachineEngine().registerStateModelFactory("MasterSlave",
          new MockMSModelFactory());
      if (i == 0) {
        distributedControllers[i].addPreConnectCallback(new PreConnectTestCallback(contrllerName,
            startCountdown, endCountdown));
      }
      distributedControllers[i].connect();
    }
    boolean result =
        ClusterStateVerifier.verifyByZkCallback(new BestPossAndExtViewZkVerifier(ZK_ADDR,
            clusterName));
    Assert.assertTrue(result);
    // expire the session of distributedController
    LOG.info("1st Expiring distributedController session...");
    String oldSessionId = distributedControllers[0].getSessionId();
    ZkTestHelper.asyncExpireSession(distributedControllers[0].getZkClient());
    String newSessionId = distributedControllers[0].getSessionId();
    LOG.info("Expried distributedController session. oldSessionId: " + oldSessionId
        + ", newSessionId: " + newSessionId);
    // expire zk session again during HelixManager#handleNewSession()
    startCountdown.await();
    LOG.info("2nd Expiring distributedController session...");
    oldSessionId = distributedControllers[0].getSessionId();
    ZkTestHelper.asyncExpireSession(distributedControllers[0].getZkClient());
    newSessionId = distributedControllers[0].getSessionId();
    LOG.info("Expried distributedController session. oldSessionId: " + oldSessionId
        + ", newSessionId: " + newSessionId);
    // release the blocked onPreConnect() and verify the cluster converges
    endCountdown.countDown();
    result =
        ClusterStateVerifier.verifyByPolling(new ClusterStateVerifier.BestPossAndExtViewZkVerifier(
            ZK_ADDR, clusterName));
    Assert.assertTrue(result);
    // verify leader changes to localhost_12919
    HelixDataAccessor accessor =
        new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor<ZNRecord>(_gZkClient));
    PropertyKey.Builder keyBuilder = accessor.keyBuilder();
    Assert.assertNotNull(pollForProperty(LiveInstance.class, accessor,
        keyBuilder.liveInstance("localhost_12918"), true));
    LiveInstance leader =
        pollForProperty(LiveInstance.class, accessor, keyBuilder.controllerLeader(), true);
    Assert.assertNotNull(leader);
    Assert.assertEquals(leader.getId(), "localhost_12919");
    // check localhost_12918 has 2 handlers: message and data-accessor
    LOG.debug("handlers: " + TestHelper.printHandlers(distributedControllers[0]));
    List<CallbackHandler> handlers = distributedControllers[0].getHandlers();
    Assert
        .assertEquals(
            handlers.size(),
            1,
            "Distributed controller should have 1 handler (message) after lose leadership, but was "
                + handlers.size());
    // clean up: disconnect both controllers and make sure their live instances
    // and the leader node are gone
    distributedControllers[0].disconnect();
    distributedControllers[1].disconnect();
    Assert.assertNull(pollForProperty(LiveInstance.class, accessor,
        keyBuilder.liveInstance("localhost_12918"), false));
    Assert.assertNull(pollForProperty(LiveInstance.class, accessor,
        keyBuilder.liveInstance("localhost_12919"), false));
    Assert.assertNull(pollForProperty(LiveInstance.class, accessor, keyBuilder.controllerLeader(),
        false));
    System.out.println("END " + clusterName + " at " + new Date(System.currentTimeMillis()));
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.testing.tools.sling;
import static org.junit.Assert.assertNotNull;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.Stack;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.entity.mime.MultipartEntity;
import org.apache.http.entity.mime.content.StringBody;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.sling.testing.tools.http.RequestBuilder;
import org.apache.sling.testing.tools.http.RequestExecutor;
/** Simple Sling client, created for integration
 * tests but should be general purpose */
public class SlingClient {
    public static final String LOCATION_HEADER = "Location";
    public static final String HTTP_PREFIX = "http://";

    // Executes HTTP requests and provides status assertions.
    private final RequestExecutor executor;
    // Builds requests against slingServerUrl.
    private final RequestBuilder builder;
    private final String slingServerUrl;
    private final String username;
    private final String password;

    /** Minimal HttpRequestBase allowing arbitrary HTTP methods (used for MKCOL). */
    static class HttpAnyMethod extends HttpRequestBase {
        private final URI uri;
        private final String method;

        HttpAnyMethod(String method, String uriString) {
            this.uri = URI.create(uriString);
            this.method = method;
        }

        @Override
        public String getMethod() {
            return method;
        }

        @Override
        public URI getURI() {
            return uri;
        }
    };

    /**
     * Creates a client for the given Sling server.
     *
     * @param slingServerUrl base URL of the Sling server, used to build all requests
     * @param username credentials sent with every request
     * @param password credentials sent with every request
     */
    public SlingClient(String slingServerUrl, String username, String password) {
        this.slingServerUrl = slingServerUrl;
        this.username = username;
        this.password = password;
        builder = new RequestBuilder(slingServerUrl);
        executor = new RequestExecutor(new DefaultHttpClient());
    }

    /** Create a node at specified path, with optional properties
     * specified as a list of String arguments, odd values are keys
     * and even arguments are values.
     */
    public String createNode(String path, String...properties) throws IOException {
        Map<String, Object> props = extractMap(properties);
        return createNode(path, props);
    }

    /** Create a node at specified path, with optional properties
     * @param path Used in POST request to Sling server
     * @param properties If not null, properties are added to the created node
     * @return The actual path of the node that was created (taken from the
     *         Location header of the expected 302 response)
     */
    public String createNode(String path, Map<String, Object> properties) throws UnsupportedEncodingException, IOException {
        String actualPath = null;
        final MultipartEntity entity = new MultipartEntity();

        // Add Sling POST options
        entity.addPart(":redirect",new StringBody("*"));
        entity.addPart(":displayExtension",new StringBody(""));

        // Add user properties
        if(properties != null) {
            for(Map.Entry<String, Object> e : properties.entrySet()) {
                entity.addPart(e.getKey(), new StringBody(e.getValue().toString()));
            }
        }

        // Sling answers a node-creating POST with a 302 pointing at the new node.
        final HttpResponse response =
            executor.execute(
                builder.buildPostRequest(path)
                .withEntity(entity)
                .withCredentials(username, password)
            )
            .assertStatus(302)
            .getResponse();

        final Header location = response.getFirstHeader(LOCATION_HEADER);
        assertNotNull("Expecting " + LOCATION_HEADER + " in response", location);
        actualPath = locationToPath(location.getValue());
        return actualPath;
    }

    /** Convert a Location value to the corresponding node path */
    String locationToPath(String locationHeaderValue) {
        if(locationHeaderValue.startsWith(slingServerUrl)) {
            return locationHeaderValue.substring(slingServerUrl.length());
        } else if(locationHeaderValue.startsWith(HTTP_PREFIX)){
            // An absolute URL for a different server is a hard error.
            throw new IllegalArgumentException(
                "Unexpected Location header value [" + locationHeaderValue
                + "], should start with [" + slingServerUrl + "] if starting with "
                + HTTP_PREFIX);
        } else {
            return locationHeaderValue;
        }
    }

    /**
     * Converts a flat key/value String array into a Map.
     *
     * @param properties alternating keys and values; may be null or empty
     * @return a Map of the pairs, or null when properties is null or empty
     * @throws IllegalArgumentException on an odd number of entries
     */
    private Map<String, Object> extractMap(String[] properties) {
        Map<String, Object> props = null;
        if(properties != null && properties.length > 0) {
            props = new HashMap<String, Object>();
            if(properties.length % 2 != 0) {
                throw new IllegalArgumentException("Odd number of properties is invalid:" + properties.length);
            }
            for(int i=0 ; i<properties.length; i+=2) {
                props.put(properties[i], properties[i+1]);
            }
        }
        return props;
    }

    /** Updates a node at specified path, with optional properties
     * specified as a list of String arguments, odd values are keys
     * and even arguments are values.
     */
    public void setProperties(String path, String... properties) throws IOException {
        Map<String, Object> props = extractMap(properties);
        setProperties(path, props);
    }

    /** Updates a node at specified path, with optional properties
     */
    public void setProperties(String path, Map<String, Object> properties) throws IOException {
        final MultipartEntity entity = new MultipartEntity();

        // Add user properties
        if(properties != null) {
            for(Map.Entry<String, Object> e : properties.entrySet()) {
                entity.addPart(e.getKey(), new StringBody(e.getValue().toString()));
            }
        }

        // Response object is unused; assertStatus(200) already validates the outcome.
        final HttpResponse response =
            executor.execute(
                builder.buildPostRequest(path)
                .withEntity(entity)
                .withCredentials(username, password)
            )
            .assertStatus(200)
            .getResponse();
    }

    /** Delete supplied path */
    public void delete(String path) throws IOException {
        executor.execute(
            builder.buildOtherRequest(
                new HttpDelete(builder.buildUrl(path)))
            .withCredentials(username, password)
        )
        .assertStatus(204);
    }

    /** Upload using a PUT request.
     * @param path the path of the uploaded file
     * @param data the content
     * @param length Use -1 if unknown
     * @param createFolders if true, intermediate folders are created via mkdirs
     */
    public void upload(String path, InputStream data, int length, boolean createFolders) throws IOException {
        final HttpEntity e = new InputStreamEntity(data, length);
        if(createFolders) {
            // NOTE(review): mkdirs() itself strips one more path level (see below),
            // so the file's direct parent folder may not actually be created here
            // — confirm against actual server behavior.
            mkdirs(getParentPath(path));
        }
        executor.execute(
            builder.buildOtherRequest(
                new HttpPut(builder.buildUrl(path))).withEntity(e)
            .withCredentials(username, password)
        )
        .assertStatus(201);
    }

    /** Create path and all its parent folders, using MKCOL.
     *
     * NOTE(review): the initial getParentPath(path) call means the deepest
     * segment of the supplied path is never created by this method, only its
     * ancestors — verify whether callers rely on that.
     */
    public void mkdirs(String path) throws IOException {
        // Call mkdir on all parent paths, starting at the topmost one
        final Stack<String> parents = new Stack<String>();
        path = getParentPath(path);
        while(path.length() > 0 && !exists(path)) {
            parents.push(path);
            path = getParentPath(path);
        }

        while(!parents.isEmpty()) {
            mkdir(parents.pop());
        }
    }

    /** Create path using MKCOL (no-op if the path already exists) */
    public void mkdir(String path) throws IOException {
        if(!exists(path)) {
            executor.execute(
                builder.buildOtherRequest(
                    new HttpAnyMethod("MKCOL", builder.buildUrl(path)))
                .withCredentials(username, password)
            )
            .assertStatus(201);
        }
    }

    /** True if a GET of path + ".json" returns HTTP 200 */
    public boolean exists(String path) throws IOException {
        final int status = executor.execute(builder.buildGetRequest(path + ".json")
                .withCredentials(username, password))
                .getResponse().getStatusLine().getStatusCode();
        return status == 200;
    }

    /** Return parent path: whatever comes before the last / in path, empty
     *  string if no / in path.
     */
    protected String getParentPath(String path) {
        final int pos = path.lastIndexOf('/');
        if(pos > 0) {
            return path.substring(0, pos);
        } else {
            return "";
        }
    }
}
| |
package com.anysoftkeyboard.keyboards.views;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static org.mockito.ArgumentMatchers.any;
import android.graphics.drawable.Drawable;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.HorizontalScrollView;
import androidx.test.core.app.ApplicationProvider;
import com.anysoftkeyboard.AnySoftKeyboardRobolectricTestRunner;
import com.anysoftkeyboard.ime.InputViewBinder;
import com.anysoftkeyboard.theme.KeyboardThemeFactory;
import com.menny.android.anysoftkeyboard.AnyApplication;
import com.menny.android.anysoftkeyboard.R;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.robolectric.Shadows;
@RunWith(AnySoftKeyboardRobolectricTestRunner.class)
public class KeyboardViewContainerViewTest {
private KeyboardViewContainerView mUnderTest;
@Before
public void setup() {
mUnderTest =
(KeyboardViewContainerView)
LayoutInflater.from(getApplicationContext())
.inflate(R.layout.main_keyboard_layout, null, false);
Assert.assertNotNull(mUnderTest.getCandidateView());
Assert.assertNotNull(mUnderTest.getStandardKeyboardView());
}
@Test
public void testDefaultInflation() {
Assert.assertEquals(3, mUnderTest.getChildCount());
Assert.assertTrue(mUnderTest.getChildAt(0) instanceof HorizontalScrollView);
Assert.assertTrue(mUnderTest.getChildAt(1) instanceof CandidateView);
Assert.assertTrue(mUnderTest.getChildAt(2) instanceof AnyKeyboardView);
}
@Test
public void testAddView() {
AnyKeyboardView mock = Mockito.mock(AnyKeyboardView.class);
mUnderTest.addView(mock);
Assert.assertEquals(4, mUnderTest.getChildCount());
Assert.assertSame(mock, mUnderTest.getChildAt(3));
Mockito.verify(mock, Mockito.never()).setKeyboardTheme(any());
Mockito.verify(mock, Mockito.never()).setThemeOverlay(any());
}
@Test
public void testAddViewWhenHasThemeWasSet() {
mUnderTest.setKeyboardTheme(
AnyApplication.getKeyboardThemeFactory(ApplicationProvider.getApplicationContext())
.getEnabledAddOn());
AnyKeyboardView mock = Mockito.mock(AnyKeyboardView.class);
mUnderTest.addView(mock);
Assert.assertEquals(4, mUnderTest.getChildCount());
Assert.assertSame(mock, mUnderTest.getChildAt(3));
Mockito.verify(mock).setKeyboardTheme(any());
Mockito.verify(mock).setThemeOverlay(any());
}
@Test
public void testSetOnKeyboardActionListener() {
AnyKeyboardView mock1 = Mockito.mock(AnyKeyboardView.class);
AnyKeyboardView mock2 = Mockito.mock(AnyKeyboardView.class);
mUnderTest.removeAllViews();
mUnderTest.addView(mock1);
Mockito.verify(mock1, Mockito.never())
.setOnKeyboardActionListener(any(OnKeyboardActionListener.class));
final OnKeyboardActionListener listener = Mockito.mock(OnKeyboardActionListener.class);
mUnderTest.setOnKeyboardActionListener(listener);
Mockito.verify(mock1).setOnKeyboardActionListener(listener);
mUnderTest.addView(mock2);
Mockito.verify(mock2).setOnKeyboardActionListener(listener);
}
@Test
public void testGetStandardKeyboardView() {
    // getStandardKeyboardView() keeps returning the originally inflated view,
    // even after additional keyboard views are added to the container.
    final InputViewBinder originalView = mUnderTest.getStandardKeyboardView();
    Assert.assertNotNull(originalView);
    Assert.assertTrue(originalView instanceof AnyKeyboardView);
    AnyKeyboardView mock1 = Mockito.mock(AnyKeyboardView.class);
    AnyKeyboardView mock2 = Mockito.mock(AnyKeyboardView.class);
    mUnderTest.addView(mock1);
    mUnderTest.addView(mock2);
    Assert.assertSame(originalView, mUnderTest.getStandardKeyboardView());
}
@Test
public void testGetCandidateView() {
    // getCandidateView() is stable across adding and removing other children.
    final CandidateView originalView = mUnderTest.getCandidateView();
    Assert.assertNotNull(originalView);
    AnyKeyboardView mock2 = Mockito.mock(AnyKeyboardView.class);
    mUnderTest.addView(mock2);
    Assert.assertSame(originalView, mUnderTest.getCandidateView());
    mUnderTest.removeView(mock2);
    Assert.assertSame(originalView, mUnderTest.getCandidateView());
}
@Test
public void testCandidateThemeSet() {
    // Setting a keyboard theme on the container should restyle the candidate
    // view's close-suggestions icon to match the theme.
    final CandidateView originalView = mUnderTest.getCandidateView();
    Assert.assertNotNull(originalView);
    final KeyboardThemeFactory keyboardThemeFactory =
        AnyApplication.getKeyboardThemeFactory(getApplicationContext());
    // switching to light icon (the UUID is the add-on id of a bundled theme)
    keyboardThemeFactory.setAddOnEnabled("18c558ef-bc8c-433a-a36e-92c3ca3be4dd", true);
    mUnderTest.setKeyboardTheme(keyboardThemeFactory.getEnabledAddOn());
    final Drawable lightIcon = originalView.getCloseIcon();
    Assert.assertNotNull(lightIcon);
    // Robolectric shadow lets us assert which drawable resource was loaded.
    Assert.assertEquals(
        R.drawable.close_suggestions_light,
        Shadows.shadowOf(lightIcon).getCreatedFromResId());
    // switching to dark icon
    keyboardThemeFactory.setAddOnEnabled("8774f99e-fb4a-49fa-b8d0-4083f762250a", true);
    mUnderTest.setKeyboardTheme(keyboardThemeFactory.getEnabledAddOn());
    final Drawable darkIcon = originalView.getCloseIcon();
    Assert.assertNotNull(darkIcon);
    Assert.assertEquals(
        R.drawable.yochees_dark_close_suggetions,
        Shadows.shadowOf(darkIcon).getCreatedFromResId());
}
@Test
public void testAddRemoveAction() {
    View view = new View(mUnderTest.getContext());
    KeyboardViewContainerView.StripActionProvider provider =
        Mockito.mock(KeyboardViewContainerView.StripActionProvider.class);
    Mockito.doReturn(view).when(provider).inflateActionView(any());
    // Adding a strip action inflates its view and attaches it as a child.
    mUnderTest.addStripAction(provider);
    Mockito.verify(provider).inflateActionView(mUnderTest);
    Mockito.verify(provider, Mockito.never()).onRemoved();
    Assert.assertEquals(4, mUnderTest.getChildCount());
    Assert.assertSame(view, mUnderTest.getChildAt(3));
    // Removing it notifies the provider and detaches the view.
    mUnderTest.removeStripAction(provider);
    Mockito.verify(provider).onRemoved();
    Assert.assertEquals(3, mUnderTest.getChildCount());
}
@Test
public void testStripVisibility() {
    final int initialChildCount = mUnderTest.getChildCount();
    final int actionViewId = R.id.demo_keyboard_view /*can be whatever*/;
    View view = new View(mUnderTest.getContext());
    view.setId(actionViewId);
    KeyboardViewContainerView.StripActionProvider provider =
        Mockito.mock(KeyboardViewContainerView.StripActionProvider.class);
    Mockito.doReturn(view).when(provider).inflateActionView(any());
    mUnderTest.addStripAction(provider);
    // While the strip is visible the candidate view shows and the action view
    // is part of the hierarchy.
    Assert.assertEquals(View.VISIBLE, mUnderTest.getCandidateView().getVisibility());
    Assert.assertSame(view, mUnderTest.findViewById(actionViewId));
    Assert.assertEquals(initialChildCount + 1, mUnderTest.getChildCount());
    // Hiding the strip sets the candidate view GONE and removes the action view.
    mUnderTest.setActionsStripVisibility(false);
    Assert.assertEquals(View.GONE, mUnderTest.getCandidateView().getVisibility());
    Assert.assertNull(mUnderTest.findViewById(actionViewId));
    Assert.assertEquals(initialChildCount, mUnderTest.getChildCount());
    // Showing again restores both.
    mUnderTest.setActionsStripVisibility(true);
    Assert.assertEquals(View.VISIBLE, mUnderTest.getCandidateView().getVisibility());
    Assert.assertSame(view, mUnderTest.findViewById(actionViewId));
    Assert.assertEquals(initialChildCount + 1, mUnderTest.getChildCount());
    // Showing twice must be idempotent — no duplicate children.
    mUnderTest.setActionsStripVisibility(true);
    Assert.assertEquals(View.VISIBLE, mUnderTest.getCandidateView().getVisibility());
    Assert.assertSame(view, mUnderTest.findViewById(actionViewId));
    Assert.assertEquals(initialChildCount + 1, mUnderTest.getChildCount());
}
@Test
public void testQueueActionViewForAdditionWhenNotVisible() {
    // When the actions strip is hidden, newly added strip-actions must be
    // queued — not inflated or attached — until the strip is shown again.
    final int actionViewId = R.id.demo_keyboard_view /*can be whatever*/;
    View view = new View(mUnderTest.getContext());
    view.setId(actionViewId);
    KeyboardViewContainerView.StripActionProvider provider =
        Mockito.mock(KeyboardViewContainerView.StripActionProvider.class);
    Mockito.doReturn(view).when(provider).inflateActionView(any());
    final int actionViewId2 = R.id.demo_keyboard_view_background /*can be whatever*/;
    View view2 = new View(mUnderTest.getContext());
    view2.setId(actionViewId2);
    KeyboardViewContainerView.StripActionProvider provider2 =
        Mockito.mock(KeyboardViewContainerView.StripActionProvider.class);
    Mockito.doReturn(view2).when(provider2).inflateActionView(any());
    // First provider is added while the strip is visible: inflated right away.
    mUnderTest.addStripAction(provider);
    Assert.assertSame(view, mUnderTest.findViewById(actionViewId));
    Mockito.verify(provider).inflateActionView(any());
    mUnderTest.setActionsStripVisibility(false);
    Assert.assertNull(mUnderTest.findViewById(actionViewId));
    // Second provider is added while hidden: nothing is attached yet.
    mUnderTest.addStripAction(provider2);
    Assert.assertNull(mUnderTest.findViewById(actionViewId));
    Assert.assertNull(mUnderTest.findViewById(actionViewId2));
    Mockito.verify(provider).inflateActionView(any());
    // BUG FIX: this line used to redundantly re-verify `provider`; the queued
    // provider2 is the one whose inflation must not have happened yet.
    Mockito.verify(provider2, Mockito.never()).inflateActionView(any());
    mUnderTest.setActionsStripVisibility(true);
    Assert.assertSame(view, mUnderTest.findViewById(actionViewId));
    Assert.assertSame(view2, mUnderTest.findViewById(actionViewId2));
    // no additional calls, still once per provider.
    Mockito.verify(provider).inflateActionView(any());
    // BUG FIX: was `provider` again by copy/paste — provider2 was never checked.
    Mockito.verify(provider2).inflateActionView(any());
}
@Test
public void testDoubleAddDoesNotAddAgain() {
    View view = new View(mUnderTest.getContext());
    KeyboardViewContainerView.StripActionProvider provider =
        Mockito.mock(KeyboardViewContainerView.StripActionProvider.class);
    Mockito.doReturn(view).when(provider).inflateActionView(any());
    // Adding the same provider twice must inflate and attach it only once
    // (Mockito.verify defaults to times(1)).
    mUnderTest.addStripAction(provider);
    mUnderTest.addStripAction(provider);
    Mockito.verify(provider).inflateActionView(mUnderTest);
    Mockito.verify(provider, Mockito.never()).onRemoved();
    Assert.assertEquals(4, mUnderTest.getChildCount());
    Assert.assertSame(view, mUnderTest.getChildAt(3));
}
@Test(expected = IllegalStateException.class)
public void testFailIfViewAddedWithParent() {
    // An action view that already has a parent cannot be attached again:
    // showing the strip must throw IllegalStateException.
    mUnderTest.setActionsStripVisibility(false);
    View view = new View(mUnderTest.getContext());
    // adding parent
    FrameLayout parent = new FrameLayout(mUnderTest.getContext());
    parent.addView(view);
    Assert.assertSame(parent, view.getParent());
    KeyboardViewContainerView.StripActionProvider provider =
        Mockito.mock(KeyboardViewContainerView.StripActionProvider.class);
    Mockito.doReturn(view).when(provider).inflateActionView(any());
    // this will fail
    mUnderTest.addStripAction(provider);
}
@Test
public void testDoubleAddViewDoesNotCrash() {
    // Showing the strip twice after a queued add must not try to re-attach
    // the same action view (which would throw) — it stays attached once.
    mUnderTest.setActionsStripVisibility(false);
    View view = new View(mUnderTest.getContext());
    KeyboardViewContainerView.StripActionProvider provider =
        Mockito.mock(KeyboardViewContainerView.StripActionProvider.class);
    Mockito.doReturn(view).when(provider).inflateActionView(any());
    mUnderTest.addStripAction(provider);
    mUnderTest.setActionsStripVisibility(true);
    mUnderTest.setActionsStripVisibility(true);
    Assert.assertEquals(4, mUnderTest.getChildCount());
    Assert.assertSame(view, mUnderTest.getChildAt(3));
    Assert.assertSame(mUnderTest, view.getParent());
}
@Test
public void testMeasure() {
    mUnderTest.onMeasure(
        View.MeasureSpec.makeMeasureSpec(1024, View.MeasureSpec.EXACTLY),
        View.MeasureSpec.makeMeasureSpec(1024, View.MeasureSpec.AT_MOST));
    // With the actions strip visible the container is taller than the given
    // height spec by the strip's height (1068 - 1024 = 44px here).
    Assert.assertEquals(View.VISIBLE, mUnderTest.getCandidateView().getVisibility());
    Assert.assertEquals(1024, mUnderTest.getMeasuredWidth());
    Assert.assertEquals(1068, mUnderTest.getMeasuredHeight());
    mUnderTest.setActionsStripVisibility(false);
    mUnderTest.onMeasure(
        View.MeasureSpec.makeMeasureSpec(1024, View.MeasureSpec.EXACTLY),
        View.MeasureSpec.makeMeasureSpec(1024, View.MeasureSpec.AT_MOST));
    // With the strip hidden the measured height matches the spec exactly.
    Assert.assertEquals(View.GONE, mUnderTest.getCandidateView().getVisibility());
    Assert.assertEquals(1024, mUnderTest.getMeasuredWidth());
    Assert.assertEquals(1024, mUnderTest.getMeasuredHeight());
}
}
| |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.util;
import static org.hamcrest.Matchers.contains;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.util.Throwables;
import com.google.api.services.storage.Storage;
import com.google.api.services.storage.model.Objects;
import com.google.api.services.storage.model.StorageObject;
import com.google.cloud.dataflow.sdk.options.GcsOptions;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.util.gcsfs.GcsPath;
import com.google.cloud.dataflow.sdk.util.gcsio.GoogleCloudStorageReadChannel;
import com.google.common.collect.ImmutableList;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mockito;
import java.io.IOException;
import java.nio.channels.SeekableByteChannel;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
/** Test case for {@link GcsUtil}. */
@RunWith(JUnit4.class)
public class GcsUtilTest {
    @Rule public ExpectedException exception = ExpectedException.none();

    @Test
    public void testGlobTranslation() {
        // '*' and '?' must not cross path-segment boundaries, hence the
        // translation to [^/]* and [^/]; '.' is escaped and character classes
        // such as [0-9] pass through unchanged.
        assertEquals("foo", GcsUtil.globToRegexp("foo"));
        assertEquals("fo[^/]*o", GcsUtil.globToRegexp("fo*o"));
        assertEquals("f[^/]*o\\.[^/]", GcsUtil.globToRegexp("f*o.?"));
        assertEquals("foo-[0-9][^/]*", GcsUtil.globToRegexp("foo-[0-9]*"));
    }

    @Test
    public void testCreationWithDefaultOptions() {
        // A mocked credential is enough to satisfy option creation.
        GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
        pipelineOptions.setGcpCredential(Mockito.mock(Credential.class));
        assertNotNull(pipelineOptions.getGcpCredential());
    }

    @Test
    public void testCreationWithExecutorServiceProvided() {
        // A user-supplied executor must be the one GcsUtil actually uses.
        GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
        pipelineOptions.setGcpCredential(Mockito.mock(Credential.class));
        pipelineOptions.setExecutorService(Executors.newCachedThreadPool());
        assertSame(pipelineOptions.getExecutorService(), pipelineOptions.getGcsUtil().executorService);
    }

    @Test
    public void testCreationWithGcsUtilProvided() {
        // A user-supplied GcsUtil instance must be returned as-is.
        GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
        GcsUtil gcsUtil = Mockito.mock(GcsUtil.class);
        pipelineOptions.setGcsUtil(gcsUtil);
        assertSame(gcsUtil, pipelineOptions.getGcsUtil());
    }

    @Test
    public void testMultipleThreadsCanCompleteOutOfOrderWithDefaultThreadPool() throws Exception {
        // Each task blocks on its own latch and, once released, releases the
        // previous task's latch. Only triggering the LAST latch can unwind the
        // whole chain, so completion requires the pool not to deadlock when
        // tasks finish in reverse submission order.
        GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
        ExecutorService executorService = pipelineOptions.getExecutorService();
        int numThreads = 1000;
        final CountDownLatch[] countDownLatches = new CountDownLatch[numThreads];
        for (int i = 0; i < numThreads; i++) {
            final int currentLatch = i;
            countDownLatches[i] = new CountDownLatch(1);
            executorService.execute(new Runnable() {
                @Override
                public void run() {
                    // Wait for latch N and then release latch N - 1
                    try {
                        countDownLatches[currentLatch].await();
                        if (currentLatch > 0) {
                            countDownLatches[currentLatch - 1].countDown();
                        }
                    } catch (InterruptedException e) {
                        throw Throwables.propagate(e);
                    }
                }
            });
        }
        // Release the last latch starting the chain reaction.
        countDownLatches[countDownLatches.length - 1].countDown();
        executorService.shutdown();
        assertTrue("Expected tasks to complete",
            executorService.awaitTermination(10, TimeUnit.SECONDS));
    }

    @Test
    public void testGlobExpansion() throws IOException {
        GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
        pipelineOptions.setGcpCredential(Mockito.mock(Credential.class));
        GcsUtil gcsUtil = pipelineOptions.getGcsUtil();
        Storage mockStorage = Mockito.mock(Storage.class);
        gcsUtil.setStorageClient(mockStorage);
        Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class);
        Storage.Objects.List mockStorageList = Mockito.mock(Storage.Objects.List.class);
        Objects modelObjects = new Objects();
        List<StorageObject> items = new ArrayList<>();
        // A directory
        items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/"));
        // Files within the directory
        items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/file1name"));
        items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/file2name"));
        items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/file3name"));
        items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/otherfile"));
        items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/anotherfile"));
        modelObjects.setItems(items);
        // The mocked listing always returns the full item set; expand() is
        // expected to filter it client-side against the glob.
        when(mockStorage.objects()).thenReturn(mockStorageObjects);
        when(mockStorageObjects.list("testbucket")).thenReturn(mockStorageList);
        when(mockStorageList.execute()).thenReturn(modelObjects);
        // Test a single file.
        {
            GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/otherfile");
            List<GcsPath> expectedFiles =
                ImmutableList.of(GcsPath.fromUri("gs://testbucket/testdirectory/otherfile"));
            assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
        }
        // Test patterns.
        {
            GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/file*");
            List<GcsPath> expectedFiles = ImmutableList.of(
                GcsPath.fromUri("gs://testbucket/testdirectory/file1name"),
                GcsPath.fromUri("gs://testbucket/testdirectory/file2name"),
                GcsPath.fromUri("gs://testbucket/testdirectory/file3name"));
            assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
        }
        {
            GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/file[1-3]*");
            List<GcsPath> expectedFiles = ImmutableList.of(
                GcsPath.fromUri("gs://testbucket/testdirectory/file1name"),
                GcsPath.fromUri("gs://testbucket/testdirectory/file2name"),
                GcsPath.fromUri("gs://testbucket/testdirectory/file3name"));
            assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
        }
        {
            GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/file?name");
            List<GcsPath> expectedFiles = ImmutableList.of(
                GcsPath.fromUri("gs://testbucket/testdirectory/file1name"),
                GcsPath.fromUri("gs://testbucket/testdirectory/file2name"),
                GcsPath.fromUri("gs://testbucket/testdirectory/file3name"));
            assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
        }
        {
            GcsPath pattern = GcsPath.fromUri("gs://testbucket/test*ectory/fi*name");
            List<GcsPath> expectedFiles = ImmutableList.of(
                GcsPath.fromUri("gs://testbucket/testdirectory/file1name"),
                GcsPath.fromUri("gs://testbucket/testdirectory/file2name"),
                GcsPath.fromUri("gs://testbucket/testdirectory/file3name"));
            assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
        }
    }

    // Patterns that contain recursive wildcards ('**') are not supported.
    @Test
    public void testRecursiveGlobExpansionFails() throws IOException {
        GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
        pipelineOptions.setGcpCredential(Mockito.mock(Credential.class));
        GcsUtil gcsUtil = pipelineOptions.getGcsUtil();
        GcsPath pattern = GcsPath.fromUri("gs://testbucket/test**");
        exception.expect(IllegalArgumentException.class);
        exception.expectMessage("Unsupported wildcard usage");
        gcsUtil.expand(pattern);
    }

    // GCSUtil.expand() should not fail for non-existent single files or directories, since GCS file
    // listing is only eventually consistent.
    @Test
    public void testNonExistent() throws IOException {
        GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
        pipelineOptions.setGcpCredential(Mockito.mock(Credential.class));
        GcsUtil gcsUtil = pipelineOptions.getGcsUtil();
        Storage mockStorage = Mockito.mock(Storage.class);
        gcsUtil.setStorageClient(mockStorage);
        Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class);
        Storage.Objects.List mockStorageList = Mockito.mock(Storage.Objects.List.class);
        Objects modelObjects = new Objects();
        List<StorageObject> items = new ArrayList<>();
        // A directory
        items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/"));
        modelObjects.setItems(items);
        when(mockStorage.objects()).thenReturn(mockStorageObjects);
        when(mockStorageObjects.list("testbucket")).thenReturn(mockStorageList);
        when(mockStorageList.execute()).thenReturn(modelObjects);
        // A non-matching literal path must be echoed back unchanged rather
        // than treated as an error.
        {
            GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/nonexistentfile");
            List<GcsPath> expectedFiles =
                ImmutableList.of(GcsPath.fromUri("gs://testbucket/testdirectory/nonexistentfile"));
            assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
        }
        {
            GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/nonexistentdirectory/");
            List<GcsPath> expectedFiles =
                ImmutableList.of(GcsPath.fromUri("gs://testbucket/testdirectory/nonexistentdirectory/"));
            assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
        }
    }

    @Test
    public void testGCSChannelCloseIdempotent() throws IOException {
        // Closing an already-closed channel must be a no-op, not an error.
        SeekableByteChannel channel =
            new GoogleCloudStorageReadChannel(null, "dummybucket", "dummyobject", null);
        channel.close();
        channel.close();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.naming.resources;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import javax.naming.NamingException;
import javax.naming.directory.Attributes;
import org.apache.naming.NamingEntry;
/**
* Extended FileDirContext implementation that allows to expose multiple
* directories of the filesystem under a single webapp, a feature mainly used
* for development with IDEs.
* This should be used in conjunction with
* {@link org.apache.catalina.loader.VirtualWebappLoader}.
*
* Sample context xml configuration:
*
* <code>
* <Context path="/mywebapp" docBase="/Users/theuser/mywebapp/src/main/webapp" >
* <Resources className="org.apache.naming.resources.VirtualDirContext"
* extraResourcePaths="/pictures=/Users/theuser/mypictures,/movies=/Users/theuser/mymovies" />
* <Loader className="org.apache.catalina.loader.VirtualWebappLoader"
* virtualClasspath="/Users/theuser/mywebapp/target/classes" />
* <JarScanner scanAllDirectories="true" />
* </Context>
* </code>
*
*
 * <strong>This is not meant to be used in production.
 * It's meant to ease development with IDEs without the
 * need for fully republishing jars in WEB-INF/lib.</strong>
*
*
* @author Fabrizio Giustina
*/
public class VirtualDirContext extends FileDirContext {

    /** Raw, comma-separated "virtualPath=fsPath" pairs as configured in the Context. */
    private String extraResourcePaths = "";

    /**
     * Parsed mapping: webapp-relative path -> filesystem directories exposed
     * under it. Built in {@link #allocate()}; {@code null} when no extra paths
     * are configured or after {@link #release()}.
     */
    private Map<String, List<String>> mappedResourcePaths;

    /**
     * Maps paths of the filesystem to paths in the webapp. Multiple filesystem
     * paths can be mapped to the same path in the webapp. Filesystem path and
     * virtual path must be separated by an equal sign; pairs of paths must be
     * separated by a comma.
     * <p>
     * Example: <code>
     * /=/Users/slaurent/mywebapp/src/main/webapp,/pictures=/Users/slaurent/sharedpictures
     * </code>
     * <p>
     * The path to the docBase must not be added here, otherwise resources
     * would be listed twice.
     *
     * @param path comma-separated list of "virtualPath=filesystemPath" pairs
     */
    public void setExtraResourcePaths(String path) {
        extraResourcePaths = path;
    }

    /**
     * {@inheritDoc}
     * <p>
     * Additionally parses {@link #extraResourcePaths} into
     * {@link #mappedResourcePaths}.
     */
    @Override
    public void allocate() {
        super.allocate();
        mappedResourcePaths = new HashMap<String, List<String>>();
        StringTokenizer tkn = new StringTokenizer(extraResourcePaths, ",");
        while (tkn.hasMoreTokens()) {
            String resSpec = tkn.nextToken();
            if (resSpec.length() > 0) {
                int idx = resSpec.indexOf('=');
                String path;
                if (idx <= 0) {
                    // No '=' (or '=' is the first char): map to the webapp root.
                    path = "";
                }
                else {
                    // "/=dir" is shorthand for the webapp root as well.
                    if (resSpec.startsWith("/=")) {
                        resSpec = resSpec.substring(1);
                        idx--;
                    }
                    path = resSpec.substring(0, idx);
                }
                String dir = resSpec.substring(idx + 1);
                List<String> resourcePaths = mappedResourcePaths.get(path);
                if (resourcePaths == null) {
                    resourcePaths = new ArrayList<String>();
                    mappedResourcePaths.put(path, resourcePaths);
                }
                resourcePaths.add(dir);
            }
        }
        if (mappedResourcePaths.isEmpty()) {
            // Normalize "nothing configured" to null so lookups can short-circuit.
            mappedResourcePaths = null;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void release() {
        mappedResourcePaths = null;
        super.release();
    }

    /**
     * {@inheritDoc}
     * <p>
     * Falls back to the extra resource paths when the name is not found under
     * the docBase.
     */
    @Override
    public Attributes getAttributes(String name) throws NamingException {
        NamingException initialException;
        try {
            // first try the normal processing, if it fails try with extra
            // resources
            return super.getAttributes(name);
        } catch (NamingException exc) {
            initialException = exc;
        }
        if (mappedResourcePaths != null) {
            for (Map.Entry<String, List<String>> mapping : mappedResourcePaths.entrySet()) {
                String path = mapping.getKey();
                List<String> dirList = mapping.getValue();
                if (name.equals(path)) {
                    // FIX: previously only dirList.get(0) was consulted here;
                    // try every mapped directory, consistent with file() and
                    // doLookup().
                    for (String resourcesDir : dirList) {
                        File f = new File(resourcesDir);
                        if (f.exists() && f.canRead()) {
                            return new FileResourceAttributes(f);
                        }
                    }
                }
                path += "/";
                if (name.startsWith(path)) {
                    String res = name.substring(path.length());
                    for (String resourcesDir : dirList) {
                        File f = new File(resourcesDir + "/" + res);
                        if (f.exists() && f.canRead()) {
                            return new FileResourceAttributes(f);
                        }
                    }
                }
            }
        }
        throw initialException;
    }

    /**
     * {@inheritDoc}
     * <p>
     * Also resolves names against the extra resource paths when not found
     * under the docBase; the first existing, readable match wins.
     */
    @Override
    protected File file(String name) {
        File file = super.file(name);
        if (file != null || mappedResourcePaths == null) {
            return file;
        }
        // If not found under docBase, try our other resources
        // Ensure name string begins with a slash
        if (name.length() > 0 && name.charAt(0) != '/') {
            name = "/" + name;
        }
        for (Map.Entry<String, List<String>> mapping : mappedResourcePaths.entrySet()) {
            String path = mapping.getKey();
            List<String> dirList = mapping.getValue();
            if (name.equals(path)) {
                for (String resourcesDir : dirList) {
                    file = new File(resourcesDir);
                    if (file.exists() && file.canRead()) {
                        return file;
                    }
                }
            }
            if (name.startsWith(path + "/")) {
                // res keeps the leading '/', which File(parent, child) tolerates.
                String res = name.substring(path.length());
                for (String resourcesDir : dirList) {
                    file = new File(resourcesDir, res);
                    if (file.exists() && file.canRead()) {
                        return file;
                    }
                }
            }
        }
        return null;
    }

    /**
     * {@inheritDoc}
     * <p>
     * Merges listings from the extra resource paths into the docBase listing,
     * skipping entries whose names already exist (docBase wins).
     */
    @Override
    protected List<NamingEntry> list(File file) {
        List<NamingEntry> entries = super.list(file);
        if (mappedResourcePaths != null && !mappedResourcePaths.isEmpty()) {
            // Track names already listed so virtual entries never duplicate them.
            Set<String> entryNames = new HashSet<String>(entries.size());
            for (NamingEntry entry : entries) {
                entryNames.add(entry.name);
            }
            // Add appropriate entries from the extra resource paths
            String absPath = file.getAbsolutePath();
            if (absPath.startsWith(getDocBase() + File.separator)) {
                String relPath = absPath.substring(getDocBase().length());
                String fsRelPath = relPath.replace(File.separatorChar, '/');
                for (Map.Entry<String, List<String>> mapping : mappedResourcePaths.entrySet()) {
                    String path = mapping.getKey();
                    List<String> dirList = mapping.getValue();
                    String res = null;
                    if (fsRelPath.equals(path)) {
                        res = "";
                    } else if (fsRelPath.startsWith(path + "/")) {
                        res = relPath.substring(path.length());
                    }
                    if (res != null) {
                        for (String resourcesDir : dirList) {
                            File f = new File(resourcesDir, res);
                            if (f.exists() && f.canRead() && f.isDirectory()) {
                                List<NamingEntry> virtEntries = super.list(f);
                                for (NamingEntry entry : virtEntries) {
                                    // filter duplicate
                                    if (!entryNames.contains(entry.name)) {
                                        entryNames.add(entry.name);
                                        entries.add(entry);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        return entries;
    }

    /**
     * {@inheritDoc}
     * <p>
     * Performs the lookup against the extra resource paths when the docBase
     * lookup finds nothing. Only files are resolved here: a directory hit
     * would already have been returned by the super implementation.
     */
    @Override
    protected Object doLookup(String name) {
        Object retSuper = super.doLookup(name);
        if (retSuper != null || mappedResourcePaths == null) {
            return retSuper;
        }
        // Perform lookup using the extra resource paths
        for (Map.Entry<String, List<String>> mapping : mappedResourcePaths.entrySet()) {
            String path = mapping.getKey();
            List<String> dirList = mapping.getValue();
            if (name.equals(path)) {
                for (String resourcesDir : dirList) {
                    File f = new File(resourcesDir);
                    if (f.exists() && f.canRead()) {
                        if (f.isFile()) {
                            return new FileResource(f);
                        }
                        else {
                            // never goes here, if f is a directory the super
                            // implementation already returned a value
                        }
                    }
                }
            }
            path += "/";
            if (name.startsWith(path)) {
                String res = name.substring(path.length());
                for (String resourcesDir : dirList) {
                    File f = new File(resourcesDir + "/" + res);
                    if (f.exists() && f.canRead()) {
                        if (f.isFile()) {
                            return new FileResource(f);
                        }
                        else {
                            // never goes here, if f is a directory the super
                            // implementation already returned a value
                        }
                    }
                }
            }
        }
        return retSuper;
    }

    /**
     * {@inheritDoc}
     * <p>
     * Delegates to {@link #file(String)}, so extra resource paths are honored.
     */
    @Override
    protected String doGetRealPath(String path) {
        File file = file(path);
        if (null != file) {
            return file.getAbsolutePath();
        } else {
            return null;
        }
    }
}
| |
package com.kirankunigiri.Sprint;
import javafx.animation.*;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.geometry.Bounds;
import javafx.scene.Node;
import javafx.scene.paint.Color;
import javafx.scene.shape.Shape;
import javafx.util.Duration;
/**
* @author Kiran Kunigiri
* @version 1.0
* Sprint is an animation framework for JavaFX that makes
* animation incredibly easy.
*/
/** The main animator class */
public class Sprint {
// Properties
private Timeline timeline;
private SequentialTransition sequentialTransition;
private Interpolator interpolator = Interpolator.EASE_OUT;
private Node node;
/** The animation state of sprint */
public BooleanProperty isAnimating;
/**
 * Creates a sprint animator bound to the given node. The target node can be
 * swapped for another one later via setNode().
 *
 * @param node the element whose properties will be animated
 */
public Sprint(Node node) {
    this.node = node;
    this.timeline = new Timeline();
    this.sequentialTransition = new SequentialTransition();
    this.isAnimating = new SimpleBooleanProperty(this, "isAnimating", false);
}
/**
 * Animates the node to a new translation at the given coordinates.
 *
 * @param duration duration of the animation, in seconds
 * @param x        the x coordinate to animate to
 * @param y        the y coordinate to animate to
 * @return this Sprint instance, for call chaining
 */
public Sprint moveTo(double duration, int x, int y) {
    KeyValue targetX = new KeyValue(node.translateXProperty(), x, interpolator);
    KeyValue targetY = new KeyValue(node.translateYProperty(), y, interpolator);
    timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(duration), targetX, targetY));
    return this;
}
/**
 * Animates the node from the given coordinates back to its current position.
 *
 * @param duration duration of the animation, in seconds
 * @param x        the x coordinate to animate from
 * @param y        the y coordinate to animate from
 * @return this Sprint instance, for call chaining
 */
public Sprint moveFrom(double duration, double x, double y) {
    // FIX: removed an unused `Bounds boundsInScene = node.localToScene(...)`
    // local — its result was never read.
    // Capture the current translation as the animation's end state...
    KeyValue keyValueX = new KeyValue(node.translateXProperty(), node.getTranslateX(), interpolator);
    KeyValue keyValueY = new KeyValue(node.translateYProperty(), node.getTranslateY(), interpolator);
    // ...then jump the node to the "from" position; playing the timeline
    // animates it back.
    node.setTranslateX(x);
    node.setTranslateY(y);
    KeyFrame keyFrame = new KeyFrame(Duration.seconds(duration), keyValueX, keyValueY);
    timeline.getKeyFrames().add(keyFrame);
    return this;
}
/**
 * Animates the node's opacity to the given target value.
 *
 * @param duration duration of the animation, in seconds
 * @param opacity  the opacity value to animate to
 * @return this Sprint instance, for call chaining
 */
public Sprint fadeTo(double duration, double opacity) {
    KeyValue targetOpacity = new KeyValue(node.opacityProperty(), opacity, interpolator);
    timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(duration), targetOpacity));
    return this;
}
/**
 * Animates the node's opacity from the given value back to its current one.
 *
 * @param duration duration of the animation, in seconds
 * @param opacity  the opacity value to animate from
 * @return this Sprint instance, for call chaining
 */
public Sprint fadeFrom(double duration, double opacity) {
    // End state is the node's current opacity; jump to the "from" value now.
    KeyValue endOpacity = new KeyValue(node.opacityProperty(), node.getOpacity(), interpolator);
    node.setOpacity(opacity);
    timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(duration), endOpacity));
    return this;
}
/**
 * Animates the node's scale to the given multipliers.
 *
 * @param duration duration of the animation, in seconds
 * @param x        the width multiplier to animate to
 * @param y        the height multiplier to animate to
 * @return this Sprint instance, for call chaining
 */
public Sprint scaleTo(double duration, double x, double y) {
    KeyValue targetScaleX = new KeyValue(node.scaleXProperty(), x, interpolator);
    KeyValue targetScaleY = new KeyValue(node.scaleYProperty(), y, interpolator);
    timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(duration), targetScaleX, targetScaleY));
    return this;
}
/**
 * Animates the node's scale from the given multipliers back to the current ones.
 *
 * @param duration duration of the animation, in seconds
 * @param x        the width multiplier to animate from
 * @param y        the height multiplier to animate from
 * @return this Sprint instance, for call chaining
 */
public Sprint scaleFrom(double duration, double x, double y) {
    // End state is the node's current scale; jump to the "from" scale now.
    KeyValue endScaleX = new KeyValue(node.scaleXProperty(), node.getScaleX(), interpolator);
    KeyValue endScaleY = new KeyValue(node.scaleYProperty(), node.getScaleY(), interpolator);
    node.setScaleX(x);
    node.setScaleY(y);
    timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(duration), endScaleX, endScaleY));
    return this;
}
/**
 * Animates the node's rotation (around its center) to the given angle.
 *
 * @param duration duration of the animation, in seconds
 * @param angle    the angle to animate to, in degrees
 * @return this Sprint instance, for call chaining
 */
public Sprint rotateTo(double duration, double angle) {
    KeyValue targetAngle = new KeyValue(node.rotateProperty(), angle, interpolator);
    timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(duration), targetAngle));
    return this;
}
/**
 * Animates the node's rotation from the given angle back to its current one.
 *
 * @param duration duration of the animation, in seconds
 * @param angle    the angle to animate from, in degrees
 * @return this Sprint instance, for call chaining
 */
public Sprint rotateFrom(double duration, double angle) {
    // End state is the node's current rotation; jump to the "from" angle now.
    KeyValue endAngle = new KeyValue(node.rotateProperty(), node.getRotate(), interpolator);
    node.setRotate(angle);
    timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(duration), endAngle));
    return this;
}
/**
 * Animates the shape's fill color to the given value. Only applies to
 * {@link Shape} nodes; any other node is silently left untouched.
 *
 * @param duration duration of the animation, in seconds
 * @param color    the fill color to animate to
 * @return this Sprint instance, for call chaining
 */
public Sprint fillColorTo(double duration, Color color) {
    if (!(node instanceof Shape)) {
        // fill is only defined for shapes — nothing to animate.
        return this;
    }
    Shape shape = (Shape) node;
    KeyValue targetFill = new KeyValue(shape.fillProperty(), color, interpolator);
    timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(duration), targetFill));
    return this;
}
/**
* Animates the element's color from the specified value back to the original. CAN ONLY BE APPLIED TO SHAPES.
* @param duration Duration of the animation
* @param color The color to animate from
*/
public Sprint fillColorFrom(double duration, Color color) {
KeyValue keyValueX;
if (node instanceof Shape) {
Shape shape = (Shape) node;
keyValueX = new KeyValue(shape.fillProperty(), shape.getFill(), interpolator);
shape.setFill(color);
} else {
return this;
}
KeyFrame keyFrame = new KeyFrame(Duration.seconds(duration), keyValueX);
timeline.getKeyFrames().add(keyFrame);
return this;
}
/**
* Animates the element's stroke color to specified value. CAN ONLY BE APPLIED TO SHAPES.
* @param duration Duration of the animation
* @param color The color to animate to
*/
public Sprint strokeColorTo(double duration, Color color) {
KeyValue keyValueX;
if (node instanceof Shape) {
Shape shape = (Shape) node;
keyValueX = new KeyValue(shape.strokeProperty(), color, interpolator);
} else {
return this;
}
KeyFrame keyFrame = new KeyFrame(Duration.seconds(duration), keyValueX);
timeline.getKeyFrames().add(keyFrame);
return this;
}
/**
* Animates the element's border color from the specified value back to the original. CAN ONLY BE APPLIED TO SHAPES.
* @param duration Duration of the animation
* @param color The color to animate from
*/
public Sprint strokeColorFrom(double duration, Color color) {
KeyValue keyValueX;
if (node instanceof Shape) {
Shape shape = (Shape) node;
keyValueX = new KeyValue(shape.strokeProperty(), shape.getStroke(), interpolator);
shape.setStroke(color);
} else {
return this;
}
KeyFrame keyFrame = new KeyFrame(Duration.seconds(duration), keyValueX);
timeline.getKeyFrames().add(keyFrame);
return this;
}
/**
* Change the interpolator to a custom one
* @param interpolator The new interpolator to use
*/
public Sprint setInterpolator(Interpolator interpolator) {
this.interpolator = interpolator;
return this;
}
/**
* Get the current interpolator being used
* @return interpolator
*/
public Interpolator getInterpolator() {
return this.interpolator;
}
/**
* Play the animation
*/
public void sprint() {
sequentialTransition.getChildren().add(timeline);
sequentialTransition.play();
isAnimating.set(true);
sequentialTransition.setOnFinished(event -> {
isAnimating.set(false);
});
this.timeline = new Timeline();
this.sequentialTransition = new SequentialTransition();
}
/**
* Loops the animation. Use the value 0 to run the loop indefinitely.
* Looping animations usually work best with the ease both interpolator.
* @param count The number of times to run the animation. 0 for indefinite.
*/
public void loop(int count) {
sequentialTransition.getChildren().add(timeline);
if (count == 0) {
sequentialTransition.setCycleCount(SequentialTransition.INDEFINITE);
} else {
sequentialTransition.setCycleCount(count);
}
sequentialTransition.setAutoReverse(true);
sequentialTransition.play();
isAnimating.set(true);
sequentialTransition.setOnFinished(event -> {
isAnimating.set(false);
});
this.timeline = new Timeline();
this.sequentialTransition = new SequentialTransition();
}
/**
* Create a pause in the animation timeline, so that elements can animate at different times.
* Normally, all animations are occur at the same time. Use wait() in order to move on to the next animation.
* Use a time of 0.0 if you want there to be no gap, but rather just a transition to the next animation.
* @param time The duration to pause the animation
*/
public Sprint wait(double time) {
sequentialTransition.getChildren().add(timeline);
timeline = new Timeline();
// Adds a fake animation to create a pause
KeyValue keyValueX = new KeyValue(node.rotateProperty(), node.getRotate());
Duration duration = Duration.seconds(time);
KeyFrame keyFrame = new KeyFrame(duration, keyValueX);
timeline.getKeyFrames().add(keyFrame);
sequentialTransition.getChildren().add(timeline);
timeline = new Timeline();
return this;
}
/**
* Change the element being animated.
* @param node The new element to animate
*/
public Sprint setNode(Node node) {
this.node = node;
return this;
}
/**
* Get the node being animated
* @return node
*/
public Node getNode() {
return this.node;
}
/**
* Determine whether Sprint is currently animating or not
* @return Animation state
*/
public boolean isAnimating() {
return this.isAnimating.get();
}
// Helper animation functions
/**
* Slides an element in from the right side of the screen to its original position.
* @param duration The duration of the animation
*/
public Sprint slideFromRight(double duration) {
Bounds boundsInScene = node.localToScene(node.getBoundsInLocal());
double dx = node.getScene().getWidth() - boundsInScene.getMaxX() + boundsInScene.getWidth();
this.moveFrom(duration, dx, node.getTranslateY());
return this;
}
/**
* Slides an element in from the left side of the screen to its original position.
* @param duration The duration of the animation
*/
public Sprint slideFromLeft(double duration) {
Bounds boundsInScene = node.localToScene(node.getBoundsInLocal());
double dx = -boundsInScene.getMinX() - boundsInScene.getWidth();
this.moveFrom(duration, dx, node.getTranslateY());
return this;
}
/**
* Slides an element in from the top of the screen to its original position.
* @param duration The duration of the animation
*/
public Sprint slideFromTop(double duration) {
Bounds boundsInScene = node.localToScene(node.getBoundsInLocal());
double dy = -boundsInScene.getMinY() - boundsInScene.getHeight();
this.moveFrom(duration, node.getTranslateX(), dy);
return this;
}
/**
* Slides an element in from the bottom of the screen to its original position.
* @param duration The duration of the animation
*/
public Sprint slideFromBottom(double duration) {
Bounds boundsInScene = node.localToScene(node.getBoundsInLocal());
double dy = node.getScene().getHeight() - boundsInScene.getMaxY() + boundsInScene.getHeight();
this.moveFrom(duration, node.getTranslateX(), dy);
return this;
}
}
| |
/*Generated! Do not modify!*/ package generated.fliesenui.screen;
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ import java.util.List;
/*Generated! Do not modify!*/ import java.util.Collection;
/*Generated! Do not modify!*/ import java.util.TreeSet;
/*Generated! Do not modify!*/ import java.util.Set;
/*Generated! Do not modify!*/ import java.util.ArrayList;
/*Generated! Do not modify!*/ import java.util.HashSet;
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ import generated.fliesenui.core.FLUIAbstractReply;
/*Generated! Do not modify!*/ import generated.fliesenui.core.FLUIUtil;
/*Generated! Do not modify!*/ import generated.fliesenui.core.FLUIString.StringLanguage;
/*Generated! Do not modify!*/ import generated.fliesenui.core.TextHighlighting;
/*Generated! Do not modify!*/ import generated.fliesenui.core.CursorPos;
/*Generated! Do not modify!*/ import generated.fliesenui.core.ContextAssist;
/*Generated! Do not modify!*/ import generated.fliesenui.core.IDLabelImageAssetList;
/*Generated! Do not modify!*/ import generated.fliesenui.core.IDLabelList;
/*Generated! Do not modify!*/ import generated.fliesenui.core.FLUIReplyAction.ReplyActionType;
/*Generated! Do not modify!*/ import generated.fliesenui.core.InputDialogParameters;
/*Generated! Do not modify!*/ import generated.fliesenui.core.ConfirmDialogParameters;
/*Generated! Do not modify!*/ import generated.fliesenui.core.ListChooserParameters;
/*Generated! Do not modify!*/ import generated.fliesenui.core.IDLabel;
/*Generated! Do not modify!*/ import generated.fliesenui.core.IDLabelImageAsset;
/*Generated! Do not modify!*/ import generated.fliesenui.core.ListChooserItem;
/*Generated! Do not modify!*/ import generated.fliesenui.core.BrightMarkdown;
/*Generated! Do not modify!*/ import generated.fliesenui.core.FLUIMarkdownFormatting;
/*Generated! Do not modify!*/ import generated.fliesenui.core.FLUIImageAssets.ImageAsset;
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ import generated.fliesenui.dto.DetailsParameterDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.DetailsParameterListDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.EditStateDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.EditStateListDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.IdAndLabelDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.IdAndLabelListDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.IdAndLabelListListDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.OverviewItemDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.OverviewItemListDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.OverviewListDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.OverviewListListDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.OverviewParameterDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.OverviewParameterListDTO;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.DetailsParameterDTOBuilder;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.DetailsParameterListDTOBuilder;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.EditStateDTOBuilder;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.EditStateListDTOBuilder;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.IdAndLabelDTOBuilder;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.IdAndLabelListDTOBuilder;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.IdAndLabelListListDTOBuilder;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.OverviewItemDTOBuilder;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.OverviewItemListDTOBuilder;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.OverviewListDTOBuilder;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.OverviewListListDTOBuilder;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.OverviewParameterDTOBuilder;
/*Generated! Do not modify!*/ import generated.fliesenui.dto.OverviewParameterListDTOBuilder;
/*Generated! Do not modify!*/
/* NOTE(review): auto-generated FliesenUI screen-reply class. The per-line
   markers say "Do not modify" — change the screen definition and regenerate
   instead of hand-editing. The comments below are review notes only. */
/*Generated! Do not modify!*/ public class OverviewReply extends FLUIAbstractReply implements CommonSharedReply {
/*Generated! Do not modify!*/ public OverviewReply(boolean recordMode, StringLanguage currentLanguage) {
/*Generated! Do not modify!*/ super(recordMode, currentLanguage);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ protected String getJSON() {
/*Generated! Do not modify!*/ return gson.toJson(replyDTO);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
// Widget-property setters: each records the variable name in the reply DTO's
// "to set" list and stores the value (a null text removes the stored value).
// In record mode the call is additionally logged for playback.
/*Generated! Do not modify!*/ public void setInfoText(String text){
/*Generated! Do not modify!*/ replyDTO.getVariablesToSet().add("overview_info_propertyText");
/*Generated! Do not modify!*/ if (text == null) {
/*Generated! Do not modify!*/ replyDTO.getVariableValues().remove("overview_info_propertyText");
/*Generated! Do not modify!*/ } else {
/*Generated! Do not modify!*/ replyDTO.getVariableValues().put("overview_info_propertyText", text);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("setInfoText(" + escapeString(text) + ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void setInfoVisible(boolean visible){
/*Generated! Do not modify!*/ replyDTO.getVariablesToSet().add("overview_info_propertyVisible");
/*Generated! Do not modify!*/ replyDTO.getVariableValues().put("overview_info_propertyVisible", visible);
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("setInfoVisible(" + visible + ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void setMenuButtonText(String text){
/*Generated! Do not modify!*/ replyDTO.getVariablesToSet().add("overview_menuButton_propertyText");
/*Generated! Do not modify!*/ if (text == null) {
/*Generated! Do not modify!*/ replyDTO.getVariableValues().remove("overview_menuButton_propertyText");
/*Generated! Do not modify!*/ } else {
/*Generated! Do not modify!*/ replyDTO.getVariableValues().put("overview_menuButton_propertyText", text);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("setMenuButtonText(" + escapeString(text) + ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void setMenuButtonVisible(boolean visible){
/*Generated! Do not modify!*/ replyDTO.getVariablesToSet().add("overview_menuButton_propertyVisible");
/*Generated! Do not modify!*/ replyDTO.getVariableValues().put("overview_menuButton_propertyVisible", visible);
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("setMenuButtonVisible(" + visible + ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void setSortInfoTextText(String text){
/*Generated! Do not modify!*/ replyDTO.getVariablesToSet().add("overview_sortInfoText_propertyText");
/*Generated! Do not modify!*/ if (text == null) {
/*Generated! Do not modify!*/ replyDTO.getVariableValues().remove("overview_sortInfoText_propertyText");
/*Generated! Do not modify!*/ } else {
/*Generated! Do not modify!*/ replyDTO.getVariableValues().put("overview_sortInfoText_propertyText", text);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("setSortInfoTextText(" + escapeString(text) + ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void setSortInfoTextVisible(boolean visible){
/*Generated! Do not modify!*/ replyDTO.getVariablesToSet().add("overview_sortInfoText_propertyVisible");
/*Generated! Do not modify!*/ replyDTO.getVariableValues().put("overview_sortInfoText_propertyVisible", visible);
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("setSortInfoTextVisible(" + visible + ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void setColorFilterInfoTextText(String text){
/*Generated! Do not modify!*/ replyDTO.getVariablesToSet().add("overview_colorFilterInfoText_propertyText");
/*Generated! Do not modify!*/ if (text == null) {
/*Generated! Do not modify!*/ replyDTO.getVariableValues().remove("overview_colorFilterInfoText_propertyText");
/*Generated! Do not modify!*/ } else {
/*Generated! Do not modify!*/ replyDTO.getVariableValues().put("overview_colorFilterInfoText_propertyText", text);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("setColorFilterInfoTextText(" + escapeString(text) + ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void setColorFilterInfoTextVisible(boolean visible){
/*Generated! Do not modify!*/ replyDTO.getVariablesToSet().add("overview_colorFilterInfoText_propertyVisible");
/*Generated! Do not modify!*/ replyDTO.getVariableValues().put("overview_colorFilterInfoText_propertyVisible", visible);
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("setColorFilterInfoTextVisible(" + visible + ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void setSortInfoBarVisible(boolean visible){
/*Generated! Do not modify!*/ replyDTO.getVariablesToSet().add("overview_sortInfoBar_propertyVisible");
/*Generated! Do not modify!*/ replyDTO.getVariableValues().put("overview_sortInfoBar_propertyVisible", visible);
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("setSortInfoBarVisible(" + visible + ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void setColorFilterInfoBarVisible(boolean visible){
/*Generated! Do not modify!*/ replyDTO.getVariablesToSet().add("overview_colorFilterInfoBar_propertyVisible");
/*Generated! Do not modify!*/ replyDTO.getVariableValues().put("overview_colorFilterInfoBar_propertyVisible", visible);
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("setColorFilterInfoBarVisible(" + visible + ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void setTopContainerVisible(boolean visible){
/*Generated! Do not modify!*/ replyDTO.getVariablesToSet().add("overview_topContainer_propertyVisible");
/*Generated! Do not modify!*/ replyDTO.getVariableValues().put("overview_topContainer_propertyVisible", visible);
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("setTopContainerVisible(" + visible + ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/
// DTO setters: attach a data object to the reply under a fixed key (null removes it).
/*Generated! Do not modify!*/ public void setItemsDTO(OverviewListDTO dto){
/*Generated! Do not modify!*/ replyDTO.getDTOsToSet().add("items");
/*Generated! Do not modify!*/ if (dto == null){
/*Generated! Do not modify!*/ replyDTO.getDTOValues().remove("items");
/*Generated! Do not modify!*/ } else {
/*Generated! Do not modify!*/ replyDTO.getDTOValues().put("items", dto);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction(ReplyActionType.SET_DTO, "setItemsDTO(", gson.toJson(dto), getClassName(dto));
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void setParameterDTO(OverviewParameterDTO dto){
/*Generated! Do not modify!*/ replyDTO.getDTOsToSet().add("parameter");
/*Generated! Do not modify!*/ if (dto == null){
/*Generated! Do not modify!*/ replyDTO.getDTOValues().remove("parameter");
/*Generated! Do not modify!*/ } else {
/*Generated! Do not modify!*/ replyDTO.getDTOValues().put("parameter", dto);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction(ReplyActionType.SET_DTO, "setParameterDTO(", gson.toJson(dto), getClassName(dto));
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/
// Screen-navigation actions: set the target screen id (and its open parameter) on the reply.
/*Generated! Do not modify!*/ public void openScreenAbout(OverviewParameterDTO parameter){
/*Generated! Do not modify!*/ replyDTO.setOpenParameter(parameter);
/*Generated! Do not modify!*/ replyDTO.setScreenToOpen("about");
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction(ReplyActionType.OPEN_SCREEN, "openScreenAbout(", gson.toJson(parameter), getClassName(parameter));
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void openScreenDetails(DetailsParameterDTO parameter){
/*Generated! Do not modify!*/ replyDTO.setOpenParameter(parameter);
/*Generated! Do not modify!*/ replyDTO.setScreenToOpen("details");
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction(ReplyActionType.OPEN_SCREEN, "openScreenDetails(", gson.toJson(parameter), getClassName(parameter));
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void openScreenLogin(){
/*Generated! Do not modify!*/ replyDTO.setScreenToOpen("login");
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction(ReplyActionType.OPEN_SCREEN, "openScreenLogin(");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void openScreenMarkdownHelp(OverviewParameterDTO parameter){
/*Generated! Do not modify!*/ replyDTO.setOpenParameter(parameter);
/*Generated! Do not modify!*/ replyDTO.setScreenToOpen("markdownHelp");
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction(ReplyActionType.OPEN_SCREEN, "openScreenMarkdownHelp(", gson.toJson(parameter), getClassName(parameter));
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void openScreenOverview(OverviewParameterDTO parameter){
/*Generated! Do not modify!*/ replyDTO.setOpenParameter(parameter);
/*Generated! Do not modify!*/ replyDTO.setScreenToOpen("overview");
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction(ReplyActionType.OPEN_SCREEN, "openScreenOverview(", gson.toJson(parameter), getClassName(parameter));
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/
// Convenience overloads that build the parameter DTO from raw values.
/*Generated! Do not modify!*/ public void openScreenAbout(String password) {
/*Generated! Do not modify!*/ openScreenAbout(OverviewParameterDTOBuilder.construct(password));
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void openScreenDetails(String noteName, String password) {
/*Generated! Do not modify!*/ openScreenDetails(DetailsParameterDTOBuilder.construct(noteName, password));
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void openScreenMarkdownHelp(String password) {
/*Generated! Do not modify!*/ openScreenMarkdownHelp(OverviewParameterDTOBuilder.construct(password));
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public void openScreenOverview(String password) {
/*Generated! Do not modify!*/ openScreenOverview(OverviewParameterDTOBuilder.construct(password));
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/
// Dialog helpers: populate the corresponding dialog-parameter object on the
// reply and return an options object for further fluent configuration.
/*Generated! Do not modify!*/ public OverviewStringInputDialogOptions showInputDialog(String referenceID, String title, String textContent, String label, String initialValueText, String okText, String cancelText){
/*Generated! Do not modify!*/ InputDialogParameters inputDialogParameters = new InputDialogParameters();
/*Generated! Do not modify!*/ inputDialogParameters.setReferenceID(referenceID);
/*Generated! Do not modify!*/ inputDialogParameters.setTitle(title);
/*Generated! Do not modify!*/ inputDialogParameters.setTextContent(textContent);
/*Generated! Do not modify!*/ inputDialogParameters.setLabel(label);
/*Generated! Do not modify!*/ inputDialogParameters.setInitialValueText(initialValueText);
/*Generated! Do not modify!*/ inputDialogParameters.setOkText(okText);
/*Generated! Do not modify!*/ inputDialogParameters.setCancelText(cancelText);
/*Generated! Do not modify!*/ replyDTO.setInputDialogParameters(inputDialogParameters);
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("showInputDialog(" + escapeString(referenceID) + ", " + escapeString(title) + ", " + escapeString(textContent)
/*Generated! Do not modify!*/ + ", " + escapeString(label) + ", " + escapeString(initialValueText) + ", " + escapeString(okText)
/*Generated! Do not modify!*/ + ", " + escapeString(cancelText)+ ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ return new OverviewStringInputDialogOptions(this, inputDialogParameters);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public OverviewConfirmDialogOptions showConfirmDialog(String referenceID, String title, String textContent, String okText, String cancelText){
/*Generated! Do not modify!*/ ConfirmDialogParameters confirmDialogParameters = new ConfirmDialogParameters();
/*Generated! Do not modify!*/ confirmDialogParameters.setReferenceID(referenceID);
/*Generated! Do not modify!*/ confirmDialogParameters.setTitle(title);
/*Generated! Do not modify!*/ confirmDialogParameters.setTextContent(textContent);
/*Generated! Do not modify!*/ confirmDialogParameters.setOkText(okText);
/*Generated! Do not modify!*/ confirmDialogParameters.setCancelText(cancelText);
/*Generated! Do not modify!*/ replyDTO.setConfirmDialogParameters(confirmDialogParameters);
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction("showConfirmDialog(" + escapeString(referenceID) + ", " + escapeString(title) + ", " + escapeString(textContent)
/*Generated! Do not modify!*/ + ", " + escapeString(okText) + ", " + escapeString(cancelText)+ ");");
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ return new OverviewConfirmDialogOptions(this, confirmDialogParameters);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public OverviewListChooserDialogOptions showListChooser(String referenceID, boolean multiSelect, boolean showFilter, String title, IDLabelImageAssetList items, Collection<String> selectedIDs){
/*Generated! Do not modify!*/ return showListChooser(referenceID, multiSelect, showFilter, title, DEFAULT_OK_TEXT, DEFAULT_CANCEL_TEXT, items, selectedIDs);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public OverviewListChooserDialogOptions showListChooser(String referenceID, boolean multiSelect, boolean showFilter, String title, IDLabelList items, Collection<String> selectedIDs){
/*Generated! Do not modify!*/ return showListChooser(referenceID, multiSelect, showFilter, title, DEFAULT_OK_TEXT, DEFAULT_CANCEL_TEXT, items, selectedIDs);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public OverviewListChooserDialogOptions showListChooser(String referenceID, boolean multiSelect, boolean showFilter, String title, String okText, String cancelText, IDLabelImageAssetList items, Collection<String> selectedIDs){
/*Generated! Do not modify!*/ ListChooserParameters parameters = createListChooserParameters(referenceID, multiSelect, showFilter, title, okText, cancelText);
/*Generated! Do not modify!*/ Set<String> selectedIDsSet = new HashSet<String>();
/*Generated! Do not modify!*/ if (selectedIDs != null){
/*Generated! Do not modify!*/ selectedIDsSet = new HashSet<String>(selectedIDs);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ List<ListChooserItem> chooserItems = new ArrayList<ListChooserItem>();
/*Generated! Do not modify!*/ for (IDLabelImageAsset i: items.getItems()){
/*Generated! Do not modify!*/ chooserItems.add(createItem(i.getID(), i.getLabel(), i.getImageAssetID(), selectedIDsSet.contains(i.getID())));
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ parameters.setShowIcons(true);
/*Generated! Do not modify!*/ parameters.setItems(chooserItems);
/*Generated! Do not modify!*/ replyDTO.setListChooserParameters(parameters);
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction(ReplyActionType.SHOW_LIST_CHOOSER_IMGS, "showListChooser(" + escapeString(referenceID) + ", " + multiSelect + ", " + showFilter + ", " + escapeString(title)
/*Generated! Do not modify!*/ + ", " + escapeString(okText) + ", " + escapeString(cancelText) + ", ", gson.toJson(items), selectedIDs);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ return new OverviewListChooserDialogOptions(this, parameters);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ public OverviewListChooserDialogOptions showListChooser(String referenceID, boolean multiSelect, boolean showFilter, String title, String okText, String cancelText, IDLabelList items, Collection<String> selectedIDs){
/*Generated! Do not modify!*/ ListChooserParameters parameters = createListChooserParameters(referenceID, multiSelect, showFilter, title, okText, cancelText);
/*Generated! Do not modify!*/ Set<String> selectedIDsSet = new HashSet<String>();
/*Generated! Do not modify!*/ if (selectedIDs != null){
/*Generated! Do not modify!*/ selectedIDsSet = new HashSet<String>(selectedIDs);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ List<ListChooserItem> chooserItems = new ArrayList<ListChooserItem>();
/*Generated! Do not modify!*/ for (IDLabel i: items.getItems()){
/*Generated! Do not modify!*/ chooserItems.add(createItem(i.getID(), i.getLabel(), null, selectedIDsSet.contains(i.getID())));
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ parameters.setShowIcons(false);
/*Generated! Do not modify!*/ parameters.setItems(chooserItems);
/*Generated! Do not modify!*/ replyDTO.setListChooserParameters(parameters);
/*Generated! Do not modify!*/ if (recordMode){
/*Generated! Do not modify!*/ addRecordedAction(ReplyActionType.SHOW_LIST_CHOOSER_TEXTS, "showListChooser(" + escapeString(referenceID) + ", " + multiSelect + ", " + showFilter + ", " + escapeString(title)
/*Generated! Do not modify!*/ + ", " + escapeString(okText) + ", " + escapeString(cancelText) + ", ", gson.toJson(items), selectedIDs);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/ return new OverviewListChooserDialogOptions(this, parameters);
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ private ListChooserParameters createListChooserParameters(String referenceID, boolean multiSelect, boolean showFilter, String title, String okText, String cancelText) {
/*Generated! Do not modify!*/ ListChooserParameters parameters = new ListChooserParameters();
/*Generated! Do not modify!*/ parameters.setReferenceID(referenceID);
/*Generated! Do not modify!*/ parameters.setMultiSelect(multiSelect);
/*Generated! Do not modify!*/ parameters.setShowFilter(showFilter);
/*Generated! Do not modify!*/ parameters.setTitle(title);
/*Generated! Do not modify!*/ parameters.setOkText(okText);
/*Generated! Do not modify!*/ parameters.setCancelText(cancelText);
/*Generated! Do not modify!*/ return parameters;
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ private ListChooserItem createItem(String id, String label, String imageAssetID, boolean selected) {
/*Generated! Do not modify!*/ ListChooserItem result = new ListChooserItem();
/*Generated! Do not modify!*/ result.setID(id);
/*Generated! Do not modify!*/ result.setLabel(label);
/*Generated! Do not modify!*/ result.setImageAssetID(imageAssetID);
/*Generated! Do not modify!*/ result.setSelected(selected);
/*Generated! Do not modify!*/ return result;
/*Generated! Do not modify!*/ }
/*Generated! Do not modify!*/
/*Generated! Do not modify!*/ }
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.cluster.dialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Dialog;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Props;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.ui.core.PropsUI;
import org.pentaho.di.ui.core.dialog.EnterTextDialog;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.gui.WindowProperty;
import org.pentaho.di.ui.core.widget.TextVar;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
import org.pentaho.di.www.RegisterTransServlet;
/**
 * Dialog that allows you to edit the settings of a slave server connection
 * (name, host/port/credentials on the "Service" tab, optional HTTP proxy on the "Proxy" tab).
 *
 * @see SlaveServer
 * @author Matt
 * @since 31-10-2006
 *
 */
public class SlaveServerDialog extends Dialog {
  private static Class<?> PKG = SlaveServerDialog.class; // for i18n purposes, needed by Translator2!!

  /** Working copy that the widgets edit; applied to {@link #originalServer} only when OK is pressed. */
  private SlaveServer slaveServer;

  private CTabFolder wTabFolder;
  private FormData fdTabFolder;
  private CTabItem wServiceTab, wProxyTab;
  private Composite wServiceComp, wProxyComp;
  private FormData fdServiceComp, fdProxyComp;
  private Shell shell;

  // Service
  private Text wName;
  private TextVar wHostname, wPort, wWebAppName, wUsername, wPassword;
  private Button wMaster;
  private Button wSSL;

  // Proxy
  private TextVar wProxyHost, wProxyPort, wNonProxyHosts;

  private Button wOK, wCancel;
  private ModifyListener lsMod;
  private PropsUI props;

  /** Left column percentage / spacing used by all FormData layout calculations. */
  private int middle;
  private int margin;

  /** The caller's server object; updated in place on OK, left untouched on cancel. */
  private SlaveServer originalServer;

  /** Dialog result: true when the user confirmed with OK. */
  private boolean ok;

  /**
   * @param par parent shell
   * @param slaveServer the server to edit; a clone is edited so Cancel leaves the caller's object unchanged
   */
  public SlaveServerDialog( Shell par, SlaveServer slaveServer ) {
    super( par, SWT.NONE );
    // Edit a clone; the original is only updated in ok().
    this.slaveServer = (SlaveServer) slaveServer.clone();
    this.slaveServer.shareVariablesWith( slaveServer );
    this.originalServer = slaveServer;
    props = PropsUI.getInstance();
    ok = false;
  }

  /**
   * Builds the dialog, opens it and blocks in an SWT event loop until it is closed.
   *
   * @return true if the user pressed OK, false if the dialog was cancelled or closed
   */
  public boolean open() {
    Shell parent = getParent();
    shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN );
    props.setLook( shell );
    shell.setImage( GUIResource.getInstance().getImageSlave() );

    // Flag the working copy as changed whenever any field is edited.
    lsMod = new ModifyListener() {
      public void modifyText( ModifyEvent e ) {
        slaveServer.setChanged();
      }
    };

    middle = props.getMiddlePct();
    margin = Const.MARGIN;

    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = Const.FORM_MARGIN;
    formLayout.marginHeight = Const.FORM_MARGIN;

    shell.setText( BaseMessages.getString( PKG, "SlaveServerDialog.Shell.Title" ) );
    shell.setLayout( formLayout );

    // First, add the buttons...

    // Buttons
    wOK = new Button( shell, SWT.PUSH );
    wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );

    wCancel = new Button( shell, SWT.PUSH );
    wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );

    Button[] buttons = new Button[] { wOK, wCancel };
    BaseStepDialog.positionBottomButtons( shell, buttons, margin, null );

    // The rest stays above the buttons...
    wTabFolder = new CTabFolder( shell, SWT.BORDER );
    props.setLook( wTabFolder, Props.WIDGET_STYLE_TAB );

    addServiceTab();
    addProxyTab();

    fdTabFolder = new FormData();
    fdTabFolder.left = new FormAttachment( 0, 0 );
    fdTabFolder.top = new FormAttachment( 0, margin );
    fdTabFolder.right = new FormAttachment( 100, 0 );
    fdTabFolder.bottom = new FormAttachment( wOK, -margin );
    wTabFolder.setLayoutData( fdTabFolder );

    // Add listeners
    wOK.addListener( SWT.Selection, new Listener() {
      public void handleEvent( Event e ) {
        ok();
      }
    } );
    wCancel.addListener( SWT.Selection, new Listener() {
      public void handleEvent( Event e ) {
        cancel();
      }
    } );

    // Pressing Enter in any of the text fields behaves like clicking OK.
    SelectionAdapter selAdapter = new SelectionAdapter() {
      public void widgetDefaultSelected( SelectionEvent e ) {
        ok();
      }
    };
    wUsername.addSelectionListener( selAdapter );
    wPassword.addSelectionListener( selAdapter );
    wHostname.addSelectionListener( selAdapter );
    wPort.addSelectionListener( selAdapter );
    wWebAppName.addSelectionListener( selAdapter );
    wProxyHost.addSelectionListener( selAdapter );
    wProxyPort.addSelectionListener( selAdapter );
    wNonProxyHosts.addSelectionListener( selAdapter );

    // Detect X or ALT-F4 or something that kills this window...
    shell.addShellListener( new ShellAdapter() {
      public void shellClosed( ShellEvent e ) {
        cancel();
      }
    } );

    wTabFolder.setSelection( 0 );

    getData();

    BaseStepDialog.setSize( shell );

    shell.open();
    Display display = parent.getDisplay();
    // Standard SWT modal loop: pump events until the shell is disposed by ok()/cancel().
    while ( !shell.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }
    return ok;
  }

  /** Builds the "Service" tab: name, host, port, web-app name, credentials, master and SSL flags. */
  private void addServiceTab() {
    // ////////////////////////
    // START OF SERVICE TAB ///
    // ////////////////////////
    wServiceTab = new CTabItem( wTabFolder, SWT.NONE );
    wServiceTab.setText( BaseMessages.getString( PKG, "SlaveServerDialog.USER_TAB_SERVICE" ) );

    wServiceComp = new Composite( wTabFolder, SWT.NONE );
    props.setLook( wServiceComp );

    FormLayout GenLayout = new FormLayout();
    GenLayout.marginWidth = Const.FORM_MARGIN;
    GenLayout.marginHeight = Const.FORM_MARGIN;
    wServiceComp.setLayout( GenLayout );

    // What's the name
    Label wlName = new Label( wServiceComp, SWT.RIGHT );
    props.setLook( wlName );
    wlName.setText( BaseMessages.getString( PKG, "SlaveServerDialog.ServerName.Label" ) );
    FormData fdlName = new FormData();
    fdlName.top = new FormAttachment( 0, 0 );
    fdlName.left = new FormAttachment( 0, 0 ); // First one in the left top corner
    fdlName.right = new FormAttachment( middle, -margin );
    wlName.setLayoutData( fdlName );

    wName = new Text( wServiceComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wName );
    wName.addModifyListener( lsMod );
    FormData fdName = new FormData();
    fdName.top = new FormAttachment( 0, 0 );
    fdName.left = new FormAttachment( middle, 0 ); // To the right of the label
    fdName.right = new FormAttachment( 95, 0 );
    wName.setLayoutData( fdName );

    // What's the hostname
    Label wlHostname = new Label( wServiceComp, SWT.RIGHT );
    props.setLook( wlHostname );
    wlHostname.setText( BaseMessages.getString( PKG, "SlaveServerDialog.HostIP.Label" ) );
    FormData fdlHostname = new FormData();
    fdlHostname.top = new FormAttachment( wName, margin * 2 );
    fdlHostname.left = new FormAttachment( 0, 0 ); // First one in the left top corner
    fdlHostname.right = new FormAttachment( middle, -margin );
    wlHostname.setLayoutData( fdlHostname );

    wHostname = new TextVar( slaveServer, wServiceComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wHostname );
    wHostname.addModifyListener( lsMod );
    FormData fdHostname = new FormData();
    fdHostname.top = new FormAttachment( wName, margin * 2 );
    fdHostname.left = new FormAttachment( middle, 0 ); // To the right of the label
    fdHostname.right = new FormAttachment( 95, 0 );
    wHostname.setLayoutData( fdHostname );

    // What's the port?
    Label wlPort = new Label( wServiceComp, SWT.RIGHT );
    props.setLook( wlPort );
    wlPort.setText( BaseMessages.getString( PKG, "SlaveServerDialog.Port.Label" ) );
    FormData fdlPort = new FormData();
    fdlPort.top = new FormAttachment( wHostname, margin );
    fdlPort.left = new FormAttachment( 0, 0 ); // First one in the left top corner
    fdlPort.right = new FormAttachment( middle, -margin );
    wlPort.setLayoutData( fdlPort );

    wPort = new TextVar( slaveServer, wServiceComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wPort );
    wPort.addModifyListener( lsMod );
    FormData fdPort = new FormData();
    fdPort.top = new FormAttachment( wHostname, margin );
    fdPort.left = new FormAttachment( middle, 0 ); // To the right of the label
    fdPort.right = new FormAttachment( 95, 0 );
    wPort.setLayoutData( fdPort );

    // webapp name (optional)
    Label wlWebAppName = new Label( wServiceComp, SWT.RIGHT );
    wlWebAppName.setText( BaseMessages.getString( PKG, "SlaveServerDialog.WebAppName.Label" ) );
    props.setLook( wlWebAppName );
    FormData fdlWebAppName = new FormData();
    fdlWebAppName.top = new FormAttachment( wPort, margin );
    fdlWebAppName.left = new FormAttachment( 0, 0 );
    fdlWebAppName.right = new FormAttachment( middle, -margin );
    wlWebAppName.setLayoutData( fdlWebAppName );

    wWebAppName = new TextVar( slaveServer, wServiceComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wWebAppName );
    wWebAppName.addModifyListener( lsMod );
    FormData fdWebAppName = new FormData();
    fdWebAppName.top = new FormAttachment( wPort, margin );
    fdWebAppName.left = new FormAttachment( middle, 0 );
    fdWebAppName.right = new FormAttachment( 95, 0 );
    wWebAppName.setLayoutData( fdWebAppName );

    // Username
    Label wlUsername = new Label( wServiceComp, SWT.RIGHT );
    wlUsername.setText( BaseMessages.getString( PKG, "SlaveServerDialog.UserName.Label" ) );
    props.setLook( wlUsername );
    FormData fdlUsername = new FormData();
    fdlUsername.top = new FormAttachment( wWebAppName, margin );
    fdlUsername.left = new FormAttachment( 0, 0 );
    fdlUsername.right = new FormAttachment( middle, -margin );
    wlUsername.setLayoutData( fdlUsername );

    wUsername = new TextVar( slaveServer, wServiceComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wUsername );
    wUsername.addModifyListener( lsMod );
    FormData fdUsername = new FormData();
    fdUsername.top = new FormAttachment( wWebAppName, margin );
    fdUsername.left = new FormAttachment( middle, 0 );
    fdUsername.right = new FormAttachment( 95, 0 );
    wUsername.setLayoutData( fdUsername );

    // Password
    Label wlPassword = new Label( wServiceComp, SWT.RIGHT );
    wlPassword.setText( BaseMessages.getString( PKG, "SlaveServerDialog.Password.Label" ) );
    props.setLook( wlPassword );
    FormData fdlPassword = new FormData();
    fdlPassword.top = new FormAttachment( wUsername, margin );
    fdlPassword.left = new FormAttachment( 0, 0 );
    fdlPassword.right = new FormAttachment( middle, -margin );
    wlPassword.setLayoutData( fdlPassword );

    wPassword = new TextVar( slaveServer, wServiceComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wPassword );
    wPassword.setEchoChar( '*' ); // mask the password while typing
    wPassword.addModifyListener( lsMod );
    FormData fdPassword = new FormData();
    fdPassword.top = new FormAttachment( wUsername, margin );
    fdPassword.left = new FormAttachment( middle, 0 );
    fdPassword.right = new FormAttachment( 95, 0 );
    wPassword.setLayoutData( fdPassword );

    // Master
    Label wlMaster = new Label( wServiceComp, SWT.RIGHT );
    wlMaster.setText( BaseMessages.getString( PKG, "SlaveServerDialog.IsTheMaster.Label" ) );
    props.setLook( wlMaster );
    FormData fdlMaster = new FormData();
    fdlMaster.top = new FormAttachment( wPassword, margin );
    fdlMaster.left = new FormAttachment( 0, 0 );
    fdlMaster.right = new FormAttachment( middle, -margin );
    wlMaster.setLayoutData( fdlMaster );

    wMaster = new Button( wServiceComp, SWT.CHECK );
    props.setLook( wMaster );
    FormData fdMaster = new FormData();
    fdMaster.top = new FormAttachment( wPassword, margin );
    fdMaster.left = new FormAttachment( middle, 0 );
    fdMaster.right = new FormAttachment( 95, 0 );
    wMaster.setLayoutData( fdMaster );

    // Https — created but hidden for now ("future functional" below)
    Control lastControl = wMaster;
    {
      Label wlSSL = new Label( wServiceComp, SWT.RIGHT );
      wlSSL.setText( BaseMessages.getString( PKG, "SlaveServerDialog.UseSsl.Label" ) );
      props.setLook( wlSSL );
      FormData fd = new FormData();
      fd.top = new FormAttachment( lastControl, margin );
      fd.left = new FormAttachment( 0, 0 );
      fd.right = new FormAttachment( middle, -margin );
      wlSSL.setLayoutData( fd );
      wlSSL.setVisible( false ); // future functional
    }
    {
      wSSL = new Button( wServiceComp, SWT.CHECK );
      props.setLook( wSSL );
      FormData fd = new FormData();
      fd.top = new FormAttachment( lastControl, margin );
      fd.left = new FormAttachment( middle, 0 );
      fd.right = new FormAttachment( 95, 0 );
      wSSL.setLayoutData( fd );
      wSSL.setVisible( false ); // future functional
    }

    fdServiceComp = new FormData();
    fdServiceComp.left = new FormAttachment( 0, 0 );
    fdServiceComp.top = new FormAttachment( 0, 0 );
    fdServiceComp.right = new FormAttachment( 100, 0 );
    fdServiceComp.bottom = new FormAttachment( 100, 0 );
    wServiceComp.setLayoutData( fdServiceComp );

    wServiceComp.layout();
    wServiceTab.setControl( wServiceComp );

    // ///////////////////////////////////////////////////////////
    // / END OF SERVICE TAB
    // ///////////////////////////////////////////////////////////
  }

  /** Builds the "Proxy" tab: proxy host, proxy port and the non-proxy host list. */
  private void addProxyTab() {
    // ////////////////////////
    // START OF PROXY TAB///
    // /
    wProxyTab = new CTabItem( wTabFolder, SWT.NONE );
    wProxyTab.setText( BaseMessages.getString( PKG, "SlaveServerDialog.USER_TAB_PROXY" ) );

    FormLayout poolLayout = new FormLayout();
    poolLayout.marginWidth = Const.FORM_MARGIN;
    poolLayout.marginHeight = Const.FORM_MARGIN;

    wProxyComp = new Composite( wTabFolder, SWT.NONE );
    props.setLook( wProxyComp );
    wProxyComp.setLayout( poolLayout );

    // Proxy server hostname
    Label wlProxyHost = new Label( wProxyComp, SWT.RIGHT );
    props.setLook( wlProxyHost );
    wlProxyHost.setText( BaseMessages.getString( PKG, "SlaveServerDialog.ProxyServerName.Label" ) );
    FormData fdlProxyHost = new FormData();
    fdlProxyHost.top = new FormAttachment( 0, 0 );
    fdlProxyHost.left = new FormAttachment( 0, 0 ); // First one in the left top corner
    fdlProxyHost.right = new FormAttachment( middle, -margin );
    wlProxyHost.setLayoutData( fdlProxyHost );

    wProxyHost = new TextVar( slaveServer, wProxyComp, SWT.BORDER | SWT.LEFT | SWT.SINGLE );
    props.setLook( wProxyHost );
    wProxyHost.addModifyListener( lsMod );
    FormData fdProxyHost = new FormData();
    fdProxyHost.top = new FormAttachment( 0, 0 );
    fdProxyHost.left = new FormAttachment( middle, 0 ); // To the right of the label
    fdProxyHost.right = new FormAttachment( 95, 0 );
    wProxyHost.setLayoutData( fdProxyHost );

    // Proxy server port
    Label wlProxyPort = new Label( wProxyComp, SWT.RIGHT );
    props.setLook( wlProxyPort );
    wlProxyPort.setText( BaseMessages.getString( PKG, "SlaveServerDialog.ProxyServerPort.Label" ) );
    FormData fdlProxyPort = new FormData();
    fdlProxyPort.top = new FormAttachment( wProxyHost, margin );
    fdlProxyPort.left = new FormAttachment( 0, 0 ); // First one in the left top corner
    fdlProxyPort.right = new FormAttachment( middle, -margin );
    wlProxyPort.setLayoutData( fdlProxyPort );

    wProxyPort = new TextVar( slaveServer, wProxyComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wProxyPort );
    wProxyPort.addModifyListener( lsMod );
    FormData fdProxyPort = new FormData();
    fdProxyPort.top = new FormAttachment( wProxyHost, margin );
    fdProxyPort.left = new FormAttachment( middle, 0 ); // To the right of the label
    fdProxyPort.right = new FormAttachment( 95, 0 );
    wProxyPort.setLayoutData( fdProxyPort );

    // Hosts for which the proxy is ignored
    Label wlNonProxyHosts = new Label( wProxyComp, SWT.RIGHT );
    props.setLook( wlNonProxyHosts );
    wlNonProxyHosts.setText( BaseMessages.getString( PKG, "SlaveServerDialog.IgnoreProxyForHosts.Label" ) );
    FormData fdlNonProxyHosts = new FormData();
    fdlNonProxyHosts.top = new FormAttachment( wProxyPort, margin );
    fdlNonProxyHosts.left = new FormAttachment( 0, 0 ); // First one in the left top corner
    fdlNonProxyHosts.right = new FormAttachment( middle, -margin );
    wlNonProxyHosts.setLayoutData( fdlNonProxyHosts );

    wNonProxyHosts = new TextVar( slaveServer, wProxyComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wNonProxyHosts );
    wNonProxyHosts.addModifyListener( lsMod );
    FormData fdNonProxyHosts = new FormData();
    fdNonProxyHosts.top = new FormAttachment( wProxyPort, margin );
    fdNonProxyHosts.left = new FormAttachment( middle, 0 ); // To the right of the label
    fdNonProxyHosts.right = new FormAttachment( 95, 0 );
    wNonProxyHosts.setLayoutData( fdNonProxyHosts );

    fdProxyComp = new FormData();
    fdProxyComp.left = new FormAttachment( 0, 0 );
    fdProxyComp.top = new FormAttachment( 0, 0 );
    fdProxyComp.right = new FormAttachment( 100, 0 );
    fdProxyComp.bottom = new FormAttachment( 100, 0 );
    wProxyComp.setLayoutData( fdProxyComp );

    wProxyComp.layout();
    wProxyTab.setControl( wProxyComp );
  }

  /** Saves the window geometry and disposes the shell, ending the event loop in {@link #open()}. */
  public void dispose() {
    props.setScreen( new WindowProperty( shell ) );
    shell.dispose();
  }

  /** Copies the values of the working {@link #slaveServer} copy into the widgets. */
  public void getData() {
    wName.setText( Const.NVL( slaveServer.getName(), "" ) );
    wHostname.setText( Const.NVL( slaveServer.getHostname(), "" ) );
    wPort.setText( Const.NVL( slaveServer.getPort(), "" ) );
    wWebAppName.setText( Const.NVL( slaveServer.getWebAppName(), "" ) );
    wUsername.setText( Const.NVL( slaveServer.getUsername(), "" ) );
    wPassword.setText( Const.NVL( slaveServer.getPassword(), "" ) );
    wProxyHost.setText( Const.NVL( slaveServer.getProxyHostname(), "" ) );
    wProxyPort.setText( Const.NVL( slaveServer.getProxyPort(), "" ) );
    wNonProxyHosts.setText( Const.NVL( slaveServer.getNonProxyHosts(), "" ) );
    wMaster.setSelection( slaveServer.isMaster() );
    wSSL.setSelection( slaveServer.isSslMode() );

    wName.setFocus();
  }

  /** Discards all edits; open() will return false because {@link #ok} stays false. */
  private void cancel() {
    originalServer = null;
    dispose();
  }

  /** Reads the widgets, applies all values to the caller's server object and closes the dialog. */
  public void ok() {
    getInfo();

    originalServer.setName( slaveServer.getName() );
    originalServer.setHostname( slaveServer.getHostname() );
    originalServer.setPort( slaveServer.getPort() );
    originalServer.setWebAppName( slaveServer.getWebAppName() );
    originalServer.setUsername( slaveServer.getUsername() );
    originalServer.setPassword( slaveServer.getPassword() );
    originalServer.setProxyHostname( slaveServer.getProxyHostname() );
    originalServer.setProxyPort( slaveServer.getProxyPort() );
    originalServer.setNonProxyHosts( slaveServer.getNonProxyHosts() );
    originalServer.setMaster( slaveServer.isMaster() );
    originalServer.setSslMode( slaveServer.isSslMode() );
    originalServer.setChanged();

    ok = true;

    dispose();
  }

  // Get dialog info in the working slave server copy
  private void getInfo() {
    slaveServer.setName( wName.getText() );
    slaveServer.setHostname( wHostname.getText() );
    slaveServer.setPort( wPort.getText() );
    slaveServer.setWebAppName( wWebAppName.getText() );
    slaveServer.setUsername( wUsername.getText() );
    slaveServer.setPassword( wPassword.getText() );
    slaveServer.setProxyHostname( wProxyHost.getText() );
    slaveServer.setProxyPort( wProxyPort.getText() );
    slaveServer.setNonProxyHosts( wNonProxyHosts.getText() );
    slaveServer.setMaster( wMaster.getSelection() );
    slaveServer.setSslMode( wSSL.getSelection() );
  }

  /**
   * Sends a sample XML document to the server's register-transformation servlet and
   * shows the reply (or the error) in a dialog. Performs network I/O.
   */
  public void test() {
    try {
      getInfo();
      String xml = "<sample/>";
      String reply = slaveServer.sendXML( xml, RegisterTransServlet.CONTEXT_PATH );

      String message =
        BaseMessages.getString( PKG, "SlaveServer.Replay.Info1" )
          + slaveServer.constructUrl( RegisterTransServlet.CONTEXT_PATH ) + Const.CR
          + BaseMessages.getString( PKG, "SlaveServer.Replay.Info2" ) + Const.CR + Const.CR;
      message += xml;
      message += Const.CR + Const.CR;
      message += "Reply was:" + Const.CR + Const.CR;
      message += reply + Const.CR;

      EnterTextDialog dialog =
        new EnterTextDialog(
          shell, "XML", BaseMessages.getString( PKG, "SlaveServer.RetournedXMLInfo" ), message );
      dialog.open();
    } catch ( Exception e ) {
      new ErrorDialog( shell, BaseMessages.getString( PKG, "SlaveServer.ExceptionError" ), BaseMessages.getString(
        PKG, "SlaveServer.ExceptionUnableGetReplay.Error1" )
        + slaveServer.getHostname()
        + BaseMessages.getString( PKG, "SlaveServer.ExceptionUnableGetReplay.Error2" ), e );
    }
  }
}
| |
/*
* Copyright 2015-2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.inventory.rest;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.Response.Status.FORBIDDEN;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.hawkular.inventory.api.Environments;
import org.hawkular.inventory.api.Feeds;
import org.hawkular.inventory.api.Metrics;
import org.hawkular.inventory.api.Parents;
import org.hawkular.inventory.api.model.Metric;
import org.hawkular.inventory.api.paging.Page;
import org.hawkular.inventory.paths.CanonicalPath;
import org.hawkular.inventory.rest.json.ApiError;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
/**
 * REST endpoints for CRUD operations on {@link Metric} entities, either under an
 * environment or under a feed.
 *
 * @author Lukas Krejci
 * @since 1.0
 */
@Path("/")
@Produces(value = APPLICATION_JSON)
@Consumes(value = APPLICATION_JSON)
@Api(value = "/", description = "Metrics CRUD", tags = "Metrics")
public class RestMetrics extends RestBase {

    @POST
    @Path("/{environmentId}/metrics")
    @ApiOperation("Creates a new metric in given environment")
    @ApiResponses({
                          @ApiResponse(code = 201, message = "Metric created"),
                          @ApiResponse(code = 400, message = "Invalid inputs", response = ApiError.class),
                          @ApiResponse(code = 401, message = "Unauthorized access"),
                          @ApiResponse(code = 409, message = "Metric already exists", response = ApiError.class),
                          @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
                  })
    public Response createMetric(@PathParam("environmentId") String environmentId,
                                 @ApiParam(required = true) Metric.Blueprint metric, @Context UriInfo uriInfo) {
        String tenantId = getTenantId();
        CanonicalPath env = CanonicalPath.of().tenant(tenantId).environment(environmentId).get();
        // Authorization check before touching inventory.
        if (!security.canCreate(Metric.class).under(env)) {
            return Response.status(FORBIDDEN).build();
        }
        Metric entity = createMetric(inventory.inspect(env, Environments.Single.class).metrics(), metric);
        return ResponseUtil.created(entity, uriInfo, metric.getId()).build();
    }

    @POST
    @Path("/feeds/{feedId}/metrics")
    @ApiOperation("Creates a new metric in given feed")
    @ApiResponses({
                          @ApiResponse(code = 201, message = "Metric created"),
                          @ApiResponse(code = 400, message = "Invalid inputs", response = ApiError.class),
                          @ApiResponse(code = 401, message = "Unauthorized access"),
                          @ApiResponse(code = 409, message = "Metric already exists", response = ApiError.class),
                          @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
                  })
    public Response createMetricInFeed(@PathParam("feedId") String feedId,
                                       @ApiParam(required = true) Metric.Blueprint metric, @Context UriInfo uriInfo) {
        String tenantId = getTenantId();
        CanonicalPath feed = CanonicalPath.of().tenant(tenantId).feed(feedId).get();
        if (!security.canCreate(Metric.class).under(feed)) {
            return Response.status(FORBIDDEN).build();
        }
        Metric entity = createMetric(inventory.inspect(feed, Feeds.Single.class).metrics(), metric);
        return ResponseUtil.created(entity, uriInfo, metric.getId()).build();
    }

    /**
     * Validates the blueprint and creates the metric through the given access interface.
     *
     * @param accessInterface the metrics access interface scoped to the target parent
     * @param metric          the blueprint to create; must have an id and a metric type path
     * @return the newly created metric
     * @throws IllegalArgumentException if the blueprint is null or incomplete
     */
    private Metric createMetric(Metrics.ReadWrite accessInterface, Metric.Blueprint metric) {
        if (metric == null) {
            throw new IllegalArgumentException("metric to create not specified");
        }

        if (metric.getId() == null) {
            throw new IllegalArgumentException("metric id not specified");
        }

        // The guard is on the metric type path, so say so in the message.
        if (metric.getMetricTypePath() == null) {
            throw new IllegalArgumentException("metric type path not specified");
        }

        return accessInterface.create(metric).entity();
    }

    @GET
    @Path("/{environmentId}/metrics/{metricId}")
    @ApiOperation("Retrieves a single metric")
    @ApiResponses({
                          @ApiResponse(code = 200, message = "OK"),
                          @ApiResponse(code = 401, message = "Unauthorized access"),
                          @ApiResponse(code = 404, message = "Environment or metric doesn't exist",
                                       response = ApiError.class),
                          @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
                  })
    public Metric getMetric(@PathParam("environmentId") String environmentId,
                            @PathParam("metricId") String metricId) {
        return inventory.tenants().get(getTenantId()).environments().get(environmentId).metrics().get(metricId)
                .entity();
    }

    @GET
    @Path("/feeds/{feedId}/metrics/{metricId}")
    @ApiOperation("Retrieves a single metric")
    @ApiResponses({
                          @ApiResponse(code = 200, message = "OK"),
                          @ApiResponse(code = 401, message = "Unauthorized access"),
                          @ApiResponse(code = 404, message = "Environment, feed or metric doesn't exist",
                                       response = ApiError.class),
                          @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
                  })
    public Metric getMetricInFeed(@PathParam("feedId") String feedId,
                                  @PathParam("metricId") String metricId) {
        return inventory.tenants().get(getTenantId()).feeds().get(feedId).metrics().get(metricId).entity();
    }

    @GET
    @Path("/{environmentId}/metrics")
    @ApiOperation("Retrieves all metrics in an environment. Accepts paging query parameters.")
    @ApiResponses({
                          @ApiResponse(code = 200, message = "OK"),
                          @ApiResponse(code = 401, message = "Unauthorized access"),
                          @ApiResponse(code = 404, message = "Tenant or environment doesn't exist",
                                       response = ApiError.class),
                          @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
                  })
    public Response getMetrics(@PathParam("environmentId") String environmentId,
                               @QueryParam("feedless") @DefaultValue("false") boolean feedless,
                               @Context UriInfo uriInfo) {
        String tenantId = getTenantId();
        Environments.Single envs = inventory.tenants().get(tenantId).environments().get(environmentId);
        // "feedless" restricts the result to metrics directly under the environment,
        // i.e. it excludes metrics contributed by feeds.
        Page<Metric> ret = (feedless ? envs.metrics() : envs.metricsUnder(Parents.any())).getAll()
                .entities(RequestUtil.extractPaging(uriInfo));
        return pagedResponse(Response.ok(), uriInfo, ret).build();
    }

    @GET
    @Path("/feeds/{feedId}/metrics")
    @ApiOperation("Retrieves all metrics in a feed")
    @ApiResponses({
                          @ApiResponse(code = 200, message = "OK"),
                          @ApiResponse(code = 401, message = "Unauthorized access"),
                          @ApiResponse(code = 404, message = "Tenant, environment or feed doesn't exist",
                                       response = ApiError.class),
                          @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
                  })
    public Response getMetrics(@PathParam("feedId") String feedId, @Context UriInfo uriInfo) {
        Page<Metric> ret = inventory.tenants().get(getTenantId()).feeds().get(feedId).metricsUnder(Parents.any())
                .getAll().entities(RequestUtil.extractPaging(uriInfo));
        return pagedResponse(Response.ok(), uriInfo, ret).build();
    }

    @PUT
    @Path("/{environmentId}/metrics/{metricId}")
    @ApiOperation("Updates a metric")
    @ApiResponses({
                          @ApiResponse(code = 204, message = "OK"),
                          @ApiResponse(code = 401, message = "Unauthorized access"),
                          @ApiResponse(code = 404, message = "Tenant, environment or the metric doesn't exist",
                                       response = ApiError.class),
                          @ApiResponse(code = 400, message = "The update failed because of invalid data"),
                          @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
                  })
    public Response updateMetric(@PathParam("environmentId") String environmentId,
                                 @PathParam("metricId") String metricId, Metric.Update update) {
        String tenantId = getTenantId();
        CanonicalPath env = CanonicalPath.of().tenant(tenantId).environment(environmentId).get();
        if (!security.canUpdate(env.extend(Metric.SEGMENT_TYPE, metricId).get())) {
            return Response.status(FORBIDDEN).build();
        }
        inventory.inspect(env, Environments.Single.class).metrics().update(metricId, update);
        return Response.noContent().build();
    }

    @PUT
    @Path("/feeds/{feedId}/metrics/{metricId}")
    @ApiOperation("Updates a metric")
    @ApiResponses({
                          @ApiResponse(code = 204, message = "OK"),
                          @ApiResponse(code = 401, message = "Unauthorized access"),
                          @ApiResponse(code = 404, message = "Tenant, feed or the metric doesn't exist",
                                       response = ApiError.class),
                          @ApiResponse(code = 400, message = "The update failed because of invalid data"),
                          @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
                  })
    public Response updateMetricInFeed(@PathParam("feedId") String feedId, @PathParam("metricId") String metricId,
                                       Metric.Update update) {
        String tenantId = getTenantId();
        CanonicalPath feed = CanonicalPath.of().tenant(tenantId).feed(feedId).get();
        if (!security.canUpdate(feed.extend(Metric.SEGMENT_TYPE, metricId).get())) {
            return Response.status(FORBIDDEN).build();
        }
        inventory.inspect(feed, Feeds.Single.class).metrics().update(metricId, update);
        return Response.noContent().build();
    }

    @DELETE
    @Path("/{environmentId}/metrics/{metricId}")
    @ApiOperation("Deletes a metric")
    @ApiResponses({
                          @ApiResponse(code = 204, message = "OK"),
                          @ApiResponse(code = 401, message = "Unauthorized access"),
                          @ApiResponse(code = 404, message = "Tenant, environment or the metric doesn't exist",
                                       response = ApiError.class),
                          @ApiResponse(code = 400,
                                       message = "The delete failed because it would make inventory invalid"),
                          @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
                  })
    public Response deleteMetric(@PathParam("environmentId") String environmentId,
                                 @PathParam("metricId") String metricId) {
        String tenantId = getTenantId();
        CanonicalPath env = CanonicalPath.of().tenant(tenantId).environment(environmentId).get();
        if (!security.canDelete(env.extend(Metric.SEGMENT_TYPE, metricId).get())) {
            return Response.status(FORBIDDEN).build();
        }
        inventory.inspect(env, Environments.Single.class).metrics().delete(metricId);
        return Response.noContent().build();
    }

    @DELETE
    @Path("/feeds/{feedId}/metrics/{metricId}")
    @ApiOperation("Deletes a metric")
    @ApiResponses({
                          @ApiResponse(code = 204, message = "OK"),
                          @ApiResponse(code = 404, message = "Tenant, feed or the metric doesn't exist",
                                       response = ApiError.class),
                          @ApiResponse(code = 400,
                                       message = "The delete failed because it would make inventory invalid"),
                          @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
                  })
    public Response deleteMetricInFeed(@PathParam("feedId") String feedId,
                                       @PathParam("metricId") String metricId) {
        String tenantId = getTenantId();
        CanonicalPath feed = CanonicalPath.of().tenant(tenantId).feed(feedId).get();
        if (!security.canDelete(feed.extend(Metric.SEGMENT_TYPE, metricId).get())) {
            return Response.status(FORBIDDEN).build();
        }
        inventory.inspect(feed, Feeds.Single.class).metrics().delete(metricId);
        return Response.noContent().build();
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.monitoring.exporter.http;
import java.util.Collections;
import java.util.Map;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.ResponseListener;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.core.CheckedFunction;
import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.monitoring.exporter.http.HttpResource.ResourcePublishResult;
import org.mockito.ArgumentCaptor;
import java.io.IOException;
import java.util.function.Supplier;
import static org.elasticsearch.xpack.monitoring.exporter.http.AsyncHttpResourceHelper.whenPerformRequestAsyncWith;
import static org.elasticsearch.xpack.monitoring.exporter.http.AsyncHttpResourceHelper.wrapMockListener;
import static org.hamcrest.Matchers.is;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
/**
 * Tests {@link PublishableHttpResource}.
 */
public class PublishableHttpResourceTests extends AbstractPublishableHttpResourceTestCase {
    // Fixed identifiers used to build endpoints and assert on parameterized log messages.
    private final String ownerType = "ownerthing";
    private final String resourceBasePath = "/_fake";
    private final String resourceName = ".my_thing";
    private final String resourceType = "thingamajig";
    // Mocked collaborators; every interaction is pinned via verifyNoMoreInteractions below.
    private final Logger logger = mock(Logger.class);
    private final HttpEntity entity = mock(HttpEntity.class);
    private final Supplier<HttpEntity> body = () -> entity;
    // Resource under test; NO_BODY_PARAMETERS: checks/deletes carry only the default parameters.
    private final PublishableHttpResource resource =
        new MockHttpResource(owner, masterTimeout, PublishableHttpResource.NO_BODY_PARAMETERS);
    // 2xx check status -> resource reported as existing.
    public void testCheckForResourceExists() throws IOException {
        assertCheckForResource(successfulCheckStatus(), true, "{} [{}] found on the [{}] {}");
    }
    // 404 check status -> resource reported as missing (not an error).
    public void testCheckForResourceDoesNotExist() throws IOException {
        assertCheckForResource(notFoundCheckStatus(), false, "{} [{}] does not exist on the [{}] {}");
    }
    // Any other status is treated as an error: listener gets null and the error is logged.
    public void testCheckForResourceUnexpectedResponse() throws IOException {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final RestStatus failedStatus = failedCheckStatus();
        final Response response = response("GET", endpoint, failedStatus);
        final Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getDefaultParameters()));
        whenPerformRequestAsyncWith(client, request, response);
        assertCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, null, response);
        verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class));
        verifyNoMoreInteractions(client, logger);
    }
    // Version check: resource exists and version is current.
    public void testVersionCheckForResourceExists() {
        assertVersionCheckForResource(successfulCheckStatus(), true, randomInt(), "{} [{}] found on the [{}] {}");
    }
    // Version check fails either because the resource is absent or because it is outdated.
    public void testVersionCheckForResourceDoesNotExist() {
        if (randomBoolean()) {
            // it literally does not exist
            assertVersionCheckForResource(notFoundCheckStatus(), false,
                randomInt(), "{} [{}] does not exist on the [{}] {}");
        } else {
            // it DOES exist, but the version needs to be replaced
            assertVersionCheckForResource(successfulCheckStatus(), false,
                randomInt(), "{} [{}] found on the [{}] {}");
        }
    }
    // Unexpected status during version check -> error path (listener receives null).
    public void testVersionCheckForResourceUnexpectedResponse() {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final RestStatus failedStatus = failedCheckStatus();
        final Response response = response("GET", endpoint, failedStatus);
        final XContent xContent = mock(XContent.class);
        final int minimumVersion = randomInt();
        final Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getDefaultParameters()));
        whenPerformRequestAsyncWith(client, request, response);
        resource.versionCheckForResource(client, wrapMockListener(checkListener), logger,
            resourceBasePath, resourceName, resourceType, owner, ownerType,
            xContent, minimumVersion);
        verifyCheckListener(null);
        verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class));
        verifyNoMoreInteractions(client, logger);
    }
    // A 200 response whose body cannot be parsed for a version is also an error
    // (two error logs: one for the parse failure, one for the overall failure).
    public void testVersionCheckForResourceMalformedResponse() {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final RestStatus okStatus = successfulCheckStatus();
        final int minimumVersion = randomInt();
        final HttpEntity entity = entityForResource(null, resourceName, minimumVersion);
        final Response response = response("GET", endpoint, okStatus, entity);
        final XContent xContent = mock(XContent.class);
        final Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getDefaultParameters()));
        whenPerformRequestAsyncWith(client, request, response);
        resource.versionCheckForResource(client, wrapMockListener(checkListener), logger,
            resourceBasePath, resourceName, resourceType, owner, ownerType,
            xContent, minimumVersion);
        verifyCheckListener(null);
        verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(logger).debug("{} [{}] found on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(logger, times(2)).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class));
        verifyNoMoreInteractions(client, logger);
    }
    // Exceptions thrown by the async request (IO/Runtime/ResponseException) all surface as errors.
    public void testCheckForResourceErrors() throws IOException {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final RestStatus failedStatus = failedCheckStatus();
        final ResponseException responseException = responseException("GET", endpoint, failedStatus);
        final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException);
        final Response response = e == responseException ? responseException.getResponse() : null;
        final Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getDefaultParameters()));
        whenPerformRequestAsyncWith(client, request, e);
        assertCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, null, response);
        verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e));
        verifyNoMoreInteractions(client, logger);
    }
    // PUT succeeds -> publish listener notified as "ready".
    public void testPutResourceTrue() {
        assertPutResource(successfulPublishStatus(), true);
    }
    // PUT fails -> publish listener gets null and the failure is logged.
    public void testPutResourceFalse() {
        assertPutResource(failedPublishStatus(), false);
    }
    // PUT throwing an exception behaves like a failed PUT (null result, error logged).
    public void testPutResourceFalseWithException() {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"));
        final Request request = new Request("PUT", endpoint);
        addParameters(request, resource.getDefaultParameters());
        request.setEntity(entity);
        whenPerformRequestAsyncWith(client, request, e);
        final Map<String, String> parameters = Collections.emptyMap();
        resource.putResource(client, wrapMockListener(publishListener), logger, resourceBasePath, resourceName, parameters, body,
                             resourceType, owner, ownerType);
        verifyPublishListener(null);
        verify(logger).trace("uploading {} [{}] to the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e));
        verifyNoMoreInteractions(client, logger);
    }
    // DELETE is considered successful on 2xx AND on 404 (already gone).
    public void testDeleteResourceTrue() {
        final RestStatus status = randomFrom(successfulCheckStatus(), notFoundCheckStatus());
        assertDeleteResource(status, true);
    }
    // Any other DELETE status is a failure.
    public void testDeleteResourceFalse() {
        assertDeleteResource(failedCheckStatus(), false);
    }
    // Exceptions during DELETE surface as errors on the check listener.
    public void testDeleteResourceErrors() {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final RestStatus failedStatus = failedCheckStatus();
        final ResponseException responseException = responseException("DELETE", endpoint, failedStatus);
        final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException);
        final Map<String, String> deleteParameters = deleteParameters(resource.getDefaultParameters());
        final Request request = new Request("DELETE", endpoint);
        addParameters(request, deleteParameters);
        whenPerformRequestAsyncWith(client, request, e);
        resource.deleteResource(client, wrapMockListener(checkListener), logger, resourceBasePath, resourceName, resourceType, owner,
                                ownerType);
        verifyCheckListener(null);
        verify(logger).trace("deleting {} [{}] from the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e));
        verifyNoMoreInteractions(client, logger);
    }
    // Default parameter set sanity check (delegates to the base test case).
    public void testParameters() {
        assertParameters(resource);
    }
    // A check error (null) must short-circuit: publish is never attempted.
    public void testDoCheckAndPublishIgnoresPublishWhenCheckErrors() {
        final PublishableHttpResource resource =
            new MockHttpResource(owner, masterTimeout, PublishableHttpResource.NO_BODY_PARAMETERS, null, true);
        resource.doCheckAndPublish(client, wrapMockListener(publishListener));
        verifyPublishListener(null);
    }
    // Overall success == "already existed" OR "publish succeeded".
    public void testDoCheckAndPublish() {
        // not an error (the third state)
        final boolean exists = randomBoolean();
        final boolean publish = randomBoolean();
        final PublishableHttpResource resource =
            new MockHttpResource(owner, masterTimeout, PublishableHttpResource.NO_BODY_PARAMETERS, exists, publish);
        resource.doCheckAndPublish(client, wrapMockListener(publishListener));
        verifyPublishListener(new ResourcePublishResult(exists || publish));
    }
    // IOExceptions from reading the entity must propagate (not be swallowed).
    public void testShouldReplaceResourceRethrowsIOException() throws IOException {
        final Response response = mock(Response.class);
        final HttpEntity entity = mock(HttpEntity.class);
        final XContent xContent = mock(XContent.class);
        when(response.getEntity()).thenReturn(entity);
        when(entity.getContent()).thenThrow(new IOException("TEST - expected"));
        expectThrows(IOException.class, () -> resource.shouldReplaceResource(response, xContent, resourceName, randomInt()));
    }
    // A response without a parseable version is malformed -> RuntimeException.
    public void testShouldReplaceResourceThrowsExceptionForMalformedResponse() {
        final Response response = mock(Response.class);
        final HttpEntity entity = entityForResource(null, resourceName, randomInt());
        final XContent xContent = XContentType.JSON.xContent();
        when(response.getEntity()).thenReturn(entity);
        expectThrows(RuntimeException.class, () -> resource.shouldReplaceResource(response, xContent, resourceName, randomInt()));
    }
    // A response explicitly indicating an unexpected version -> replace.
    public void testShouldReplaceResourceReturnsTrueVersionIsNotExpected() throws IOException {
        final int minimumVersion = randomInt();
        final Response response = mock(Response.class);
        final HttpEntity entity = entityForResource(false, resourceName, minimumVersion);
        final XContent xContent = XContentType.JSON.xContent();
        when(response.getEntity()).thenReturn(entity);
        assertThat(resource.shouldReplaceResource(response, xContent, resourceName, minimumVersion), is(true));
    }
    // Replace exactly when the reported version is below the required minimum.
    public void testShouldReplaceResourceChecksVersion() throws IOException {
        final int minimumVersion = randomInt();
        final int version = randomInt();
        final boolean shouldReplace = version < minimumVersion;
        final Response response = mock(Response.class);
        // { "resourceName": { "version": randomLong } }
        final HttpEntity entity =
                new StringEntity("{\"" + resourceName + "\":{\"version\":" + version + "}}", ContentType.APPLICATION_JSON);
        final XContent xContent = XContentType.JSON.xContent();
        when(response.getEntity()).thenReturn(entity);
        assertThat(resource.shouldReplaceResource(response, xContent, resourceName, minimumVersion), is(shouldReplace));
    }
    /**
     * Drives checkForResource for a given status and verifies the exact set of
     * client/logger/response interactions.
     *
     * @param status          HTTP status the mocked response returns
     * @param expected        true = found, false = not found, null = error path
     * @param debugLogMessage parameterized message expected at debug level
     */
    @SuppressLoggerChecks(reason = "mock logger used")
    private void assertCheckForResource(final RestStatus status, final Boolean expected, final String debugLogMessage)
            throws IOException {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final Response response = response("GET", endpoint, status);
        final Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getDefaultParameters()));
        whenPerformRequestAsyncWith(client, request, response);
        assertCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, expected, response);
        verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        if (expected != null) {
            verify(response).getStatusLine();
        } else {
            // error path additionally reads request line / host / entity to build the log message
            verify(response).getStatusLine();
            verify(response).getRequestLine();
            verify(response).getHost();
            verify(response).getEntity();
        }
        verify(logger).debug(debugLogMessage, resourceType, resourceName, owner, ownerType);
        verifyNoMoreInteractions(client, response, logger);
    }
    /**
     * Drives versionCheckForResource and verifies interactions for the
     * found / outdated / not-found outcomes.
     */
    @SuppressLoggerChecks(reason = "mock logger used")
    private void assertVersionCheckForResource(final RestStatus status, final Boolean expected,
                                               final int minimumVersion,
                                               final String debugLogMessage) {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        // only a 200 with expected == FALSE means "exists but outdated"
        final boolean shouldReplace = status == RestStatus.OK && expected == Boolean.FALSE;
        final HttpEntity entity = status == RestStatus.OK ? entityForResource(expected, resourceName, minimumVersion) : null;
        final Response response = response("GET", endpoint, status, entity);
        final XContent xContent = XContentType.JSON.xContent();
        final Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getDefaultParameters()));
        whenPerformRequestAsyncWith(client, request, response);
        resource.versionCheckForResource(client, wrapMockListener(checkListener), logger,
            resourceBasePath, resourceName, resourceType, owner, ownerType,
            xContent, minimumVersion);
        verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        // NOTE(review): 'shouldReplace || expected' unboxes 'expected' and would NPE for null;
        // current callers only ever pass TRUE/FALSE, so the 'expected == null' branch is unreachable.
        if (shouldReplace || expected) {
            verify(response).getStatusLine();
            verify(response).getEntity();
        } else if (expected == false) {
            verify(response).getStatusLine();
        } else { // expected == null
            verify(response).getStatusLine();
            verify(response).getRequestLine();
            verify(response).getHost();
            verify(response).getEntity();
        }
        verifyCheckListener(expected);
        verify(logger).debug(debugLogMessage, resourceType, resourceName, owner, ownerType);
        verifyNoMoreInteractions(client, response, logger);
    }
    /**
     * Drives putResource and verifies success (ready result, debug log) versus
     * failure (null result, error log with the endpoint and status in the message).
     */
    private void assertPutResource(final RestStatus status, final boolean errorFree) {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final Response response = response("PUT", endpoint, status);
        final Request request = new Request("PUT", endpoint);
        addParameters(request, resource.getDefaultParameters());
        request.setEntity(entity);
        whenPerformRequestAsyncWith(client, request, response);
        final Map<String, String> parameters = Collections.emptyMap();
        resource.putResource(client, wrapMockListener(publishListener), logger, resourceBasePath, resourceName, parameters, body,
                             resourceType, owner, ownerType);
        verifyPublishListener(errorFree ? ResourcePublishResult.ready() : null);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(response).getStatusLine();
        verify(logger).trace("uploading {} [{}] to the [{}] {}", resourceType, resourceName, owner, ownerType);
        if (errorFree) {
            verify(logger).debug("{} [{}] uploaded to the [{}] {}", resourceType, resourceName, owner, ownerType);
        } else {
            // capture the logged exception to assert on its exact message
            ArgumentCaptor<RuntimeException> e = ArgumentCaptor.forClass(RuntimeException.class);
            verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), e.capture());
            assertThat(e.getValue().getMessage(),
                       is("[" + resourceBasePath + "/" + resourceName + "] responded with [" + status.getStatus() + "]"));
        }
        verifyNoMoreInteractions(client, response, logger, entity);
    }
    /**
     * Calls checkForResource with mocked response checkers and verifies that only
     * the checker matching the outcome (exists / does-not-exist / error) is consulted.
     */
    @SuppressWarnings("unchecked")
    private void assertCheckForResource(final RestClient client, final Logger logger,
                                        final String resourceBasePath, final String resourceName, final String resourceType,
                                        final Boolean expected, final Response response)
            throws IOException {
        final CheckedFunction<Response, Boolean, IOException> responseChecker = mock(CheckedFunction.class);
        final CheckedFunction<Response, Boolean, IOException> dneResponseChecker = mock(CheckedFunction.class);
        if (expected != null) {
            // invert expected to keep the same value
            when(responseChecker.apply(response)).thenReturn(false == expected);
            when(dneResponseChecker.apply(response)).thenReturn(false == expected);
        }
        resource.checkForResource(client, wrapMockListener(checkListener), logger, resourceBasePath, resourceName, resourceType, owner,
                                  ownerType, PublishableHttpResource.GET_EXISTS, PublishableHttpResource.GET_DOES_NOT_EXIST,
                                  responseChecker, dneResponseChecker);
        if (expected == Boolean.TRUE) {
            verify(responseChecker).apply(response);
            verifyZeroInteractions(dneResponseChecker);
        } else if (expected == Boolean.FALSE) {
            verifyZeroInteractions(responseChecker);
            verify(dneResponseChecker).apply(response);
        } else {
            verifyZeroInteractions(responseChecker, dneResponseChecker);
        }
        verifyCheckListener(expected);
    }
    /**
     * Drives deleteResource and verifies success (debug log, true on the listener)
     * versus failure (error log with endpoint/status, null on the listener).
     */
    private void assertDeleteResource(final RestStatus status, final boolean expected) {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final Response response = response("DELETE", endpoint, status);
        final Map<String, String> deleteParameters = deleteParameters(resource.getDefaultParameters());
        final Request request = new Request("DELETE", endpoint);
        addParameters(request, deleteParameters);
        whenPerformRequestAsyncWith(client, request, response);
        resource.deleteResource(client, wrapMockListener(checkListener), logger, resourceBasePath, resourceName, resourceType, owner,
                                ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(response).getStatusLine();
        verify(logger).trace("deleting {} [{}] from the [{}] {}", resourceType, resourceName, owner, ownerType);
        if (expected) {
            verify(logger).debug("{} [{}] deleted from the [{}] {}", resourceType, resourceName, owner, ownerType);
            verifyCheckListener(true);
        } else {
            // capture the logged exception to assert on its exact message
            ArgumentCaptor<RuntimeException> e = ArgumentCaptor.forClass(RuntimeException.class);
            verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), e.capture());
            assertThat(e.getValue().getMessage(),
                       is("[" + resourceBasePath + "/" + resourceName + "] responded with [" + status.getStatus() + "]"));
            verifyCheckListener(null);
        }
        verifyNoMoreInteractions(client, response, logger, entity);
    }
}
| |
/*
* Copyright (c) 2002-2012, the original author or authors.
*
* This software is distributable under the BSD license. See the terms of the
* BSD license in the documentation provided with this software.
*
* http://www.opensource.org/licenses/bsd-license.php
*/
package jline;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import jline.internal.Configuration;
import jline.internal.Log;
import org.fusesource.jansi.internal.WindowsSupport;
import org.fusesource.jansi.internal.Kernel32;
import static org.fusesource.jansi.internal.Kernel32.*;
import static jline.WindowsTerminal.ConsoleMode.ENABLE_ECHO_INPUT;
import static jline.WindowsTerminal.ConsoleMode.ENABLE_LINE_INPUT;
import static jline.WindowsTerminal.ConsoleMode.ENABLE_PROCESSED_INPUT;
import static jline.WindowsTerminal.ConsoleMode.ENABLE_WINDOW_INPUT;
import static jline.internal.Preconditions.checkNotNull;
/**
 * Terminal implementation for Microsoft Windows. Terminal initialization in
 * {@link #init} is accomplished by extracting the
 * <em>jline_<i>version</i>.dll</em>, saving it to the system temporary
 * directory (determined by the setting of the <em>java.io.tmpdir</em> System
 * property), loading the library, and then calling the Win32 APIs <a
 * href="http://msdn.microsoft.com/library/default.asp?
 * url=/library/en-us/dllproc/base/setconsolemode.asp">SetConsoleMode</a> and
 * <a href="http://msdn.microsoft.com/library/default.asp?
 * url=/library/en-us/dllproc/base/getconsolemode.asp">GetConsoleMode</a> to
 * disable character echoing.
 * <p/>
 * <p>
 * By default, the {@link #wrapInIfNeeded(java.io.InputStream)} method will attempt
 * to test to see if the specified {@link InputStream} is {@link System#in} or a wrapper
 * around {@link FileDescriptor#in}, and if so, will bypass the character reading to
 * directly invoke the readc() method in the JNI library. This is so the class
 * can read special keys (like arrow keys) which are otherwise inaccessible via
 * the {@link System#in} stream. Using JNI reading can be bypassed by setting
 * the <code>jline.WindowsTerminal.directConsole</code> system property
 * to <code>false</code>.
 * </p>
 *
 * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
 * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
 * @since 2.0
 */
public class WindowsTerminal
    extends TerminalSupport
{
    // System property names controlling direct-console reads and ANSI support.
    public static final String DIRECT_CONSOLE = WindowsTerminal.class.getName() + ".directConsole";
    public static final String ANSI = WindowsTerminal.class.getName() + ".ansi";
    // When true, wrapInIfNeeded() reads the console via JNI instead of System.in.
    private boolean directConsole;
    // Console mode captured in init() so restore() can put it back.
    private int originalMode;
    public WindowsTerminal() throws Exception {
        super(true);
    }
    /**
     * Captures the current console mode, then disables echo at both the
     * Win32 console level and the TerminalSupport level.
     */
    @Override
    public void init() throws Exception {
        super.init();
        setAnsiSupported(Configuration.getBoolean(ANSI, true));
        //
        // FIXME: Need a way to disable direct console and sysin detection muck
        //
        setDirectConsole(Configuration.getBoolean(DIRECT_CONSOLE, true));
        this.originalMode = getConsoleMode();
        setConsoleMode(originalMode & ~ENABLE_ECHO_INPUT.code);
        setEchoEnabled(false);
    }
    /**
     * Restore the original terminal configuration, which can be used when
     * shutting down the console reader. The ConsoleReader cannot be
     * used after calling this method.
     */
    @Override
    public void restore() throws Exception {
        // restore the old console mode
        setConsoleMode(originalMode);
        super.restore();
    }
    /**
     * Width of the console window, falling back to {@code DEFAULT_WIDTH}
     * when the native call reports a non-positive value.
     */
    @Override
    public int getWidth() {
        int w = getWindowsTerminalWidth();
        return w < 1 ? DEFAULT_WIDTH : w;
    }
    /**
     * Height of the console window, falling back to {@code DEFAULT_HEIGHT}
     * when the native call reports a non-positive value.
     */
    @Override
    public int getHeight() {
        int h = getWindowsTerminalHeight();
        return h < 1 ? DEFAULT_HEIGHT : h;
    }
    @Override
    public void setEchoEnabled(final boolean enabled) {
        // Must set these four modes at the same time to make it work fine.
        if (enabled) {
            setConsoleMode(getConsoleMode() |
                ENABLE_ECHO_INPUT.code |
                ENABLE_LINE_INPUT.code |
                ENABLE_PROCESSED_INPUT.code |
                ENABLE_WINDOW_INPUT.code);
        }
        else {
            setConsoleMode(getConsoleMode() &
                ~(ENABLE_LINE_INPUT.code |
                    ENABLE_ECHO_INPUT.code |
                    ENABLE_PROCESSED_INPUT.code |
                    ENABLE_WINDOW_INPUT.code));
        }
        super.setEchoEnabled(enabled);
    }
    /**
     * Whether or not to allow the use of the JNI console interaction.
     */
    public void setDirectConsole(final boolean flag) {
        this.directConsole = flag;
        Log.debug("Direct console: ", flag);
    }
    /**
     * Whether or not to allow the use of the JNI console interaction.
     */
    // NOTE(review): returns boxed Boolean although the field is a primitive
    // boolean; callers may rely on the Boolean signature, so left unchanged.
    public Boolean getDirectConsole() {
        return directConsole;
    }
    /**
     * Wraps System.in (or a FileInputStream on FD 0) with a stream that reads
     * key events directly from the Win32 console via JNI, so special keys
     * (arrows, Home/End, etc.) become ANSI escape sequences. Other streams are
     * delegated to the superclass.
     */
    @Override
    public InputStream wrapInIfNeeded(InputStream in) throws IOException {
        if (directConsole && isSystemIn(in)) {
            return new InputStream() {
                // Bytes produced by the last readConsoleInput() call, consumed one at a time.
                private byte[] buf = null;
                int bufIdx = 0;
                @Override
                public int read() throws IOException {
                    // Loop until a console event yields at least one byte
                    // (key-up events and non-character keys can produce none).
                    while (buf == null || bufIdx == buf.length) {
                        buf = readConsoleInput();
                        bufIdx = 0;
                    }
                    int c = buf[bufIdx] & 0xFF;
                    bufIdx++;
                    return c;
                }
            };
        } else {
            return super.wrapInIfNeeded(in);
        }
    }
    /**
     * @return true if {@code in} is System.in itself, or a FileInputStream
     *         whose descriptor is {@link FileDescriptor#in}.
     */
    protected boolean isSystemIn(final InputStream in) throws IOException {
        if (in == null) {
            return false;
        }
        else if (in == System.in) {
            return true;
        }
        else if (in instanceof FileInputStream && ((FileInputStream) in).getFD() == FileDescriptor.in) {
            return true;
        }
        return false;
    }
    /**
     * Derives a Java charset name from the console output code page, trying
     * the "ms" alias before the "cp" alias; falls back to the superclass
     * when neither is supported by the JVM.
     */
    @Override
    public String getOutputEncoding() {
        int codepage = getConsoleOutputCodepage();
        //http://docs.oracle.com/javase/6/docs/technotes/guides/intl/encoding.doc.html
        String charsetMS = "ms" + codepage;
        if (java.nio.charset.Charset.isSupported(charsetMS)) {
            return charsetMS;
        }
        String charsetCP = "cp" + codepage;
        if (java.nio.charset.Charset.isSupported(charsetCP)) {
            return charsetCP;
        }
        Log.debug("can't figure out the Java Charset of this code page (" + codepage + ")...");
        return super.getOutputEncoding();
    }
    //
    // Native Bits
    //
    private int getConsoleMode() {
        return WindowsSupport.getConsoleMode();
    }
    private void setConsoleMode(int mode) {
        WindowsSupport.setConsoleMode(mode);
    }
    /**
     * Reads one console input record via JNI and translates it to bytes:
     * printable characters pass through (with an ESC prefix for ALT chords),
     * and basic editing/navigation keys become ANSI escape sequences.
     * Returns an empty array when the read fails or yields nothing.
     */
    private byte[] readConsoleInput() {
        // XXX does how many events to read in one call matter?
        INPUT_RECORD[] events = null;
        try {
            events = WindowsSupport.readConsoleInput(1);
        } catch (IOException e) {
            Log.debug("read Windows console input error: ", e);
        }
        if (events == null) {
            return new byte[0];
        }
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < events.length; i++ ) {
            KEY_EVENT_RECORD keyEvent = events[i].keyEvent;
            //Log.trace(keyEvent.keyDown? "KEY_DOWN" : "KEY_UP", "key code:", keyEvent.keyCode, "char:", (long)keyEvent.uchar);
            if (keyEvent.keyDown) {
                if (keyEvent.uchar > 0) {
                    // support some C1 control sequences: ALT + [@-_] (and [a-z]?) => ESC <ascii>
                    // http://en.wikipedia.org/wiki/C0_and_C1_control_codes#C1_set
                    int altState = KEY_EVENT_RECORD.LEFT_ALT_PRESSED | KEY_EVENT_RECORD.RIGHT_ALT_PRESSED;
                    if (((keyEvent.uchar >= '@' && keyEvent.uchar <= '_') || (keyEvent.uchar >= 'a' && keyEvent.uchar <= 'z'))
                        && (keyEvent.controlKeyState & altState) != 0) {
                        sb.append('\u001B'); // ESC
                    }
                    sb.append(keyEvent.uchar);
                    continue;
                }
                // virtual keycodes: http://msdn.microsoft.com/en-us/library/windows/desktop/dd375731(v=vs.85).aspx
                // just add support for basic editing keys (no control state, no numpad keys)
                String escapeSequence = null;
                switch (keyEvent.keyCode) {
                    case 0x21: // VK_PRIOR PageUp
                        escapeSequence = "\u001B[5~";
                        break;
                    case 0x22: // VK_NEXT PageDown
                        escapeSequence = "\u001B[6~";
                        break;
                    case 0x23: // VK_END
                        escapeSequence = "\u001B[4~";
                        break;
                    case 0x24: // VK_HOME
                        escapeSequence = "\u001B[1~";
                        break;
                    case 0x25: // VK_LEFT
                        escapeSequence = "\u001B[D";
                        break;
                    case 0x26: // VK_UP
                        escapeSequence = "\u001B[A";
                        break;
                    case 0x27: // VK_RIGHT
                        escapeSequence = "\u001B[C";
                        break;
                    case 0x28: // VK_DOWN
                        escapeSequence = "\u001B[B";
                        break;
                    case 0x2D: // VK_INSERT
                        escapeSequence = "\u001B[2~";
                        break;
                    case 0x2E: // VK_DELETE
                        escapeSequence = "\u001B[3~";
                        break;
                    default:
                        break;
                }
                if (escapeSequence != null) {
                    // honor the key-repeat count reported by the console
                    for (int k = 0; k < keyEvent.repeatCount; k++) {
                        sb.append(escapeSequence);
                    }
                }
            } else {
                // key up event
                // support ALT+NumPad input method
                if (keyEvent.keyCode == 0x12/*VK_MENU ALT key*/ && keyEvent.uchar > 0) {
                    sb.append(keyEvent.uchar);
                }
            }
        }
        // NOTE(review): uses the platform default charset; presumably intended, but
        // non-ASCII input could be mis-encoded on unusual default charsets — verify.
        return sb.toString().getBytes();
    }
    private int getConsoleOutputCodepage() {
        return Kernel32.GetConsoleOutputCP();
    }
    private int getWindowsTerminalWidth() {
        return WindowsSupport.getWindowsTerminalWidth();
    }
    private int getWindowsTerminalHeight() {
        return WindowsSupport.getWindowsTerminalHeight();
    }
    /**
     * Console mode
     * <p/>
     * Constants copied from <tt>wincon.h</tt>.
     */
    public static enum ConsoleMode
    {
        /**
         * The ReadFile or ReadConsole function returns only when a carriage return
         * character is read. If this mode is disabled, the functions return when one
         * or more characters are available.
         */
        ENABLE_LINE_INPUT(2),
        /**
         * Characters read by the ReadFile or ReadConsole function are written to
         * the active screen buffer as they are read. This mode can be used only if
         * the ENABLE_LINE_INPUT mode is also enabled.
         */
        ENABLE_ECHO_INPUT(4),
        /**
         * CTRL+C is processed by the system and is not placed in the input buffer.
         * If the input buffer is being read by ReadFile or ReadConsole, other
         * control keys are processed by the system and are not returned in the
         * ReadFile or ReadConsole buffer. If the ENABLE_LINE_INPUT mode is also
         * enabled, backspace, carriage return, and linefeed characters are handled
         * by the system.
         */
        ENABLE_PROCESSED_INPUT(1),
        /**
         * User interactions that change the size of the console screen buffer are
         * reported in the console's input buffer. Information about these events
         * can be read from the input buffer by applications using
         * the ReadConsoleInput function, but not by those using ReadFile
         * or ReadConsole.
         */
        ENABLE_WINDOW_INPUT(8),
        /**
         * If the mouse pointer is within the borders of the console window and the
         * window has the keyboard focus, mouse events generated by mouse movement
         * and button presses are placed in the input buffer. These events are
         * discarded by ReadFile or ReadConsole, even when this mode is enabled.
         */
        ENABLE_MOUSE_INPUT(16),
        /**
         * Output mode flag: characters written by WriteFile or WriteConsole are
         * parsed for ASCII control sequences (backspace, tab, bell, carriage
         * return, linefeed) and the correct action is performed.
         * (NOTE(review): the original comment here described ENABLE_INSERT_MODE;
         * the value 1 matches ENABLE_PROCESSED_OUTPUT in wincon.h.)
         */
        ENABLE_PROCESSED_OUTPUT(1),
        /**
         * Output mode flag: when writing with WriteFile or WriteConsole, the
         * cursor moves to the beginning of the next row when it reaches the end
         * of the current row, scrolling the buffer as needed.
         * (NOTE(review): the original comment here described ENABLE_QUICK_EDIT_MODE;
         * the value 2 matches ENABLE_WRAP_AT_EOL_OUTPUT in wincon.h.)
         */
        ENABLE_WRAP_AT_EOL_OUTPUT(2),;
        public final int code;
        ConsoleMode(final int code) {
            this.code = code;
        }
    }
}
| |
/*
* Copyright 2013 Zakhar Prykhoda
*
* midao.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.midao.jdbc.core.transaction;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.midao.jdbc.core.MjdbcConfig;
import org.midao.jdbc.core.MjdbcFactory;
import org.midao.jdbc.core.handlers.input.query.QueryInputHandler;
import org.midao.jdbc.core.handlers.model.QueryParameters;
import org.midao.jdbc.core.handlers.output.MapOutputHandler;
import org.midao.jdbc.core.service.QueryRunnerService;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import javax.sql.DataSource;
import java.sql.*;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
/**
 * Tests {@link BaseTransactionHandler} commit/rollback behavior in manual and
 * automatic transaction modes, using mocked JDBC objects.
 */
public class BaseTransactionHandlerTest {
    // Mocked JDBC collaborators, initialized in setUp() via MockitoAnnotations.
    @Mock
    TransactionHandler transactionHandler;
    @Mock
    Connection conn;
    @Mock
    Statement statement;
    @Mock
    PreparedStatement preparedStatement;
    @Mock
    CallableStatement callableStatement;
    @Mock
    DataSource ds;
    // Dummy SQL; never executed for real since every JDBC object is mocked.
    String sql = "INSERT luck INTO world;";
    MapOutputHandler outputHandler = new MapOutputHandler();
    QueryInputHandler inputHandler = new QueryInputHandler(sql, new QueryParameters());
    /**
     * Wires the mocked DataSource/Connection/Statement chain and installs
     * {@link BaseTransactionHandler} as the default transaction handler.
     */
    @Before
    public void setUp() throws Exception {
        MockitoAnnotations.initMocks(this); // init mocks
        when(ds.getConnection()).thenReturn(conn);
        when(conn.createStatement()).thenReturn(statement);
        when(conn.prepareStatement(any(String.class))).thenReturn(preparedStatement);
        when(conn.prepareCall(any(String.class))).thenReturn(callableStatement);
        MjdbcConfig.setDefaultTransactionHandler(BaseTransactionHandler.class);
    }
    /** No per-test cleanup needed; mocks are re-created in {@link #setUp()}. */
    @After
    public void tearDown() throws Exception {
    }
    /** Manual mode: update does not auto-commit; explicit commit closes the connection. */
    @Test
    public void testManualModeUpdateDS() throws SQLException {
        testManualModeDS("update", "commit");
    }
    /** Manual mode: query does not auto-commit; explicit commit closes the connection. */
    @Test
    public void testManualModeQueryDS() throws SQLException {
        testManualModeDS("query", "commit");
    }
    /** Manual mode: batch does not auto-commit; explicit commit closes the connection. */
    @Test
    public void testManualModeBatchDS() throws SQLException {
        testManualModeDS("batch", "commit");
    }
    /** Manual mode: stored-procedure call does not auto-commit; explicit commit closes the connection. */
    @Test
    public void testManualModeCallDS() throws SQLException {
        testManualModeDS("call", "commit");
    }
    /** Manual mode: explicit rollback after an update also closes the connection. */
    @Test
    public void testManualModeUpdateRollbackDS() throws SQLException {
        testManualModeDS("update", "rollback");
    }
    /** Manual mode: explicit rollback after a query also closes the connection. */
    @Test
    public void testManualModeQueryRollbackDS() throws SQLException {
        testManualModeDS("query", "rollback");
    }
    /** Manual mode: explicit rollback after a batch also closes the connection. */
    @Test
    public void testManualModeBatchRollbackDS() throws SQLException {
        testManualModeDS("batch", "rollback");
    }
    /** Manual mode: explicit rollback after a stored-procedure call also closes the connection. */
    @Test
    public void testManualModeCallRollbackDS() throws SQLException {
        testManualModeDS("call", "rollback");
    }
private void testManualModeDS(String operation, String type) throws SQLException {
QueryRunnerService queryRunner = null;
queryRunner = MjdbcFactory.getQueryRunner(ds);
queryRunner.setTransactionManualMode(true);
if ("update".equals(operation) == true) {
queryRunner.update(sql);
} else if ("query".equals(operation) == true) {
queryRunner.query(sql, outputHandler);
} else if ("batch".equals(operation) == true) {
queryRunner.batch(sql, new Object[0][0]);
} else if ("call".equals(operation) == true) {
queryRunner.call(inputHandler);
} else {
fail();
}
verify(conn, never()).close();
// in this scenario close should be invoked only with commit
if ("commit".equals(type) == true) {
queryRunner.commit();
verify(conn, times(1)).commit();
} else if ("rollback".equals(type) == true) {
queryRunner.rollback();
verify(conn, times(1)).rollback();
} else {
fail();
}
verify(ds, times(1)).getConnection();
verify(conn, times(1)).close();
queryRunner.setTransactionManualMode(false);
if ("update".equals(operation) == true) {
queryRunner.update(sql);
} else if ("query".equals(operation) == true) {
queryRunner.query(sql, outputHandler);
} else if ("batch".equals(operation) == true) {
queryRunner.batch(sql, new Object[0][0]);
} else if ("call".equals(operation) == true) {
queryRunner.call(inputHandler);
} else {
fail();
}
// in this scenario commit/close should be invoked after every operation
verify(conn, times(2)).close();
if ("commit".equals(type) == true) {
verify(conn, times(2)).commit();
} else if ("rollback".equals(type) == true) {
verify(conn, times(1)).commit();
verify(conn, times(1)).rollback();
} else {
fail();
}
if ("commit".equals(type) == true) {
queryRunner.commit();
// checking if there were no additional invocations
verify(conn, times(2)).commit();
} else if ("rollback".equals(type) == true) {
queryRunner.rollback();
// rollback shouldn't be invoked as commit is already performed
verify(conn, times(1)).rollback();
} else {
fail();
}
verify(ds, times(2)).getConnection();
verify(conn, times(2)).close();
}
@Test
public void testManualModeUpdateConn() throws SQLException {
testManualModeConn("update", "commit");
}
@Test
public void testManualModeQueryConn() throws SQLException {
testManualModeConn("query", "commit");
}
@Test
public void testManualModeBatchConn() throws SQLException {
testManualModeConn("batch", "commit");
}
@Test
public void testManualModeCallConn() throws SQLException {
testManualModeConn("call", "commit");
}
@Test
public void testManualModeUpdateRollbackConn() throws SQLException {
testManualModeConn("update", "rollback");
}
@Test
public void testManualModeQueryRollbackConn() throws SQLException {
testManualModeConn("query", "rollback");
}
@Test
public void testManualModeBatchRollbackConn() throws SQLException {
testManualModeConn("batch", "rollback");
}
@Test
public void testManualModeCallRollbackConn() throws SQLException {
testManualModeConn("call", "rollback");
}
private void testManualModeConn(String operation, String type) throws SQLException {
QueryRunnerService queryRunner = null;
queryRunner = MjdbcFactory.getQueryRunner(conn);
queryRunner.setTransactionManualMode(true);
if ("update".equals(operation) == true) {
queryRunner.update(sql);
} else if ("query".equals(operation) == true) {
queryRunner.query(sql, outputHandler);
} else if ("batch".equals(operation) == true) {
queryRunner.batch(sql, new Object[0][0]);
} else if ("call".equals(operation) == true) {
queryRunner.call(inputHandler);
} else {
fail();
}
// if transaction manager is created based on connection - it should never close it.
verify(conn, never()).close();
if ("commit".equals(type) == true) {
queryRunner.commit();
verify(conn, times(1)).commit();
} else if ("rollback".equals(type) == true) {
queryRunner.rollback();
verify(conn, times(1)).rollback();
} else {
fail();
}
verify(conn, never()).close();
queryRunner.setTransactionManualMode(false);
if ("update".equals(operation) == true) {
queryRunner.update(sql);
} else if ("query".equals(operation) == true) {
queryRunner.query(sql, outputHandler);
} else if ("batch".equals(operation) == true) {
queryRunner.batch(sql, new Object[0][0]);
} else if ("call".equals(operation) == true) {
queryRunner.call(inputHandler);
} else {
fail();
}
// if transaction manager is created based on connection - it should never close it, but commit should be invoked
verify(conn, never()).close();
if ("commit".equals(type) == true) {
verify(conn, times(2)).commit();
} else if ("rollback".equals(type) == true) {
verify(conn, times(1)).commit();
verify(conn, times(1)).rollback();
} else {
fail();
}
if ("commit".equals(type) == true) {
queryRunner.commit();
// commit should be invoked
verify(conn, times(3)).commit();
} else if ("rollback".equals(type) == true) {
queryRunner.rollback();
// rollback shouldn't be invoked as commit is already performed
verify(conn, times(2)).rollback();
verify(conn, times(1)).commit();
} else {
fail();
}
verify(conn, never()).close();
}
@Test
public void testIsolationLevelDS() throws SQLException {
QueryRunnerService queryRunner = null;
queryRunner = MjdbcFactory.getQueryRunner(ds);
testIsolationLevel(queryRunner);
verify(ds, times(4)).getConnection();
verify(conn, never()).setTransactionIsolation(any(int.class));
queryRunner.setTransactionIsolationLevel(1);
testIsolationLevel(queryRunner);
verify(ds, times(8)).getConnection();
verify(conn, times(4)).setTransactionIsolation(any(int.class));
}
@Test
public void testIsolationLevelConn() throws SQLException {
QueryRunnerService queryRunner = null;
queryRunner = MjdbcFactory.getQueryRunner(conn);
testIsolationLevel(queryRunner);
verify(conn, never()).setTransactionIsolation(any(int.class));
queryRunner.setTransactionIsolationLevel(1);
testIsolationLevel(queryRunner);
verify(conn, times(4)).setTransactionIsolation(any(int.class));
}
private void testIsolationLevel(QueryRunnerService queryRunner) throws SQLException {
queryRunner.update(sql);
queryRunner.query(sql, outputHandler);
queryRunner.batch(sql, new Object[0][0]);
queryRunner.call(inputHandler);
}
@Test
public void testSavepointDS() throws SQLException {
QueryRunnerService queryRunner = MjdbcFactory.getQueryRunner(ds);
// manual transaction control needed so connection won't be closed
queryRunner.setTransactionManualMode(true);
testSavepoint(queryRunner);
}
@Test
public void testSavepointConn() throws SQLException {
QueryRunnerService queryRunner = MjdbcFactory.getQueryRunner(conn);
testSavepoint(queryRunner);
}
private void testSavepoint(QueryRunnerService queryRunner) throws SQLException {
queryRunner.update(sql);
queryRunner.query(sql, outputHandler);
queryRunner.batch(sql, new Object[0][0]);
queryRunner.call(inputHandler);
queryRunner.setSavepoint();
queryRunner.setSavepoint("something");
queryRunner.releaseSavepoint(null);
queryRunner.rollback(null);
verify(conn, times(1)).setSavepoint();
verify(conn, times(1)).setSavepoint(any(String.class));
verify(conn, times(1)).releaseSavepoint(null);
verify(conn, times(1)).rollback(null);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.server.cluster.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.ActiveMQBuffers;
import org.apache.activemq.artemis.api.core.BroadcastEndpoint;
import org.apache.activemq.artemis.api.core.BroadcastEndpointFactory;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.api.core.management.CoreNotificationType;
import org.apache.activemq.artemis.core.server.ActiveMQServerLogger;
import org.apache.activemq.artemis.core.server.NodeManager;
import org.apache.activemq.artemis.core.server.cluster.BroadcastGroup;
import org.apache.activemq.artemis.core.server.management.Notification;
import org.apache.activemq.artemis.core.server.management.NotificationService;
import org.apache.activemq.artemis.utils.TypedProperties;
import org.apache.activemq.artemis.utils.UUIDGenerator;
/**
* <p>This class will use the {@link BroadcastEndpoint} to send periodical updates on the list for connections
* used by this server. </p>
*
* <p>This is totally generic to the mechanism used on the transmission. It originally only had UDP but this got refactored
* into sub classes of {@link BroadcastEndpoint}</p>
*/
public class BroadcastGroupImpl implements BroadcastGroup, Runnable {
private final NodeManager nodeManager;
private final String name;
private final List<TransportConfiguration> connectors = new ArrayList<>();
private boolean started;
private final long broadCastPeriod;
private final ScheduledExecutorService scheduledExecutor;
private ScheduledFuture<?> future;
private boolean loggedBroadcastException = false;
// Each broadcast group has a unique id - we use this to detect when more than one group broadcasts the same node id
// on the network which would be an error
private final String uniqueID;
private NotificationService notificationService;
private BroadcastEndpoint endpoint;
public BroadcastGroupImpl(final NodeManager nodeManager,
final String name,
final long broadCastPeriod,
final ScheduledExecutorService scheduledExecutor,
final BroadcastEndpointFactory endpointFactory) throws Exception {
this.nodeManager = nodeManager;
this.name = name;
this.scheduledExecutor = scheduledExecutor;
this.broadCastPeriod = broadCastPeriod;
this.endpoint = endpointFactory.createBroadcastEndpoint();
uniqueID = UUIDGenerator.getInstance().generateStringUUID();
}
@Override
public void setNotificationService(final NotificationService notificationService) {
this.notificationService = notificationService;
}
@Override
public synchronized void start() throws Exception {
if (started) {
return;
}
endpoint.openBroadcaster();
started = true;
if (notificationService != null) {
TypedProperties props = new TypedProperties();
props.putSimpleStringProperty(new SimpleString("name"), new SimpleString(name));
Notification notification = new Notification(nodeManager.getNodeId().toString(), CoreNotificationType.BROADCAST_GROUP_STARTED, props);
notificationService.sendNotification(notification);
}
activate();
}
@Override
public synchronized void stop() {
if (!started) {
return;
}
if (future != null) {
future.cancel(false);
}
try {
endpoint.close(true);
}
catch (Exception e1) {
ActiveMQServerLogger.LOGGER.broadcastGroupClosed(e1);
}
started = false;
if (notificationService != null) {
TypedProperties props = new TypedProperties();
props.putSimpleStringProperty(new SimpleString("name"), new SimpleString(name));
Notification notification = new Notification(nodeManager.getNodeId().toString(), CoreNotificationType.BROADCAST_GROUP_STOPPED, props);
try {
notificationService.sendNotification(notification);
}
catch (Exception e) {
ActiveMQServerLogger.LOGGER.broadcastGroupClosed(e);
}
}
}
@Override
public synchronized boolean isStarted() {
return started;
}
@Override
public String getName() {
return name;
}
@Override
public synchronized void addConnector(final TransportConfiguration tcConfig) {
connectors.add(tcConfig);
}
@Override
public synchronized void removeConnector(final TransportConfiguration tcConfig) {
connectors.remove(tcConfig);
}
@Override
public synchronized int size() {
return connectors.size();
}
private synchronized void activate() {
if (scheduledExecutor != null) {
future = scheduledExecutor.scheduleWithFixedDelay(this, 0L, broadCastPeriod, TimeUnit.MILLISECONDS);
}
}
@Override
public synchronized void broadcastConnectors() throws Exception {
ActiveMQBuffer buff = ActiveMQBuffers.dynamicBuffer(4096);
buff.writeString(nodeManager.getNodeId().toString());
buff.writeString(uniqueID);
buff.writeInt(connectors.size());
for (TransportConfiguration tcConfig : connectors) {
tcConfig.encode(buff);
}
// Only send as many bytes as we need.
byte[] data = new byte[buff.readableBytes()];
buff.getBytes(buff.readerIndex(), data);
endpoint.broadcast(data);
}
@Override
public void run() {
if (!started) {
return;
}
try {
broadcastConnectors();
loggedBroadcastException = false;
}
catch (Exception e) {
// only log the exception at ERROR level once, even if it fails multiple times in a row - HORNETQ-919
if (!loggedBroadcastException) {
ActiveMQServerLogger.LOGGER.errorBroadcastingConnectorConfigs(e);
loggedBroadcastException = true;
}
else {
ActiveMQServerLogger.LOGGER.debug("Failed to broadcast connector configs...again", e);
}
}
}
}
| |
/* DirectoryManager.java --
Copyright (C) 2000, 2001, 2004, 2005 Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package javax.naming.spi;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.StringTokenizer;
import javax.naming.CannotProceedException;
import javax.naming.Context;
import javax.naming.Name;
import javax.naming.NamingException;
import javax.naming.RefAddr;
import javax.naming.Reference;
import javax.naming.Referenceable;
import javax.naming.StringRefAddr;
import javax.naming.directory.Attributes;
import javax.naming.directory.DirContext;
/**
* @author Tom Tromey (tromey@redhat.com)
* @date June 25, 2001
*/
public class DirectoryManager extends NamingManager
{
// Can't instantiate this class.
DirectoryManager ()
{
}
/**
 * Returns the continuation context stored in the given exception, cast to
 * DirContext. Delegates entirely to NamingManager.getContinuationContext.
 *
 * @throws NamingException if no continuation context can be obtained
 */
public static DirContext getContinuationDirContext (CannotProceedException c)
throws NamingException
{
return (DirContext) getContinuationContext (c);
}
// Try to create an object using the factory. Return null on
// failure.
// If the factory is a DirObjectFactory the attributes are passed along;
// for a plain ObjectFactory the attributes are dropped.
private static Object tryCreateObject (ObjectFactory factory,
Object refInfo,
Name name,
Context nameCtx,
Hashtable environment,
Attributes attrs)
throws Exception
{
if (factory instanceof DirObjectFactory)
{
DirObjectFactory dof = (DirObjectFactory) factory;
return dof.getObjectInstance (refInfo, name, nameCtx,
environment, attrs);
}
else
return factory.getObjectInstance (refInfo, name, nameCtx,
environment);
}
/**
 * Creates an object instance for the given reference information,
 * attribute-aware variant of NamingManager.getObjectInstance.
 *
 * Resolution order as implemented below:
 * 1. If an ObjectFactoryBuilder (ofb) is installed, it alone supplies the
 *    factory.
 * 2. Otherwise, if refInfo is a Reference (or Referenceable), use its
 *    factory class name when present; else try every StringRefAddr of
 *    type "URL" through the corresponding URL context factory.
 * 3. Otherwise (or if step 2 yielded nothing), walk the factories listed
 *    in Context.OBJECT_FACTORIES and return the first non-null result.
 *
 * Falls back to returning refInfo itself when no factory produces an
 * object. Exceptions thrown by the factories are propagated to the caller.
 */
public static Object getObjectInstance (Object refInfo, Name name,
Context nameCtx,
Hashtable environment,
Attributes attrs)
throws Exception
{
ObjectFactory factory = null;
if (ofb != null)
factory = ofb.createObjectFactory (refInfo, environment);
else
{
// First see if we have a Reference or a Referenceable. If so
// we do some special processing.
Object ref2 = refInfo;
if (refInfo instanceof Referenceable)
ref2 = ((Referenceable) refInfo).getReference ();
if (ref2 instanceof Reference)
{
Reference ref = (Reference) ref2;
// If we have a factory class name then we use that.
String fClass = ref.getFactoryClassName ();
if (fClass != null)
{
// Exceptions here are passed to the caller.
Class k = Class.forName (fClass);
factory = (ObjectFactory) k.newInstance ();
}
else
{
// There's no factory class name. If the address is a
// StringRefAddr with address type `URL', then we try
// the URL's context factory.
Enumeration e = ref.getAll ();
while (e.hasMoreElements ())
{
RefAddr ra = (RefAddr) e.nextElement ();
if (ra instanceof StringRefAddr
&& "URL".equals (ra.getType ()))
{
factory
= (ObjectFactory) getURLContext (refInfo,
name,
nameCtx,
(String) ra.getContent (),
environment);
Object obj = tryCreateObject (factory,
refInfo,
name,
nameCtx,
environment,
attrs);
if (obj != null)
return obj;
}
}
// Have to try the next step.
factory = null;
}
}
// Now look at OBJECT_FACTORIES to find the factory.
if (factory == null)
{
StringTokenizer tokens = getPlusPath (Context.OBJECT_FACTORIES,
environment, nameCtx);
while (tokens.hasMoreTokens ())
{
String klassName = tokens.nextToken ();
Class k = Class.forName (klassName);
factory = (ObjectFactory) k.newInstance ();
Object obj = tryCreateObject (factory, refInfo, name,
nameCtx, environment, attrs);
if (obj != null)
return obj;
}
// Failure.
return refInfo;
}
}
if (factory == null)
return refInfo;
Object obj = tryCreateObject (factory, refInfo, name,
nameCtx, environment, attrs);
return obj == null ? refInfo : obj;
}
/**
 * Computes the state of obj to be bound, consulting every state factory
 * named in Context.STATE_FACTORIES (via getPlusPath) in order.
 *
 * A DirStateFactory gets the attributes and returns a Result directly; a
 * plain StateFactory's non-null return value is wrapped in a Result with
 * the unchanged attributes. Reflection/casting problems with an individual
 * factory (class not found, wrong type, not instantiable, not accessible)
 * are deliberately ignored so the next factory can be tried.
 *
 * Falls back to a Result holding the original object and attributes when
 * no factory produces a result.
 */
public static DirStateFactory.Result getStateToBind (Object obj,
Name name,
Context nameCtx,
Hashtable environment,
Attributes attrs)
throws NamingException
{
StringTokenizer tokens = getPlusPath (Context.STATE_FACTORIES,
environment, nameCtx);
while (tokens.hasMoreTokens ())
{
String klassName = tokens.nextToken ();
try
{
Class k = Class.forName (klassName);
StateFactory factory = (StateFactory) k.newInstance ();
DirStateFactory.Result result = null;
if (factory instanceof DirStateFactory)
{
DirStateFactory dsf = (DirStateFactory) factory;
result = dsf.getStateToBind (obj, name, nameCtx, environment,
attrs);
}
else
{
Object o = factory.getStateToBind (obj, name, nameCtx,
environment);
if (o != null)
result = new DirStateFactory.Result (o, attrs);
}
if (result != null)
return result;
}
catch (ClassNotFoundException _1)
{
// Ignore it.
}
catch (ClassCastException _2)
{
// This means that the class we found was not an
// ObjectFactory or that the factory returned something
// which was not a Context.
}
catch (InstantiationException _3)
{
// If we couldn't instantiate the factory we might get
// this.
}
catch (IllegalAccessException _4)
{
// Another possibility when instantiating.
}
}
return new DirStateFactory.Result (obj, attrs);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.binary.BinaryObjectException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException;
import org.apache.ignite.internal.managers.communication.GridMessageListener;
import org.apache.ignite.internal.managers.deployment.GridDeploymentInfo;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.distributed.dht.CacheGetFuture;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtAffinityAssignmentRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtLockRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtLockResponse;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtTxPrepareRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtTxPrepareResponse;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridPartitionedSingleGetFuture;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicUpdateRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicUpdateResponse;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridNearAtomicUpdateRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridNearAtomicUpdateResponse;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtForceKeysRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtForceKeysResponse;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearGetRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearGetResponse;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearLockRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearLockResponse;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearSingleGetRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearSingleGetResponse;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxPrepareRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxPrepareResponse;
import org.apache.ignite.internal.processors.cache.query.GridCacheQueryRequest;
import org.apache.ignite.internal.processors.cache.query.GridCacheQueryResponse;
import org.apache.ignite.internal.util.F0;
import org.apache.ignite.internal.util.GridLeanSet;
import org.apache.ignite.internal.util.GridSpinReadWriteLock;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.P1;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiInClosure;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.lang.IgniteUuid;
import org.jetbrains.annotations.Nullable;
import org.jsr166.ConcurrentHashMap8;
import static org.apache.ignite.internal.GridTopic.TOPIC_CACHE;
/**
* Cache communication manager.
*/
public class GridCacheIoManager extends GridCacheSharedManagerAdapter {
/** Communication topic prefix for distributed queries. */
private static final String QUERY_TOPIC_PREFIX = "QUERY";
/** Message ID generator. */
private static final AtomicLong idGen = new AtomicLong();
/** Delay in milliseconds between retries. */
private long retryDelay;
/** Number of retries using to send messages. */
private int retryCnt;
/** Indexed class handlers. */
private Map<Integer, IgniteBiInClosure[]> idxClsHandlers = new HashMap<>();
/** Handler registry. */
private ConcurrentMap<ListenerKey, IgniteBiInClosure<UUID, GridCacheMessage>>
clsHandlers = new ConcurrentHashMap8<>();
/** Ordered handler registry. */
private ConcurrentMap<Object, IgniteBiInClosure<UUID, ? extends GridCacheMessage>> orderedHandlers =
new ConcurrentHashMap8<>();
/** Stopping flag. */
private boolean stopping;
/** Mutex. */
private final GridSpinReadWriteLock rw = new GridSpinReadWriteLock();
/** Deployment enabled. */
private boolean depEnabled;
/**
 * Message listener for unordered cache messages.
 *
 * Before dispatching to {@link #handleMessage}, it may defer processing until
 * the local node catches up with the topology/affinity version carried by the
 * message; deferred messages are re-submitted via the closure processor once
 * the corresponding ready-future completes.
 */
private GridMessageListener lsnr = new GridMessageListener() {
@Override public void onMessage(final UUID nodeId, Object msg) {
if (log.isDebugEnabled())
log.debug("Received unordered cache communication message [nodeId=" + nodeId +
", locId=" + cctx.localNodeId() + ", msg=" + msg + ']');
final GridCacheMessage cacheMsg = (GridCacheMessage)msg;
// Future to wait on before the message may be handled; null means handle now.
IgniteInternalFuture<?> fut = null;
if (cacheMsg.partitionExchangeMessage()) {
if (cacheMsg instanceof GridDhtAffinityAssignmentRequest) {
assert cacheMsg.topologyVersion() != null : cacheMsg;
// Default wait version: the local node's join order.
AffinityTopologyVersion startTopVer = new AffinityTopologyVersion(cctx.localNode().order());
DynamicCacheDescriptor cacheDesc = cctx.cache().cacheDescriptor(cacheMsg.cacheId());
if (cacheDesc != null) {
// Prefer the cache's own start version when known.
if (cacheDesc.startTopologyVersion() != null)
startTopVer = cacheDesc.startTopologyVersion();
else if (cacheDesc.receivedFromStartVersion() != null)
startTopVer = cacheDesc.receivedFromStartVersion();
}
// Need to wait for exchange to avoid race between cache start and affinity request.
fut = cctx.exchange().affinityReadyFuture(startTopVer);
if (fut != null && !fut.isDone()) {
if (log.isDebugEnabled()) {
log.debug("Wait for exchange before processing message [msg=" + msg +
", node=" + nodeId +
", waitVer=" + startTopVer +
", cacheDesc=" + cacheDesc + ']');
}
// Re-submit on a safe thread once the exchange completes.
fut.listen(new CI1<IgniteInternalFuture<?>>() {
@Override public void apply(IgniteInternalFuture<?> fut) {
cctx.kernalContext().closure().runLocalSafe(new Runnable() {
@Override public void run() {
handleMessage(nodeId, cacheMsg);
}
});
}
});
return;
}
}
// Exchange messages: compare plain discovery topology versions.
long locTopVer = cctx.discovery().topologyVersion();
long rmtTopVer = cacheMsg.topologyVersion().topologyVersion();
if (locTopVer < rmtTopVer) {
if (log.isDebugEnabled())
log.debug("Received message has higher topology version [msg=" + msg +
", locTopVer=" + locTopVer + ", rmtTopVer=" + rmtTopVer + ']');
fut = cctx.discovery().topologyFuture(rmtTopVer);
}
}
else {
// Regular cache messages: compare affinity topology versions instead.
AffinityTopologyVersion locAffVer = cctx.exchange().readyAffinityVersion();
AffinityTopologyVersion rmtAffVer = cacheMsg.topologyVersion();
if (locAffVer.compareTo(rmtAffVer) < 0) {
if (log.isDebugEnabled())
log.debug("Received message has higher affinity topology version [msg=" + msg +
", locTopVer=" + locAffVer + ", rmtTopVer=" + rmtAffVer + ']');
fut = cctx.exchange().affinityReadyFuture(rmtAffVer);
}
}
// Defer until the required version is ready, otherwise handle inline.
if (fut != null && !fut.isDone()) {
fut.listen(new CI1<IgniteInternalFuture<?>>() {
@Override public void apply(IgniteInternalFuture<?> t) {
cctx.kernalContext().closure().runLocalSafe(new Runnable() {
@Override public void run() {
handleMessage(nodeId, cacheMsg);
}
});
}
});
return;
}
handleMessage(nodeId, cacheMsg);
}
};
/**
 * Looks up the handler registered for the given message and dispatches to it.
 * Indexed per-cache handlers are consulted first, then the class-keyed
 * registry; messages with no handler are logged and dropped.
 *
 * @param nodeId Sender node ID.
 * @param cacheMsg Message.
 */
@SuppressWarnings("unchecked")
private void handleMessage(UUID nodeId, GridCacheMessage cacheMsg) {
IgniteBiInClosure<UUID, GridCacheMessage> hnd = null;

int msgIdx = cacheMsg.lookupIndex();

// Fast path: handler resolved by message index within the cache's handler array.
if (msgIdx >= 0) {
IgniteBiInClosure[] hnds = idxClsHandlers.get(cacheMsg.cacheId());

if (hnds != null)
hnd = hnds[msgIdx];
}

// Slow path: handler keyed by (cacheId, message class).
if (hnd == null)
hnd = clsHandlers.get(new ListenerKey(cacheMsg.cacheId(), cacheMsg.getClass()));

if (hnd != null) {
onMessage0(nodeId, cacheMsg, hnd);

return;
}

// No handler: silent at debug level during shutdown, loud warning otherwise.
if (cctx.kernalContext().isStopping()) {
if (log.isDebugEnabled())
log.debug("Received message without registered handler (will ignore) [msg=" + cacheMsg +
", nodeId=" + nodeId + ']');
}
else {
U.warn(log, "Received message without registered handler (will ignore) [msg=" + cacheMsg +
", nodeId=" + nodeId +
", locTopVer=" + cctx.exchange().readyAffinityVersion() +
", msgTopVer=" + cacheMsg.topologyVersion() +
", cacheDesc=" + cctx.cache().cacheDescriptor(cacheMsg.cacheId()) + ']');
}
}
/** {@inheritDoc} */
@Override public void start0() throws IgniteCheckedException {
// Cache retry and deployment settings up front, then subscribe to cache traffic.
depEnabled = cctx.gridDeploy().enabled();

retryCnt = cctx.gridConfig().getNetworkSendRetryCount();
retryDelay = cctx.gridConfig().getNetworkSendRetryDelay();

cctx.gridIO().addMessageListener(TOPIC_CACHE, lsnr);
}
/** {@inheritDoc} */
@SuppressWarnings("BusyWait")
@Override protected void onKernalStop0(boolean cancel) {
// Unsubscribe from both the unordered topic and all ordered topics.
cctx.gridIO().removeMessageListener(TOPIC_CACHE);

for (Object ordTopic : orderedHandlers.keySet())
cctx.gridIO().removeMessageListener(ordTopic);

boolean interrupted = false;

// Busy wait is intentional.
for (;;) {
boolean acquired = false;

try {
acquired = rw.tryWriteLock(200, TimeUnit.MILLISECONDS);

if (!acquired)
Thread.sleep(200);
}
catch (InterruptedException ignore) {
// Preserve interrupt status & ignore.
// Note that interrupted flag is cleared.
interrupted = true;
}

if (acquired)
break;
}

// Restore the interrupt flag swallowed inside the loop.
if (interrupted)
Thread.currentThread().interrupt();

try {
stopping = true;
}
finally {
rw.writeUnlock();
}
}
/**
 * Invokes the resolved handler for a cache message under the shared read lock,
 * dropping the message if the manager is already stopping. Unmarshalling
 * errors are routed to {@link #processFailedMessage}; all other throwables are
 * logged, with {@link Error}s rethrown.
 *
 * @param nodeId Node ID.
 * @param cacheMsg Cache message.
 * @param c Handler closure.
 */
@SuppressWarnings({"unchecked", "ConstantConditions", "ThrowableResultOfMethodCallIgnored"})
private void onMessage0(final UUID nodeId, final GridCacheMessage cacheMsg,
final IgniteBiInClosure<UUID, GridCacheMessage> c) {
// Read lock pairs with the write lock taken in onKernalStop0 before setting 'stopping'.
rw.readLock();
try {
if (stopping) {
if (log.isDebugEnabled())
log.debug("Received cache communication message while stopping (will ignore) [nodeId=" +
nodeId + ", msg=" + cacheMsg + ']');
return;
}
// Ownership must be toggled off again in 'finally' below.
if (depEnabled)
cctx.deploy().ignoreOwnership(true);
unmarshall(nodeId, cacheMsg);
// classError() is set by unmarshall() when deserialization failed.
if (cacheMsg.classError() != null)
processFailedMessage(nodeId, cacheMsg, c);
else
processMessage(nodeId, cacheMsg, c);
}
catch (Throwable e) {
U.error(log, "Failed to process message [senderId=" + nodeId + ", messageType=" + cacheMsg.getClass() + ']', e);
if (e instanceof Error)
throw (Error)e;
}
finally {
if (depEnabled)
cctx.deploy().ignoreOwnership(false);
rw.readUnlock();
}
}
/**
 * Sends response on failed message. Send errors are logged and swallowed:
 * there is nothing more to do for a peer that may already be gone.
 *
 * @param nodeId Node id.
 * @param res Response.
 * @param cctx Shared context.
 * @param plc Grid io policy.
 */
private void sendResponseOnFailedMessage(UUID nodeId, GridCacheMessage res, GridCacheSharedContext cctx,
byte plc) {
try {
cctx.io().send(nodeId, res, plc);
}
catch (IgniteCheckedException e) {
// Fixed log formatting: space after comma, consistent with other messages in this class.
U.error(log, "Failed to send response to node (is node still alive?) [nodeId=" + nodeId +
", res=" + res + ']', e);
}
}
/**
 * Processes a message whose unmarshalling failed with a class error by
 * replying to the sender with an error-carrying response, so the remote
 * future completes with a failure instead of hanging.
 *
 * The switch keys are direct message type codes; the concrete message class
 * for each code is evident from the cast in the corresponding branch.
 * Requests are answered with matching error responses; responses (codes 50
 * and 117) instead fail the matching local future.
 *
 * @param nodeId Node ID.
 * @param msg Message.
 * @throws IgniteCheckedException If failed.
 */
private void processFailedMessage(UUID nodeId, GridCacheMessage msg, IgniteBiInClosure<UUID, GridCacheMessage> c)
    throws IgniteCheckedException {
    GridCacheContext ctx = cctx.cacheContext(msg.cacheId());

    switch (msg.directType()) {
        // Eviction request -> eviction response carrying the error flag.
        case 14: {
            GridCacheEvictionRequest req = (GridCacheEvictionRequest)msg;

            GridCacheEvictionResponse res = new GridCacheEvictionResponse(
                ctx.cacheId(),
                req.futureId(),
                req.classError() != null
            );

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // DHT lock request -> empty DHT lock response.
        case 30: {
            GridDhtLockRequest req = (GridDhtLockRequest)msg;

            GridDhtLockResponse res = new GridDhtLockResponse(
                ctx.cacheId(),
                req.version(),
                req.futureId(),
                req.miniId(),
                0,
                ctx.deploymentEnabled());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // DHT tx prepare request -> prepare response with the class error set.
        case 34: {
            GridDhtTxPrepareRequest req = (GridDhtTxPrepareRequest)msg;

            GridDhtTxPrepareResponse res = new GridDhtTxPrepareResponse(
                req.version(),
                req.futureId(),
                req.miniId(),
                req.deployInfo() != null);

            res.error(req.classError());

            // Note: uses the request's own IO policy, unlike most branches.
            sendResponseOnFailedMessage(nodeId, res, cctx, req.policy());
        }

        break;

        // DHT atomic update request -> update response with the error.
        case 38: {
            GridDhtAtomicUpdateRequest req = (GridDhtAtomicUpdateRequest)msg;

            GridDhtAtomicUpdateResponse res = new GridDhtAtomicUpdateResponse(
                ctx.cacheId(),
                req.futureVersion(),
                ctx.deploymentEnabled());

            res.onError(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // Near atomic update request -> update response with the error.
        case 40: {
            GridNearAtomicUpdateRequest req = (GridNearAtomicUpdateRequest)msg;

            GridNearAtomicUpdateResponse res = new GridNearAtomicUpdateResponse(
                ctx.cacheId(),
                nodeId,
                req.futureVersion(),
                ctx.deploymentEnabled());

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // Force-keys request -> response with the error.
        case 42: {
            GridDhtForceKeysRequest req = (GridDhtForceKeysRequest)msg;

            GridDhtForceKeysResponse res = new GridDhtForceKeysResponse(
                ctx.cacheId(),
                req.futureId(),
                req.miniId(),
                ctx.deploymentEnabled()
            );

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        case 45: {
            processMessage(nodeId,msg,c);// Will be handled by Rebalance Demander.
        }

        break;

        // Near get request -> get response with the error.
        case 49: {
            GridNearGetRequest req = (GridNearGetRequest)msg;

            GridNearGetResponse res = new GridNearGetResponse(
                ctx.cacheId(),
                req.futureId(),
                req.miniId(),
                req.version(),
                req.deployInfo() != null);

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // Near get RESPONSE: fail the waiting local future instead of replying.
        case 50: {
            GridNearGetResponse res = (GridNearGetResponse)msg;

            CacheGetFuture fut = (CacheGetFuture)ctx.mvcc().future(res.futureId());

            if (fut == null) {
                if (log.isDebugEnabled())
                    log.debug("Failed to find future for get response [sender=" + nodeId + ", res=" + res + ']');

                return;
            }

            res.error(res.classError());

            fut.onResult(nodeId, res);
        }

        break;

        // Near lock request -> lock response carrying the error.
        case 51: {
            GridNearLockRequest req = (GridNearLockRequest)msg;

            GridNearLockResponse res = new GridNearLockResponse(
                ctx.cacheId(),
                req.version(),
                req.futureId(),
                req.miniId(),
                false,
                0,
                req.classError(),
                null,
                ctx.deploymentEnabled());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // Near tx prepare request -> prepare response with the error.
        case 55: {
            GridNearTxPrepareRequest req = (GridNearTxPrepareRequest)msg;

            GridNearTxPrepareResponse res = new GridNearTxPrepareResponse(
                req.version(),
                req.futureId(),
                req.miniId(),
                req.version(),
                req.version(),
                null,
                null,
                null,
                req.deployInfo() != null);

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, req.policy());
        }

        break;

        // Query request: queries reply over an ordered per-query topic.
        case 58: {
            GridCacheQueryRequest req = (GridCacheQueryRequest)msg;

            GridCacheQueryResponse res = new GridCacheQueryResponse(
                req.cacheId(),
                req.id(),
                req.classError(),
                cctx.deploymentEnabled());

            cctx.io().sendOrderedMessage(
                ctx.node(nodeId),
                TOPIC_CACHE.topic(QUERY_TOPIC_PREFIX, nodeId, req.id()),
                res,
                ctx.ioPolicy(),
                Long.MAX_VALUE);
        }

        break;

        case 114: {
            processMessage(nodeId,msg,c);// Will be handled by Rebalance Demander.
        }

        break;

        // Near single get request -> single get response with the error.
        case 116: {
            GridNearSingleGetRequest req = (GridNearSingleGetRequest)msg;

            GridNearSingleGetResponse res = new GridNearSingleGetResponse(
                ctx.cacheId(),
                req.futureId(),
                req.topologyVersion(),
                null,
                false,
                req.deployInfo() != null);

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // Near single get RESPONSE: fail the waiting local future.
        case 117: {
            GridNearSingleGetResponse res = (GridNearSingleGetResponse)msg;

            GridPartitionedSingleGetFuture fut = (GridPartitionedSingleGetFuture)ctx.mvcc()
                .future(new IgniteUuid(IgniteUuid.VM_ID, res.futureId()));

            if (fut == null) {
                if (log.isDebugEnabled())
                    log.debug("Failed to find future for get response [sender=" + nodeId + ", res=" + res + ']');

                return;
            }

            res.error(res.classError());

            fut.onResult(nodeId, res);
        }

        break;

        default:
            throw new IgniteCheckedException("Failed to send response to node. Unsupported direct type [message="
                + msg + "]", msg.classError());
    }
}
/**
 * Applies the registered handler closure to an incoming message and performs
 * per-message thread-local cleanup afterwards.
 *
 * @param nodeId Node ID.
 * @param msg Message.
 * @param c Closure.
 */
private void processMessage(UUID nodeId, GridCacheMessage msg, IgniteBiInClosure<UUID, GridCacheMessage> c) {
    try {
        // We will not end up with storing a bunch of new UUIDs
        // in each cache entry, since node ID is stored in NIO session
        // on handshake.
        c.apply(nodeId, msg);

        if (log.isDebugEnabled())
            log.debug("Finished processing cache communication message [nodeId=" + nodeId + ", msg=" + msg + ']');
    }
    catch (Throwable e) {
        U.error(log, "Failed processing message [senderId=" + nodeId + ", msg=" + msg + ']', e);

        // Exceptions are logged and swallowed; errors must be rethrown.
        if (e instanceof Error)
            throw e;
    }
    finally {
        // Reset thread local context.
        cctx.tm().resetContext();
        cctx.mvcc().contextReset();

        // Unwind eviction notifications.
        CU.unwindEvicts(cctx);
    }
}
/**
 * Pre-processes message prior to send: assigns a message ID when missing and
 * marshals the message (plus deployment info) for remote destinations.
 *
 * @param msg Message to send.
 * @param destNodeId Destination node ID ({@code null} for multi-node sends).
 * @return {@code True} if should send message.
 * @throws IgniteCheckedException If failed.
 */
private boolean onSend(GridCacheMessage msg, @Nullable UUID destNodeId) throws IgniteCheckedException {
    // Do not bother sending error messages while the node is shutting down.
    boolean skipOnStop = msg.error() != null && cctx.kernalContext().isStopping();

    if (skipOnStop)
        return false;

    // Negative ID means the message was not assigned one yet.
    if (msg.messageId() < 0)
        msg.messageId(idGen.incrementAndGet());

    boolean locDest = destNodeId != null && cctx.localNodeId().equals(destNodeId);

    // Loopback messages are handed over as-is; remote ones must be marshalled.
    if (!locDest) {
        msg.prepareMarshal(cctx);

        boolean needsDeployInfo = msg instanceof GridCacheDeployable && msg.addDeploymentInfo();

        if (needsDeployInfo)
            cctx.deploy().prepare((GridCacheDeployable)msg);
    }

    return true;
}
/**
 * Sends communication message to a remote node, retrying up to
 * {@code retryCnt} times with {@code retryDelay} between attempts.
 *
 * @param node Node to send the message to (must not be local).
 * @param msg Message to send.
 * @param plc IO policy.
 * @throws IgniteCheckedException If sending failed after all retries or the
 *      node is stopping.
 * @throws ClusterTopologyCheckedException If receiver left the grid.
 */
@SuppressWarnings("unchecked")
public void send(ClusterNode node, GridCacheMessage msg, byte plc) throws IgniteCheckedException {
    assert !node.isLocal();

    if (!onSend(msg, node.id()))
        return;

    if (log.isDebugEnabled())
        log.debug("Sending cache message [msg=" + msg + ", node=" + U.toShortString(node) + ']');

    int cnt = 0;

    while (cnt <= retryCnt) {
        try {
            cnt++;

            cctx.gridIO().send(node, TOPIC_CACHE, msg, plc);

            return;
        }
        catch (IgniteCheckedException e) {
            // A dead receiver is reported as a topology error, not a send failure.
            if (!cctx.discovery().alive(node.id()) || !cctx.discovery().pingNode(node.id()))
                throw new ClusterTopologyCheckedException("Node left grid while sending message to: " + node.id(), e);

            if (cnt == retryCnt || cctx.kernalContext().isStopping())
                throw e;
            else if (log.isDebugEnabled())
                log.debug("Failed to send message to node (will retry): " + node.id());
        }

        U.sleep(retryDelay);
    }

    // NOTE(review): the loop above always exits via 'return' on success or a
    // throw on failure ('cnt == retryCnt' throws before the loop condition can
    // become false), so the former trailing "Sent cache message" debug
    // statement here was unreachable dead code and has been removed.
}
/**
 * Sends message and automatically accounts for lefts nodes.
 *
 * Nodes detected as departed are accumulated in {@code leftIds} and excluded
 * from subsequent attempts; the retry counter is only advanced when a failure
 * is NOT explained by a node leaving, so topology changes do not consume
 * retries.
 *
 * @param nodes Nodes to send to.
 * @param msg Message to send.
 * @param plc IO policy.
 * @param fallback Callback for failed nodes; returning {@code false} aborts
 *      the whole send.
 * @throws IgniteCheckedException If send failed.
 */
@SuppressWarnings({"BusyWait", "unchecked"})
public void safeSend(Collection<? extends ClusterNode> nodes, GridCacheMessage msg, byte plc,
    @Nullable IgnitePredicate<ClusterNode> fallback) throws IgniteCheckedException {
    assert nodes != null;
    assert msg != null;

    if (nodes.isEmpty()) {
        if (log.isDebugEnabled())
            log.debug("Message will not be sent as collection of nodes is empty: " + msg);

        return;
    }

    if (!onSend(msg, null))
        return;

    if (log.isDebugEnabled())
        log.debug("Sending cache message [msg=" + msg + ", nodes=" + U.toShortString(nodes) + ']');

    // IDs of nodes that left the grid during this send; excluded from retries.
    final Collection<UUID> leftIds = new GridLeanSet<>();

    int cnt = 0;

    while (cnt < retryCnt) {
        try {
            // Live view over the original collection, skipping departed nodes.
            Collection<? extends ClusterNode> nodesView = F.view(nodes, new P1<ClusterNode>() {
                @Override public boolean apply(ClusterNode e) {
                    return !leftIds.contains(e.id());
                }
            });

            cctx.gridIO().send(nodesView, TOPIC_CACHE, msg, plc);

            boolean added = false;

            // Even if there is no exception, we still check here, as node could have
            // ignored the message during stopping.
            for (ClusterNode n : nodes) {
                if (!leftIds.contains(n.id()) && !cctx.discovery().alive(n.id())) {
                    leftIds.add(n.id());

                    if (fallback != null && !fallback.apply(n))
                        // If fallback signalled to stop.
                        return;

                    added = true;
                }
            }

            if (added) {
                // Stop if every target node has left the topology.
                if (!F.exist(F.nodeIds(nodes), F0.not(F.contains(leftIds)))) {
                    if (log.isDebugEnabled())
                        log.debug("Message will not be sent because all nodes left topology [msg=" + msg +
                            ", nodes=" + U.toShortString(nodes) + ']');

                    return;
                }
            }

            break;
        }
        catch (IgniteCheckedException e) {
            boolean added = false;

            // Attribute the failure to departed nodes where possible.
            for (ClusterNode n : nodes) {
                if (!leftIds.contains(n.id()) &&
                    (!cctx.discovery().alive(n.id()) || !cctx.discovery().pingNode(n.id()))) {
                    leftIds.add(n.id());

                    if (fallback != null && !fallback.apply(n))
                        // If fallback signalled to stop.
                        return;

                    added = true;
                }
            }

            // Only failures not explained by topology changes consume a retry.
            if (!added) {
                cnt++;

                if (cnt == retryCnt)
                    throw e;

                U.sleep(retryDelay);
            }

            if (!F.exist(F.nodeIds(nodes), F0.not(F.contains(leftIds)))) {
                if (log.isDebugEnabled())
                    log.debug("Message will not be sent because all nodes left topology [msg=" + msg + ", nodes=" +
                        U.toShortString(nodes) + ']');

                return;
            }

            if (log.isDebugEnabled())
                log.debug("Message send will be retried [msg=" + msg + ", nodes=" + U.toShortString(nodes) +
                    ", leftIds=" + leftIds + ']');
        }
    }

    if (log.isDebugEnabled())
        log.debug("Sent cache message [msg=" + msg + ", nodes=" + U.toShortString(nodes) + ']');
}
/**
 * Sends communication message, resolving the destination node by ID and
 * delegating to {@link #send(ClusterNode, GridCacheMessage, byte)}.
 *
 * @param nodeId ID of node to send the message to.
 * @param msg Message to send.
 * @param plc IO policy.
 * @throws IgniteCheckedException If sending failed or the node left the grid.
 */
public void send(UUID nodeId, GridCacheMessage msg, byte plc) throws IgniteCheckedException {
    ClusterNode node = cctx.discovery().node(nodeId);

    if (node != null) {
        send(node, msg, plc);

        return;
    }

    throw new ClusterTopologyCheckedException("Failed to send message because node left grid [nodeId=" + nodeId +
        ", msg=" + msg + ']');
}
/**
 * Sends an ordered message over the given topic, retrying up to
 * {@code retryCnt} times with {@code retryDelay} between attempts.
 *
 * @param node Destination node.
 * @param topic Topic to send the message to.
 * @param msg Message to send.
 * @param plc IO policy.
 * @param timeout Timeout to keep a message on receiving queue.
 * @throws IgniteCheckedException Thrown in case of any errors.
 */
public void sendOrderedMessage(ClusterNode node, Object topic, GridCacheMessage msg, byte plc,
    long timeout) throws IgniteCheckedException {
    if (!onSend(msg, node.id()))
        return;

    int cnt = 0;

    while (cnt <= retryCnt) {
        try {
            cnt++;

            cctx.gridIO().sendOrderedMessage(node, topic, msg, plc, timeout, false);

            if (log.isDebugEnabled())
                log.debug("Sent ordered cache message [topic=" + topic + ", msg=" + msg +
                    ", nodeId=" + node.id() + ']');

            return;
        }
        catch (IgniteCheckedException e) {
            // Departed destination is a topology error, not a send failure.
            if (cctx.discovery().node(node.id()) == null)
                throw new ClusterTopologyCheckedException("Node left grid while sending ordered message to: " + node.id(), e);

            // Last attempt: propagate the original failure.
            if (cnt == retryCnt)
                throw e;
            else if (log.isDebugEnabled())
                log.debug("Failed to send message to node (will retry): " + node.id());
        }

        U.sleep(retryDelay);
    }
}
/**
 * @return ID that auto-grows based on local counter and counters received from other nodes.
 */
public long nextIoId() {
    // NOTE(review): only the local increment is visible here; merging of
    // counters received from other nodes presumably happens where 'idGen'
    // is updated elsewhere — confirm against the rest of the class.
    return idGen.incrementAndGet();
}
/**
 * Sends message without retries and node ping in case of error.
 *
 * @param node Node to send message to.
 * @param msg Message.
 * @param plc IO policy.
 * @throws IgniteCheckedException If send failed.
 * @throws ClusterTopologyCheckedException If the receiver left the grid.
 */
public void sendNoRetry(ClusterNode node,
    GridCacheMessage msg,
    byte plc)
    throws IgniteCheckedException {
    assert node != null;
    assert msg != null;

    if (!onSend(msg, null))
        return;

    try {
        cctx.gridIO().send(node, TOPIC_CACHE, msg, plc);

        if (log.isDebugEnabled())
            log.debug("Sent cache message [msg=" + msg + ", node=" + U.toShortString(node) + ']');
    }
    catch (IgniteCheckedException e) {
        // If the node is still alive, the failure is genuine — rethrow as-is.
        if (cctx.discovery().alive(node.id()))
            throw e;

        throw new ClusterTopologyCheckedException("Node left grid while sending message to: " + node.id(), e);
    }
}
/**
 * Adds message handler. Messages with a declared lookup index are registered
 * in the per-cache index table; all others are keyed by (cacheId, class).
 *
 * @param cacheId Cache ID.
 * @param type Type of message.
 * @param c Handler.
 * @throws IgniteException If an index-based handler is already registered
 *      for this cache ID and message type.
 */
@SuppressWarnings({"unchecked"})
public void addHandler(
    int cacheId,
    Class<? extends GridCacheMessage> type,
    IgniteBiInClosure<UUID, ? extends GridCacheMessage> c) {
    int msgIdx = messageIndex(type);

    if (msgIdx != -1) {
        // NOTE(review): get/put below is not atomic — presumably registration
        // for a given cache happens from a single thread; confirm.
        IgniteBiInClosure[] cacheClsHandlers = idxClsHandlers.get(cacheId);

        if (cacheClsHandlers == null) {
            cacheClsHandlers = new IgniteBiInClosure[GridCacheMessage.MAX_CACHE_MSG_LOOKUP_INDEX];

            idxClsHandlers.put(cacheId, cacheClsHandlers);
        }

        if (cacheClsHandlers[msgIdx] != null)
            throw new IgniteException("Duplicate cache message ID found [cacheId=" + cacheId +
                ", type=" + type + ']');

        cacheClsHandlers[msgIdx] = c;
    }
    else {
        ListenerKey key = new ListenerKey(cacheId, type);

        if (clsHandlers.putIfAbsent(key,
            (IgniteBiInClosure<UUID, GridCacheMessage>)c) != null)
            assert false : "Handler for class already registered [cacheId=" + cacheId + ", cls=" + type +
                ", old=" + clsHandlers.get(key) + ", new=" + c + ']';
    }

    // Fix: the index-based branch previously returned early and was never
    // trace-logged; both registration paths now fall through to this log.
    IgniteLogger log0 = log;

    if (log0 != null && log0.isTraceEnabled())
        log0.trace(
            "Registered cache communication handler [cacheId=" + cacheId + ", type=" + type +
                ", msgIdx=" + msgIdx + ", handler=" + c + ']');
}
/**
 * Removes every handler registered for the given cache: both the indexed
 * handler table entry and all (cacheId, class)-keyed handlers.
 *
 * @param cacheId Cache ID to remove handlers for.
 */
public void removeHandlers(int cacheId) {
    assert cacheId != 0;

    idxClsHandlers.remove(cacheId);

    Iterator<ListenerKey> it = clsHandlers.keySet().iterator();

    while (it.hasNext()) {
        if (it.next().cacheId == cacheId)
            it.remove();
    }
}
/**
 * Removes the class-keyed handler for the given cache ID and message type.
 * Index-based handlers are unaffected (see {@code removeHandlers(int)}).
 *
 * @param cacheId Cache ID to remove handlers for.
 * @param type Message type.
 */
public void removeHandler(int cacheId, Class<? extends GridCacheMessage> type) {
    clsHandlers.remove(new ListenerKey(cacheId, type));
}
/**
 * Reads the message's static lookup-index field via reflection.
 *
 * @param msgCls Message class to check.
 * @return Message index, or {@code -1} when the field is absent, unset or
 *      negative.
 */
private int messageIndex(Class<?> msgCls) {
    try {
        Integer idx = U.field(msgCls, GridCacheMessage.CACHE_MSG_INDEX_FIELD_NAME);

        return idx == null || idx < 0 ? -1 : idx;
    }
    catch (IgniteCheckedException ignored) {
        // Field is not declared on this class — not an indexed message.
        return -1;
    }
}
/**
 * Adds ordered message handler: registers the closure for the topic and
 * attaches a grid IO listener that feeds messages into it. Duplicate
 * registrations for the same topic are rejected with a warning.
 *
 * @param topic Topic.
 * @param c Handler.
 */
@SuppressWarnings({"unchecked"})
public void addOrderedHandler(Object topic, IgniteBiInClosure<UUID, ? extends GridCacheMessage> c) {
    IgniteLogger logRef = log;

    boolean dup = orderedHandlers.putIfAbsent(topic, c) != null;

    if (dup) {
        if (logRef != null)
            U.warn(logRef, "Failed to register ordered cache communication handler because it is already " +
                "registered for this topic [topic=" + topic + ", handler=" + c + ']');

        return;
    }

    OrderedMessageListener lsnr = new OrderedMessageListener((IgniteBiInClosure<UUID, GridCacheMessage>)c);

    cctx.gridIO().addMessageListener(topic, lsnr);

    if (logRef != null && logRef.isTraceEnabled())
        logRef.trace("Registered ordered cache communication handler [topic=" + topic + ", handler=" + c + ']');
}
/**
 * Removed ordered message handler: unregisters the closure for the topic and
 * detaches the corresponding grid IO listener. Warns if no handler was
 * registered for the topic.
 *
 * @param topic Topic.
 */
public void removeOrderedHandler(Object topic) {
    boolean removed = orderedHandlers.remove(topic) != null;

    if (!removed) {
        if (log != null)
            U.warn(log, "Failed to unregister ordered cache communication handler because it was not found " +
                "for topic: " + topic);

        return;
    }

    cctx.gridIO().removeMessageListener(topic);

    if (log != null && log.isDebugEnabled())
        log.debug("Unregistered ordered cache communication handler for topic:" + topic);
}
/**
 * Finishes unmarshalling of a message received from a remote node, first
 * setting up the peer class loading (P2P) context when deployment info is
 * attached. Unmarshalling failures are recorded on the message via
 * {@code onClassError} rather than thrown, so the caller can route the
 * message to failed-message processing.
 *
 * @param nodeId Sender node ID.
 * @param cacheMsg Message.
 */
@SuppressWarnings({"ErrorNotRethrown", "unchecked"})
private void unmarshall(UUID nodeId, GridCacheMessage cacheMsg) {
    // Loopback messages are not marshalled on send (see onSend), so there is
    // nothing to unmarshal.
    if (cctx.localNodeId().equals(nodeId))
        return;

    GridDeploymentInfo bean = cacheMsg.deployInfo();

    if (bean != null) {
        assert depEnabled : "Received deployment info while peer class loading is disabled [nodeId=" + nodeId +
            ", msg=" + cacheMsg + ']';

        cctx.deploy().p2pContext(nodeId, bean.classLoaderId(), bean.userVersion(),
            bean.deployMode(), bean.participants(), bean.localDeploymentOwner());

        if (log.isDebugEnabled())
            log.debug("Set P2P context [senderId=" + nodeId + ", msg=" + cacheMsg + ']');
    }

    try {
        cacheMsg.finishUnmarshal(cctx, cctx.deploy().globalLoader());
    }
    catch (IgniteCheckedException e) {
        // Recorded on the message; handled later by processFailedMessage.
        cacheMsg.onClassError(e);
    }
    catch (BinaryObjectException e) {
        cacheMsg.onClassError(new IgniteCheckedException(e));
    }
    catch (Error e) {
        // Class loading errors are tolerated only for messages that opted in;
        // all other errors are fatal and rethrown.
        if (cacheMsg.ignoreClassErrors() && X.hasCause(e, NoClassDefFoundError.class,
            UnsupportedClassVersionError.class))
            cacheMsg.onClassError(new IgniteCheckedException("Failed to load class during unmarshalling: " + e, e));
        else
            throw e;
    }
}
/** {@inheritDoc} */
// Dumps handler-registry sizes to stdout for memory diagnostics.
@Override public void printMemoryStats() {
    X.println(">>> ");
    X.println(">>> Cache IO manager memory stats [grid=" + cctx.gridName() + ']');
    X.println(">>>   clsHandlersSize: " + clsHandlers.size());
    X.println(">>>   orderedHandlersSize: " + orderedHandlers.size());
}
/**
 * Ordered message listener. Adapts a handler closure to the grid IO
 * {@link GridMessageListener} interface; all received messages are routed
 * through the common {@code onMessage0} entry point.
 */
private class OrderedMessageListener implements GridMessageListener {
    /** Handler closure applied to each received message. */
    private final IgniteBiInClosure<UUID, GridCacheMessage> c;

    /**
     * @param c Handler closure.
     */
    OrderedMessageListener(IgniteBiInClosure<UUID, GridCacheMessage> c) {
        this.c = c;
    }

    /** {@inheritDoc} */
    @SuppressWarnings({"CatchGenericClass", "unchecked"})
    @Override public void onMessage(final UUID nodeId, Object msg) {
        if (log.isDebugEnabled())
            log.debug("Received cache ordered message [nodeId=" + nodeId + ", msg=" + msg + ']');

        final GridCacheMessage cacheMsg = (GridCacheMessage)msg;

        onMessage0(nodeId, cacheMsg, c);
    }
}
/**
 * Immutable key identifying a (cache ID, message class) handler registration.
 * Used as a map key, so both fields are {@code final} to guarantee the hash
 * code never changes while the key is stored.
 */
private static class ListenerKey {
    /** Cache ID. */
    private final int cacheId;

    /** Message class. */
    private final Class<? extends GridCacheMessage> msgCls;

    /**
     * @param cacheId Cache ID.
     * @param msgCls Message class.
     */
    private ListenerKey(int cacheId, Class<? extends GridCacheMessage> msgCls) {
        this.cacheId = cacheId;
        this.msgCls = msgCls;
    }

    /** {@inheritDoc} */
    @Override public boolean equals(Object o) {
        if (this == o)
            return true;

        if (!(o instanceof ListenerKey))
            return false;

        ListenerKey that = (ListenerKey)o;

        return cacheId == that.cacheId && msgCls.equals(that.msgCls);
    }

    /** {@inheritDoc} */
    @Override public int hashCode() {
        return 31 * cacheId + msgCls.hashCode();
    }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.